lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
game_plugin/src/actions.rs
NiklasEi/ld49
1fc925339de6d0aa64983d31186cf1ba932c595c
use crate::GameState; use bevy::prelude::*; pub struct ActionsPlugin; impl Plugin for ActionsPlugin { fn build(&self, app: &mut AppBuilder) { app.init_resource::<Actions>().add_system_set( SystemSet::on_update(GameState::InLevel).with_system(set_movement_actions.system()), ); } } #[derive(Default, Debug)] pub struct Actions { pub jump: bool, pub paddling: Option<f32>, pub head_balance: Option<f32>, pub restart: bool, } fn set_movement_actions(mut actions: ResMut<Actions>, keyboard_input: Res<Input<KeyCode>>) { if GameControl::PaddleBackward.just_released(&keyboard_input) || GameControl::PaddleBackward.pressed(&keyboard_input) || GameControl::PaddleForward.just_released(&keyboard_input) || GameControl::PaddleForward.pressed(&keyboard_input) { let mut paddling = actions.paddling.unwrap_or(0.); if GameControl::PaddleForward.just_released(&keyboard_input) || GameControl::PaddleBackward.just_released(&keyboard_input) { if GameControl::PaddleForward.pressed(&keyboard_input) { paddling = 1.; } else if GameControl::PaddleBackward.pressed(&keyboard_input) { paddling = -1.; } else { paddling = 0.; } } else if GameControl::PaddleForward.just_pressed(&keyboard_input) { paddling = 1.; } else if GameControl::PaddleBackward.just_pressed(&keyboard_input) { paddling = -1.; } actions.paddling = Some(paddling); } else { actions.paddling = None; } if GameControl::BalanceForward.just_released(&keyboard_input) || GameControl::BalanceForward.pressed(&keyboard_input) || GameControl::BalanceBackward.just_released(&keyboard_input) || GameControl::BalanceBackward.pressed(&keyboard_input) { let mut head_balance = actions.head_balance.unwrap_or(0.); if GameControl::BalanceForward.just_released(&keyboard_input) || GameControl::BalanceBackward.just_released(&keyboard_input) { if GameControl::BalanceForward.pressed(&keyboard_input) { head_balance = 1.; } else if GameControl::BalanceBackward.pressed(&keyboard_input) { head_balance = -1.; } else { head_balance = 0.; } } else if 
GameControl::BalanceForward.just_pressed(&keyboard_input) { head_balance = 1.; } else if GameControl::BalanceBackward.just_pressed(&keyboard_input) { head_balance = -1.; } actions.head_balance = Some(head_balance); } else { actions.head_balance = None; } actions.jump = GameControl::Jump.just_pressed(&keyboard_input); actions.restart = GameControl::Restart.just_pressed(&keyboard_input); } enum GameControl { BalanceForward, BalanceBackward, PaddleBackward, PaddleForward, Restart, Jump, } impl GameControl { fn just_released(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool { match self { GameControl::BalanceForward => keyboard_input.just_released(KeyCode::Right), GameControl::BalanceBackward => keyboard_input.just_released(KeyCode::Left), GameControl::PaddleBackward => keyboard_input.just_released(KeyCode::A), GameControl::PaddleForward => keyboard_input.just_released(KeyCode::D), GameControl::Jump => keyboard_input.just_released(KeyCode::Space), GameControl::Restart => keyboard_input.just_released(KeyCode::R), } } fn pressed(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool { match self { GameControl::BalanceForward => keyboard_input.pressed(KeyCode::Right), GameControl::BalanceBackward => keyboard_input.pressed(KeyCode::Left), GameControl::PaddleBackward => keyboard_input.pressed(KeyCode::A), GameControl::PaddleForward => keyboard_input.pressed(KeyCode::D), GameControl::Jump => keyboard_input.pressed(KeyCode::Space), GameControl::Restart => keyboard_input.pressed(KeyCode::R), } } fn just_pressed(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool { match self { GameControl::BalanceForward => keyboard_input.just_pressed(KeyCode::Right), GameControl::BalanceBackward => keyboard_input.just_pressed(KeyCode::Left), GameControl::PaddleBackward => keyboard_input.just_pressed(KeyCode::A), GameControl::PaddleForward => keyboard_input.just_pressed(KeyCode::D), GameControl::Jump => keyboard_input.just_pressed(KeyCode::Space), GameControl::Restart => 
keyboard_input.just_pressed(KeyCode::R), } } }
use crate::GameState; use bevy::prelude::*; pub struct ActionsPlugin; impl Plugin for ActionsPlugin { fn build(&self, app: &mut AppBuilder) { app.init_resource::<Actions>().add_system_set( SystemSet::on_update(GameState::InLevel).with_system(set_movement_actions.system()), ); } } #[derive(Default, Debug)] pub struct Actions { pub jump: bool, pub paddling: Option<f32>, pub head_balance: Option<f32>, pub restart: bool, } fn set_movement_actions(mut actions: ResMut<Actions>, keyboard_input: Res<Input<KeyCode>>) { if GameControl::PaddleBackward.just_released(&keyboard_input) || GameControl::PaddleBackward.pressed(&keyboard_input) || GameControl::PaddleForward.just_released(&keyboard_input) || GameControl::PaddleForward.pressed(&keyboard_input) { let mut paddling = actions.paddling.unwrap_or(0.); if GameControl::PaddleForward.just_released(&keyboard_input) || GameControl::PaddleBackward.just_released(&keyboard_input) { if GameControl::PaddleForward.pressed(&keyboard_input) { paddling = 1.; } else if GameControl::PaddleBackward.pressed(&keyboard_input) { paddling = -1.; } else { paddling = 0.; } } else if GameControl::PaddleForward.just_pressed(&keyboard_input) { paddling = 1.; } else if GameControl::PaddleBackward.just_pressed(&keyboard_input) { paddling = -1.; } actions.paddling = Some(paddling); } else { actions.paddling = None; } if GameControl::BalanceForward.just_released(&keyboard_input) || GameControl::BalanceForward.pressed(&keyboard_input) || GameControl::BalanceBackward.just_released(&keyboard_input) || GameControl::BalanceBackward.pressed(&keyboard_input) { let mut head_balance
actions.head_balance = Some(head_balance); } else { actions.head_balance = None; } actions.jump = GameControl::Jump.just_pressed(&keyboard_input); actions.restart = GameControl::Restart.just_pressed(&keyboard_input); } enum GameControl { BalanceForward, BalanceBackward, PaddleBackward, PaddleForward, Restart, Jump, } impl GameControl { fn just_released(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool { match self { GameControl::BalanceForward => keyboard_input.just_released(KeyCode::Right), GameControl::BalanceBackward => keyboard_input.just_released(KeyCode::Left), GameControl::PaddleBackward => keyboard_input.just_released(KeyCode::A), GameControl::PaddleForward => keyboard_input.just_released(KeyCode::D), GameControl::Jump => keyboard_input.just_released(KeyCode::Space), GameControl::Restart => keyboard_input.just_released(KeyCode::R), } } fn pressed(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool { match self { GameControl::BalanceForward => keyboard_input.pressed(KeyCode::Right), GameControl::BalanceBackward => keyboard_input.pressed(KeyCode::Left), GameControl::PaddleBackward => keyboard_input.pressed(KeyCode::A), GameControl::PaddleForward => keyboard_input.pressed(KeyCode::D), GameControl::Jump => keyboard_input.pressed(KeyCode::Space), GameControl::Restart => keyboard_input.pressed(KeyCode::R), } } fn just_pressed(&self, keyboard_input: &Res<Input<KeyCode>>) -> bool { match self { GameControl::BalanceForward => keyboard_input.just_pressed(KeyCode::Right), GameControl::BalanceBackward => keyboard_input.just_pressed(KeyCode::Left), GameControl::PaddleBackward => keyboard_input.just_pressed(KeyCode::A), GameControl::PaddleForward => keyboard_input.just_pressed(KeyCode::D), GameControl::Jump => keyboard_input.just_pressed(KeyCode::Space), GameControl::Restart => keyboard_input.just_pressed(KeyCode::R), } } }
= actions.head_balance.unwrap_or(0.); if GameControl::BalanceForward.just_released(&keyboard_input) || GameControl::BalanceBackward.just_released(&keyboard_input) { if GameControl::BalanceForward.pressed(&keyboard_input) { head_balance = 1.; } else if GameControl::BalanceBackward.pressed(&keyboard_input) { head_balance = -1.; } else { head_balance = 0.; } } else if GameControl::BalanceForward.just_pressed(&keyboard_input) { head_balance = 1.; } else if GameControl::BalanceBackward.just_pressed(&keyboard_input) { head_balance = -1.; }
function_block-random_span
[ { "content": "fn jump(\n\n actions: Res<Actions>,\n\n mut wheel_query: Query<\n\n (Entity, &mut RigidBodyVelocity, &Transform),\n\n (With<Wheel>, Without<Body>),\n\n >,\n\n mut jump_block: ResMut<JumpBlock>,\n\n mut body_query: Query<&Transform, (With<Body>, Without<Wheel>)>,\n\n platform_query: Query<Entity, (With<Platform>, Without<Wheel>, Without<Body>)>,\n\n mut sound_effects: EventWriter<PlaySoundEffect>,\n\n narrow_phase: Res<NarrowPhase>,\n\n) {\n\n // give it a frame until allowing the next jump...\n\n if *jump_block == JumpBlock::Blocked {\n\n *jump_block = JumpBlock::NotBlocked;\n\n return;\n\n }\n\n if !actions.jump || actions.restart {\n\n return;\n\n }\n", "file_path": "game_plugin/src/player.rs", "rank": 1, "score": 92838.92916055983 }, { "content": "fn restart(\n\n mut commands: Commands,\n\n mut wheel_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Wheel>, Without<Body>, Without<Head>),\n\n >,\n\n mut body_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Body>, Without<Wheel>, Without<Head>),\n\n >,\n\n mut head_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Head>, Without<Wheel>, Without<Body>),\n\n >,\n\n level: Res<Level>,\n\n button_materials: Res<ButtonMaterials>,\n\n mut state: ResMut<State<GameState>>,\n\n mut interaction_query: Query<ButtonInteraction, (Changed<Interaction>, With<Button>)>,\n\n text_query: Query<Entity, With<Text>>,\n\n) {\n", "file_path": "game_plugin/src/lost.rs", "rank": 2, "score": 92608.42080174292 }, { "content": "fn restart(\n\n actions: Res<Actions>,\n\n mut wheel_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Wheel>, Without<Body>, Without<Head>),\n\n >,\n\n mut body_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Body>, Without<Wheel>, Without<Head>),\n\n >,\n\n mut head_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Head>, 
Without<Wheel>, Without<Body>),\n\n >,\n\n level: Res<Level>,\n\n) {\n\n if actions.restart {\n\n let (mut wheel_velocity, mut wheel_position) = wheel_query.single_mut().unwrap();\n\n let (mut body_velocity, mut body_position) = body_query.single_mut().unwrap();\n\n let (mut head_velocity, mut head_position) = head_query.single_mut().unwrap();\n\n reset_level(\n\n &level,\n\n (&mut wheel_velocity, &mut wheel_position),\n\n (&mut body_velocity, &mut body_position),\n\n (&mut head_velocity, &mut head_position),\n\n )\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 3, "score": 92608.42080174292 }, { "content": "pub fn reset_level(\n\n level: &Level,\n\n (wheel_velocity, mut wheel_position): (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (body_velocity, mut body_position): (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (head_velocity, mut head_position): (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n) {\n\n let starting_points = level.get_starting_points();\n\n *wheel_velocity = RigidBodyVelocity::default();\n\n wheel_position.position = Isometry::from(starting_points.wheel);\n\n wheel_position.next_position = Isometry::from(starting_points.wheel);\n\n *body_velocity = RigidBodyVelocity::default();\n\n body_position.position = Isometry::from(starting_points.body);\n\n body_position.next_position = Isometry::from(starting_points.body);\n\n *head_velocity = RigidBodyVelocity::default();\n\n head_position.position = Isometry::from(starting_points.head);\n\n head_position.next_position = Isometry::from(starting_points.head);\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 4, "score": 82748.57232093747 }, { "content": "pub fn prepare_player_and_platforms(\n\n mut commands: Commands,\n\n textures: Res<TextureAssets>,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n level: Res<Level>,\n\n) {\n\n spawn_ground(&mut commands, &level);\n\n let head_id = spawn_head(&mut commands, &textures, &mut materials);\n\n let 
body_id = spawn_body(&mut commands, &textures, &mut materials);\n\n let wheel_id = spawn_wheel(&mut commands, &textures, &mut materials);\n\n\n\n let mut wheel_body_joint = BallJoint::new(\n\n Vec2::new(0.0, 0.0).into(),\n\n Vec2::new(0.0, -0.5 * BODY_LENGTH - BODY_RADIUS - WHEEL_RADIUS - 0.1).into(),\n\n );\n\n wheel_body_joint.motor_model = SpringModel::Disabled;\n\n commands\n\n .spawn()\n\n .insert(JointBuilderComponent::new(\n\n wheel_body_joint,\n", "file_path": "game_plugin/src/player.rs", "rank": 5, "score": 80712.38777082809 }, { "content": "struct RestartButton;\n\n\n", "file_path": "game_plugin/src/lost.rs", "rank": 6, "score": 75347.58046391027 }, { "content": "fn setup_rapier_and_camera(mut commands: Commands, mut configuration: ResMut<RapierConfiguration>) {\n\n configuration.scale = PHYSICS_SCALE;\n\n\n\n let mut camera = OrthographicCameraBundle::new_2d();\n\n camera.transform = Transform::from_translation(Vec3::new(0.0, 300.0, 0.0));\n\n commands.spawn_bundle(camera).insert(Camera);\n\n commands.spawn_bundle(UiCameraBundle::default());\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 7, "score": 70923.63766762023 }, { "content": "fn paddle_wheel(\n\n time: Res<Time>,\n\n actions: Res<Actions>,\n\n mut wheel_query: Query<&mut RigidBodyVelocity, With<Wheel>>,\n\n) {\n\n if actions.paddling.is_none() || actions.restart {\n\n return;\n\n }\n\n let speed = 20.;\n\n let movement = actions.paddling.unwrap() * speed * time.delta_seconds();\n\n for mut wheel_velocity in wheel_query.iter_mut() {\n\n wheel_velocity.angvel = wheel_velocity.angvel - movement;\n\n // player_velocity.linvel.data.0[0][0] += movement.x;\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 8, "score": 69487.29220633287 }, { "content": "fn start_level(mut state: ResMut<State<GameState>>) {\n\n state.set(GameState::InLevel).unwrap();\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 9, "score": 67695.10812173443 }, { "content": "fn 
start_game(mut state: ResMut<State<GameState>>) {\n\n state.set(GameState::Prepare).unwrap();\n\n}\n", "file_path": "game_plugin/src/menu.rs", "rank": 10, "score": 67695.10812173443 }, { "content": "fn prepare_level(mut state: ResMut<State<GameState>>) {\n\n state.set(GameState::PrepareLevel).unwrap();\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 11, "score": 67695.10812173443 }, { "content": "fn show_restart_button(\n\n mut commands: Commands,\n\n font_assets: Res<FontAssets>,\n\n button_materials: Res<ButtonMaterials>,\n\n) {\n\n commands\n\n .spawn_bundle(ButtonBundle {\n\n style: Style {\n\n size: Size::new(Val::Px(120.0), Val::Px(50.0)),\n\n margin: Rect::all(Val::Auto),\n\n justify_content: JustifyContent::Center,\n\n align_items: AlignItems::Center,\n\n ..Default::default()\n\n },\n\n material: button_materials.normal.clone(),\n\n ..Default::default()\n\n })\n\n .insert(RestartButton)\n\n .with_children(|parent| {\n\n parent.spawn_bundle(TextBundle {\n", "file_path": "game_plugin/src/lost.rs", "rank": 12, "score": 67087.27082607857 }, { "content": "fn spawn_ground(commands: &mut Commands, level: &Level) {\n\n let finish_line = level.finish_line();\n\n let ground_length = (finish_line + 800.) / PHYSICS_SCALE;\n\n let mut border_points: Vec<f32> = level\n\n .holes()\n\n .iter()\n\n .flat_map(|hole| vec![hole[0] / PHYSICS_SCALE, hole[1] / PHYSICS_SCALE])\n\n .collect();\n\n border_points.push((finish_line + 400.) / PHYSICS_SCALE);\n\n border_points.insert(0, -400. 
/ PHYSICS_SCALE);\n\n let (beginning, end): (Vec<(usize, f32)>, Vec<(usize, f32)>) = border_points\n\n .drain(..)\n\n .enumerate()\n\n .partition(|(index, _)| index % 2 == 0);\n\n let mut starting_points: Vec<f32> = beginning.iter().map(|(_, value)| *value).collect();\n\n let mut ending_points: Vec<f32> = end.iter().map(|(_, value)| *value).collect();\n\n let borders: Vec<(f32, f32)> = starting_points\n\n .drain(..)\n\n .zip(ending_points.drain(..))\n\n .collect();\n", "file_path": "game_plugin/src/player.rs", "rank": 13, "score": 63109.192121612956 }, { "content": "fn build_parcours(mut commands: Commands, level: Res<Level>) {\n\n let mut colliders = level.colliders();\n\n for collider in colliders.drain(..) {\n\n commands\n\n .spawn_bundle(collider)\n\n .insert(ColliderDebugRender::default())\n\n .insert(ColliderPositionSync::Discrete)\n\n .insert(Platform)\n\n .insert(ForLevel);\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 14, "score": 59925.75753409488 }, { "content": "fn clear_level(mut commands: Commands, level_entites: Query<Entity, With<ForLevel>>) {\n\n for entity in level_entites.iter() {\n\n commands.entity(entity).despawn_recursive();\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 15, "score": 54572.539389805504 }, { "content": "struct FinishedButton;\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 16, "score": 53357.64684138311 }, { "content": "fn landing(\n\n mut contact_event: EventReader<ContactEvent>,\n\n mut sound_effects: EventWriter<PlaySoundEffect>,\n\n mut land_block: ResMut<LandBlock>,\n\n) {\n\n // give it a frame until playing the next sound...\n\n let mut play = false;\n\n for event in contact_event.iter() {\n\n if let ContactEvent::Started(_, _) = event {\n\n play = true;\n\n }\n\n }\n\n if *land_block == LandBlock::Blocked {\n\n *land_block = LandBlock::NotBlocked;\n\n return;\n\n }\n\n if play {\n\n sound_effects.send(PlaySoundEffect::Land);\n\n *land_block = 
LandBlock::Blocked;\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 17, "score": 48707.29441597768 }, { "content": "fn fall(\n\n mut body_query: Query<&RigidBodyPosition, With<Body>>,\n\n mut state: ResMut<State<GameState>>,\n\n mut sound_effects: EventWriter<PlaySoundEffect>,\n\n) {\n\n let body_transform = body_query.single_mut().unwrap();\n\n\n\n if body_transform.position.translation.y < BOULDER_HEIGTH {\n\n sound_effects.send(PlaySoundEffect::Fall);\n\n state.push(GameState::Lost).unwrap();\n\n }\n\n}\n", "file_path": "game_plugin/src/levels.rs", "rank": 18, "score": 48707.29441597768 }, { "content": "fn lost(\n\n mut head_query: Query<Entity, (With<Head>, Without<Platform>)>,\n\n platform_query: Query<Entity, (With<Platform>, Without<Head>)>,\n\n narrow_phase: Res<NarrowPhase>,\n\n mut sounds: EventWriter<PlaySoundEffect>,\n\n mut state: ResMut<State<GameState>>,\n\n) {\n\n if let Ok(head) = head_query.single_mut() {\n\n for platform in platform_query.iter() {\n\n if let Some(contact_pair) = narrow_phase.contact_pair(head.handle(), platform.handle())\n\n {\n\n if contact_pair.has_any_active_contact {\n\n sounds.send(PlaySoundEffect::Loose);\n\n state.overwrite_push(GameState::Lost).unwrap();\n\n return;\n\n }\n\n }\n\n }\n\n } else {\n\n warn!(\"Why is there more than one player?\");\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/lost.rs", "rank": 19, "score": 48707.29441597768 }, { "content": "fn spawn_head(\n\n commands: &mut Commands,\n\n textures: &TextureAssets,\n\n materials: &mut Assets<ColorMaterial>,\n\n) -> Entity {\n\n commands\n\n .spawn_bundle(RigidBodyBundle {\n\n position: [\n\n 0.,\n\n 0.5 * BOULDER_HEIGTH\n\n + 2. * WHEEL_RADIUS\n\n + BODY_LENGTH\n\n + 2. 
* BODY_RADIUS\n\n + HEAD_RADIUS,\n\n ]\n\n .into(),\n\n ..Default::default()\n\n })\n\n .insert_bundle(ColliderBundle {\n\n shape: ColliderShape::ball(HEAD_RADIUS),\n", "file_path": "game_plugin/src/player.rs", "rank": 20, "score": 47308.12656231527 }, { "content": "fn draw_background(\n\n mut commands: Commands,\n\n textures: Res<TextureAssets>,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n level: Res<Level>,\n\n) {\n\n let mut random = rand::thread_rng();\n\n for slot in 0..5 {\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(\n\n {\n\n match random.gen_range(0..3) {\n\n 0 => textures.background_1.clone(),\n\n 1 => textures.background_2.clone(),\n\n _ => textures.background_3.clone(),\n\n }\n\n }\n\n .into(),\n\n ),\n", "file_path": "game_plugin/src/player.rs", "rank": 21, "score": 47308.12656231527 }, { "content": "fn move_camera(\n\n head_query: Query<&Transform, (With<Head>, Without<Camera>)>,\n\n mut camera_query: Query<&mut Transform, (With<Camera>, Without<Head>)>,\n\n) {\n\n for head_transform in head_query.iter() {\n\n for mut camera_transform in camera_query.iter_mut() {\n\n camera_transform.translation.x = head_transform.translation.x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 22, "score": 47308.12656231527 }, { "content": "fn move_head(\n\n time: Res<Time>,\n\n actions: Res<Actions>,\n\n mut head_query: Query<&mut RigidBodyVelocity, With<Head>>,\n\n) {\n\n if actions.head_balance.is_none() || actions.restart {\n\n return;\n\n }\n\n let speed = 20.;\n\n let movement = actions.head_balance.unwrap() * speed * time.delta_seconds();\n\n for mut head_velocity in head_query.iter_mut() {\n\n // head_velocity.angvel = clamp(head_velocity.angvel - movement, -5., 5.);\n\n head_velocity.linvel.data.0[0][0] += movement;\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 23, "score": 47308.12656231527 }, { "content": "fn next_level(\n\n mut commands: Commands,\n\n mut wheel_query: 
Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Wheel>, Without<Body>, Without<Head>),\n\n >,\n\n mut body_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Body>, Without<Wheel>, Without<Head>),\n\n >,\n\n mut head_query: Query<\n\n (&mut RigidBodyVelocity, &mut RigidBodyPosition),\n\n (With<Head>, Without<Wheel>, Without<Body>),\n\n >,\n\n mut level: ResMut<Level>,\n\n button_materials: Res<ButtonMaterials>,\n\n mut state: ResMut<State<GameState>>,\n\n mut interaction_query: Query<ButtonInteraction, With<Button>>,\n\n text_query: Query<Entity, With<Text>>,\n\n input: Res<Input<KeyCode>>,\n", "file_path": "game_plugin/src/levels.rs", "rank": 24, "score": 47308.12656231527 }, { "content": "fn spawn_wheel(\n\n commands: &mut Commands,\n\n textures: &TextureAssets,\n\n materials: &mut Assets<ColorMaterial>,\n\n) -> Entity {\n\n commands\n\n .spawn_bundle(RigidBodyBundle {\n\n position: [0., 0.5 * BOULDER_HEIGTH + WHEEL_RADIUS].into(),\n\n damping: RigidBodyDamping {\n\n angular_damping: 0.2.into(),\n\n ..RigidBodyDamping::default()\n\n },\n\n ..Default::default()\n\n })\n\n .insert_bundle(ColliderBundle {\n\n shape: ColliderShape::ball(WHEEL_RADIUS),\n\n flags: ColliderFlags::from(ActiveEvents::CONTACT_EVENTS),\n\n ..Default::default()\n\n })\n\n .insert_bundle(SpriteBundle {\n", "file_path": "game_plugin/src/player.rs", "rank": 25, "score": 47308.12656231527 }, { "content": "fn spawn_body(\n\n commands: &mut Commands,\n\n textures: &TextureAssets,\n\n materials: &mut Assets<ColorMaterial>,\n\n) -> Entity {\n\n commands\n\n .spawn_bundle(RigidBodyBundle {\n\n position: [\n\n 0.,\n\n BOULDER_HEIGTH + 2. 
* WHEEL_RADIUS + 0.5 * BODY_LENGTH + BODY_RADIUS,\n\n ]\n\n .into(),\n\n forces: RigidBodyForces {\n\n gravity_scale: 0.3,\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n })\n\n .insert_bundle(ColliderBundle {\n\n shape: ColliderShape::capsule(\n", "file_path": "game_plugin/src/player.rs", "rank": 26, "score": 47308.12656231527 }, { "content": "fn show_finished_button(\n\n mut commands: Commands,\n\n font_assets: Res<FontAssets>,\n\n button_materials: Res<ButtonMaterials>,\n\n level: Res<Level>,\n\n) {\n\n let is_last_level = *level == Level::last();\n\n commands\n\n .spawn_bundle(ButtonBundle {\n\n style: Style {\n\n size: Size::new(Val::Px(120.0), Val::Px(50.0)),\n\n margin: Rect::all(Val::Auto),\n\n justify_content: JustifyContent::Center,\n\n align_items: AlignItems::Center,\n\n ..Default::default()\n\n },\n\n material: button_materials.normal.clone(),\n\n ..Default::default()\n\n })\n\n .insert(FinishedButton)\n", "file_path": "game_plugin/src/levels.rs", "rank": 27, "score": 46027.30161311518 }, { "content": "fn play_sound_effects(\n\n audio_assets: Res<AudioAssets>,\n\n audio: Res<Audio>,\n\n mut events: EventReader<PlaySoundEffect>,\n\n) {\n\n for event in events.iter() {\n\n match event {\n\n PlaySoundEffect::Jump => match rand::thread_rng().gen_range(0..2) {\n\n 0 => audio.play(audio_assets.jump_1.clone()),\n\n _ => audio.play(audio_assets.jump_2.clone()),\n\n },\n\n PlaySoundEffect::Land => {\n\n audio.play(audio_assets.land_1.clone());\n\n }\n\n PlaySoundEffect::Fall => {\n\n audio.play(audio_assets.fall.clone());\n\n }\n\n PlaySoundEffect::Won => {\n\n audio.play(audio_assets.won.clone());\n\n }\n\n PlaySoundEffect::Loose => match rand::thread_rng().gen_range(0..2) {\n\n 0 => audio.play(audio_assets.lose_1.clone()),\n\n _ => audio.play(audio_assets.lose_2.clone()),\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/audio.rs", "rank": 28, "score": 46027.30161311518 }, { "content": "fn cross_finish_line(\n\n mut body_query: 
Query<&Transform, With<Body>>,\n\n level: Res<Level>,\n\n mut state: ResMut<State<GameState>>,\n\n mut sound_effects: EventWriter<PlaySoundEffect>,\n\n) {\n\n let body_transform = body_query.single_mut().unwrap();\n\n\n\n if body_transform.translation.x > level.finish_line() {\n\n // make sure win + lose in one frame don't crash the game...\n\n state.overwrite_push(GameState::Finished).unwrap();\n\n sound_effects.send(PlaySoundEffect::Won);\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 29, "score": 46027.30161311518 }, { "content": "fn main() {\n\n let target = env::var(\"TARGET\").unwrap();\n\n if target.contains(\"windows\") {\n\n embed_resource::compile(\"build/windows/icon.rc\");\n\n }\n\n}\n", "file_path": "build.rs", "rank": 30, "score": 41236.258763010825 }, { "content": "fn main() {\n\n let mut app = App::build();\n\n app\n\n // .insert_resource(Msaa { samples: 4 })\n\n .insert_resource(bevy::log::LogSettings {\n\n level: bevy::log::Level::WARN,\n\n filter: \"wgpu=error,bevy_ecs=error\".to_string(),\n\n })\n\n .insert_resource(ClearColor(Color::rgb(0.4, 0.4, 0.4)))\n\n .insert_resource(WindowDescriptor {\n\n width: 800.,\n\n height: 600.,\n\n title: \"Me And My Unicycle\".to_string(),\n\n ..Default::default()\n\n })\n\n .add_plugins(DefaultPlugins)\n\n .add_plugin(GamePlugin);\n\n\n\n #[cfg(target_arch = \"wasm32\")]\n\n app.add_plugin(bevy_webgl2::WebGL2Plugin);\n\n\n\n app.run();\n\n}\n", "file_path": "src/main.rs", "rank": 31, "score": 40003.71683590321 }, { "content": "#[derive(PartialEq)]\n\nenum JumpBlock {\n\n Blocked,\n\n NotBlocked,\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 36, "score": 32462.343335864745 }, { "content": "fn start_background(audio_assets: Res<AudioAssets>, audio: Res<Audio>) {\n\n audio.play_looped(audio_assets.background.clone());\n\n}\n", "file_path": "game_plugin/src/audio.rs", "rank": 38, "score": 30950.494791323785 }, { "content": "fn build_collider(isometry: Isometry2<f32>, shape: 
ColliderShape) -> ColliderBundle {\n\n ColliderBundle {\n\n shape,\n\n position: ColliderPosition(isometry),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 39, "score": 30950.494791323785 }, { "content": "use crate::GameState;\n\nuse bevy::prelude::*;\n\n\n\npub struct MenuPlugin;\n\n\n\nimpl Plugin for MenuPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_system_set(SystemSet::on_enter(GameState::Menu).with_system(start_game.system()));\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/menu.rs", "rank": 40, "score": 11364.341777958365 }, { "content": "use crate::GameState;\n\nuse bevy::prelude::*;\n\nuse bevy_asset_loader::{AssetCollection, AssetLoader};\n\nuse bevy_kira_audio::AudioSource;\n\n\n\npub struct LoadingPlugin;\n\n\n\nimpl Plugin for LoadingPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n AssetLoader::new(GameState::Loading, GameState::Menu)\n\n .with_collection::<FontAssets>()\n\n .with_collection::<AudioAssets>()\n\n .with_collection::<TextureAssets>()\n\n .build(app);\n\n }\n\n}\n\n\n\n#[derive(AssetCollection)]\n\npub struct AudioAssets {\n\n #[asset(path = \"audio/jump_1.ogg\")]\n", "file_path": "game_plugin/src/loading.rs", "rank": 41, "score": 11363.768627564994 }, { "content": "use crate::audio::PlaySoundEffect;\n\nuse crate::levels::{reset_level, Level};\n\nuse crate::loading::FontAssets;\n\nuse crate::player::*;\n\nuse crate::GameState;\n\nuse bevy::prelude::*;\n\nuse bevy_rapier2d::prelude::*;\n\n\n\npub struct LostPlugin;\n\n\n\nimpl Plugin for LostPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.init_resource::<ButtonMaterials>()\n\n .add_system_set(\n\n SystemSet::on_update(GameState::InLevel)\n\n .with_system(lost.system().label(LostSystem::Lost)),\n\n )\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::Lost).with_system(show_restart_button.system()),\n\n )\n\n .add_system_set(SystemSet::on_update(GameState::Lost).with_system(restart.system()));\n\n 
}\n\n}\n\n\n\n#[derive(SystemLabel, Clone, Hash, Debug, Eq, PartialEq)]\n\npub enum LostSystem {\n\n Lost,\n\n}\n\n\n", "file_path": "game_plugin/src/lost.rs", "rank": 42, "score": 11363.12993451286 }, { "content": "\n\nimpl Plugin for GamePlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_state(GameState::Loading)\n\n .add_plugin(RapierPhysicsPlugin::<NoUserData>::default())\n\n .add_plugin(RapierRenderPlugin)\n\n .add_plugin(LoadingPlugin)\n\n .add_plugin(ActionsPlugin)\n\n .add_plugin(PlayerPlugin)\n\n .add_plugin(InternalAudioPlugin)\n\n .add_plugin(LevelsPlugin)\n\n .add_plugin(LostPlugin)\n\n .add_plugin(MenuPlugin);\n\n }\n\n}\n\n\n\n#[derive(Clone, Eq, PartialEq, Debug, Hash)]\n", "file_path": "game_plugin/src/lib.rs", "rank": 43, "score": 11362.047248236733 }, { "content": "pub const PHYSICS_SCALE: f32 = 32.0;\n\n\n\npub struct Wheel;\n\npub struct Head;\n\npub struct Body;\n\npub struct Camera;\n\npub struct Platform;\n\n\n\nimpl Plugin for PlayerPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.insert_resource(JumpBlock::NotBlocked)\n\n .insert_resource(LandBlock::NotBlocked)\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::Prepare)\n\n .with_system(setup_rapier_and_camera.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::PrepareLevel)\n\n .with_system(prepare_player_and_platforms.system())\n\n .with_system(draw_background.system()),\n", "file_path": "game_plugin/src/player.rs", "rank": 44, "score": 11361.618838018603 }, { "content": "use crate::loading::AudioAssets;\n\nuse crate::GameState;\n\nuse bevy::prelude::*;\n\nuse bevy_kira_audio::{Audio, AudioPlugin};\n\nuse rand::Rng;\n\n\n\npub struct InternalAudioPlugin;\n\n\n\nimpl Plugin for InternalAudioPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.add_plugin(AudioPlugin)\n\n .add_event::<PlaySoundEffect>()\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::Menu).with_system(start_background.system()),\n\n )\n\n 
.add_system_set(\n\n SystemSet::on_update(GameState::InLevel).with_system(play_sound_effects.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Lost).with_system(play_sound_effects.system()),\n", "file_path": "game_plugin/src/audio.rs", "rank": 45, "score": 11361.401937056771 }, { "content": "use bevy::prelude::*;\n\nuse bevy_rapier2d::prelude::*;\n\n\n\npub struct GamePlugin;\n\n\n\nmod actions;\n\nmod audio;\n\nmod levels;\n\nmod loading;\n\nmod lost;\n\nmod menu;\n\nmod player;\n\n\n\nuse crate::actions::ActionsPlugin;\n\nuse crate::audio::InternalAudioPlugin;\n\nuse crate::levels::LevelsPlugin;\n\nuse crate::lost::LostPlugin;\n\nuse crate::menu::MenuPlugin;\n\nuse crate::player::PlayerPlugin;\n\nuse loading::LoadingPlugin;\n", "file_path": "game_plugin/src/lib.rs", "rank": 46, "score": 11361.39948606434 }, { "content": "use crate::actions::Actions;\n\nuse crate::audio::PlaySoundEffect;\n\nuse crate::levels::{ForLevel, Level};\n\nuse crate::loading::TextureAssets;\n\nuse crate::lost::LostSystem;\n\nuse crate::GameState;\n\nuse bevy::prelude::*;\n\nuse bevy_rapier2d::na::Point2;\n\nuse bevy_rapier2d::prelude::*;\n\nuse nalgebra::Isometry2;\n\nuse rand::Rng;\n\n\n\npub struct PlayerPlugin;\n\n\n\npub const WHEEL_RADIUS: f32 = 1.;\n\npub const HEAD_RADIUS: f32 = 0.5;\n\npub const BODY_RADIUS: f32 = 0.5;\n\npub const BODY_LENGTH: f32 = 1.;\n\npub const BOULDER_HEIGTH: f32 = 1.0;\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 47, "score": 11360.352115428175 }, { "content": "use crate::actions::Actions;\n\nuse crate::audio::PlaySoundEffect;\n\nuse crate::loading::FontAssets;\n\nuse crate::lost::{ButtonInteraction, ButtonMaterials};\n\nuse crate::nalgebra::Isometry2;\n\nuse crate::player::*;\n\nuse crate::GameState;\n\nuse bevy::prelude::*;\n\nuse bevy_rapier2d::na::Point2;\n\nuse bevy_rapier2d::prelude::*;\n\n\n\npub struct LevelsPlugin;\n\n\n\n#[derive(PartialEq)]\n\npub enum Level {\n\n Tutorial,\n\n First,\n\n Second,\n\n 
Third,\n\n}\n", "file_path": "game_plugin/src/levels.rs", "rank": 48, "score": 11359.887675548745 }, { "content": " Isometry2::new(\n\n [1250.0 / PHYSICS_SCALE, 2.].into(),\n\n -std::f32::consts::FRAC_PI_4,\n\n ),\n\n ColliderShape::cuboid(4., 1.),\n\n ));\n\n }\n\n }\n\n\n\n colliders\n\n }\n\n}\n\n\n\nimpl Plugin for LevelsPlugin {\n\n fn build(&self, app: &mut AppBuilder) {\n\n app.insert_resource(Level::Tutorial)\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Prepare).with_system(prepare_level.system()),\n\n )\n\n .add_system_set(\n", "file_path": "game_plugin/src/levels.rs", "rank": 49, "score": 11359.193423182809 }, { "content": " Interaction::None => {\n\n *material = button_materials.normal.clone();\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct ButtonMaterials {\n\n pub normal: Handle<ColorMaterial>,\n\n pub hovered: Handle<ColorMaterial>,\n\n}\n\n\n\nimpl FromWorld for ButtonMaterials {\n\n fn from_world(world: &mut World) -> Self {\n\n let mut materials = world.get_resource_mut::<Assets<ColorMaterial>>().unwrap();\n\n ButtonMaterials {\n\n normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()),\n\n hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/lost.rs", "rank": 50, "score": 11358.865377434195 }, { "content": " pub tutorial_jump: Handle<Texture>,\n\n #[asset(path = \"textures/tutorial_falling.png\")]\n\n pub tutorial_falling: Handle<Texture>,\n\n #[asset(path = \"textures/tutorial_restart.png\")]\n\n pub tutorial_restart: Handle<Texture>,\n\n #[asset(path = \"textures/finish.png\")]\n\n pub finish: Handle<Texture>,\n\n #[asset(path = \"textures/player_won.png\")]\n\n pub player_won: Handle<Texture>,\n\n #[asset(path = \"textures/thanks.png\")]\n\n pub thanks: Handle<Texture>,\n\n}\n", "file_path": "game_plugin/src/loading.rs", "rank": 51, "score": 11357.56341215961 }, { "content": " pub jump_1: Handle<AudioSource>,\n\n #[asset(path = \"audio/jump_2.ogg\")]\n\n pub jump_2: 
Handle<AudioSource>,\n\n #[asset(path = \"audio/land_1.ogg\")]\n\n pub land_1: Handle<AudioSource>,\n\n #[asset(path = \"audio/lose_1.ogg\")]\n\n pub lose_1: Handle<AudioSource>,\n\n #[asset(path = \"audio/lose_2.ogg\")]\n\n pub lose_2: Handle<AudioSource>,\n\n #[asset(path = \"audio/won.ogg\")]\n\n pub won: Handle<AudioSource>,\n\n #[asset(path = \"audio/fall.ogg\")]\n\n pub fall: Handle<AudioSource>,\n\n #[asset(path = \"audio/background.ogg\")]\n\n pub background: Handle<AudioSource>,\n\n}\n\n\n\n#[derive(AssetCollection)]\n\npub struct FontAssets {\n\n #[asset(path = \"fonts/FiraSans-Bold.ttf\")]\n", "file_path": "game_plugin/src/loading.rs", "rank": 52, "score": 11356.262130819658 }, { "content": " if let Ok((wheel, mut wheel_velocity, wheel_transform)) = wheel_query.single_mut() {\n\n for platform in platform_query.iter() {\n\n if let Some(contact_pair) = narrow_phase.contact_pair(wheel.handle(), platform.handle())\n\n {\n\n if contact_pair.has_any_active_contact {\n\n *jump_block = JumpBlock::Blocked;\n\n let body_transform = body_query.single_mut().unwrap();\n\n let jump_direction = Vec2::new(\n\n body_transform.translation.x - wheel_transform.translation.x,\n\n body_transform.translation.y - wheel_transform.translation.y,\n\n );\n\n jump_direction.normalize();\n\n sound_effects.send(PlaySoundEffect::Jump);\n\n wheel_velocity.linvel.data.0[0][0] += jump_direction.x * 0.15;\n\n wheel_velocity.linvel.data.0[0][1] += jump_direction.y * 0.15;\n\n return;\n\n }\n\n }\n\n }\n\n } else {\n\n warn!(\"Why is there more than one player?\");\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 53, "score": 11356.174332302955 }, { "content": ") {\n\n let restart = input.just_pressed(KeyCode::R);\n\n for (button, interaction, mut material, children) in interaction_query.iter_mut() {\n\n let text = text_query.get(children[0]).unwrap();\n\n if restart {\n\n commands.entity(button).despawn();\n\n commands.entity(text).despawn();\n\n 
state.replace(GameState::PrepareLevel).unwrap();\n\n let (mut wheel_velocity, mut wheel_position) = wheel_query.single_mut().unwrap();\n\n let (mut body_velocity, mut body_position) = body_query.single_mut().unwrap();\n\n let (mut head_velocity, mut head_position) = head_query.single_mut().unwrap();\n\n reset_level(\n\n &level,\n\n (&mut wheel_velocity, &mut wheel_position),\n\n (&mut body_velocity, &mut body_position),\n\n (&mut head_velocity, &mut head_position),\n\n );\n\n return;\n\n }\n\n match *interaction {\n", "file_path": "game_plugin/src/levels.rs", "rank": 54, "score": 11356.142966802247 }, { "content": "\n\npub struct StartingPoint {\n\n wheel: Point<f32>,\n\n body: Point<f32>,\n\n head: Point<f32>,\n\n}\n\n\n\npub struct ForLevel;\n\n\n\nimpl Level {\n\n pub fn last() -> Self {\n\n Level::Third\n\n }\n\n\n\n pub fn get_starting_points(&self) -> StartingPoint {\n\n match self {\n\n _ => StartingPoint {\n\n wheel: [0., 0.5 * BOULDER_HEIGTH + WHEEL_RADIUS].into(),\n\n body: [\n\n 0.,\n", "file_path": "game_plugin/src/levels.rs", "rank": 55, "score": 11355.912111064143 }, { "content": " Interaction::Clicked => {\n\n let next_level = level.next();\n\n *level = next_level;\n\n commands.entity(button).despawn();\n\n commands.entity(text).despawn();\n\n state.replace(GameState::PrepareLevel).unwrap();\n\n let (mut wheel_velocity, mut wheel_position) = wheel_query.single_mut().unwrap();\n\n let (mut body_velocity, mut body_position) = body_query.single_mut().unwrap();\n\n let (mut head_velocity, mut head_position) = head_query.single_mut().unwrap();\n\n reset_level(\n\n &level,\n\n (&mut wheel_velocity, &mut wheel_position),\n\n (&mut body_velocity, &mut body_position),\n\n (&mut head_velocity, &mut head_position),\n\n );\n\n return;\n\n }\n\n Interaction::Hovered => {\n\n *material = button_materials.hovered.clone();\n\n }\n\n Interaction::None => {\n\n *material = button_materials.normal.clone();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"game_plugin/src/levels.rs", "rank": 56, "score": 11355.532747367075 }, { "content": " pub fira_sans: Handle<Font>,\n\n}\n\n\n\n#[derive(AssetCollection)]\n\npub struct TextureAssets {\n\n #[asset(path = \"textures/wheel.png\")]\n\n pub wheel: Handle<Texture>,\n\n #[asset(path = \"textures/head.png\")]\n\n pub head: Handle<Texture>,\n\n #[asset(path = \"textures/body.png\")]\n\n pub body: Handle<Texture>,\n\n #[asset(path = \"textures/background_1.png\")]\n\n pub background_1: Handle<Texture>,\n\n #[asset(path = \"textures/background_2.png\")]\n\n pub background_2: Handle<Texture>,\n\n #[asset(path = \"textures/background_3.png\")]\n\n pub background_3: Handle<Texture>,\n\n #[asset(path = \"textures/tutorial.png\")]\n\n pub tutorial: Handle<Texture>,\n\n #[asset(path = \"textures/tutorial_jump.png\")]\n", "file_path": "game_plugin/src/loading.rs", "rank": 57, "score": 11355.397956434068 }, { "content": " )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Finished).with_system(play_sound_effects.system()),\n\n );\n\n }\n\n}\n\n\n\npub enum PlaySoundEffect {\n\n Jump,\n\n Land,\n\n Loose,\n\n Fall,\n\n Won,\n\n}\n\n\n", "file_path": "game_plugin/src/audio.rs", "rank": 58, "score": 11355.288786271645 }, { "content": " ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(textures.tutorial_jump.clone().into()),\n\n transform: {\n\n let mut transform = Transform::from_translation(Vec3::new(620.0, 250.0, 0.0));\n\n transform.scale = Vec3::new(0.5, 0.5, 0.5);\n\n\n\n transform\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(textures.tutorial_falling.clone().into()),\n\n transform: {\n\n let mut transform = Transform::from_translation(Vec3::new(1600.0, 250.0, 0.0));\n", "file_path": "game_plugin/src/player.rs", "rank": 59, "score": 11354.717046687962 }, { "content": " for (button, interaction, mut material, 
children) in interaction_query.iter_mut() {\n\n let text = text_query.get(children[0]).unwrap();\n\n match *interaction {\n\n Interaction::Clicked => {\n\n commands.entity(button).despawn();\n\n commands.entity(text).despawn();\n\n state.replace(GameState::PrepareLevel).unwrap();\n\n let (mut wheel_velocity, mut wheel_position) = wheel_query.single_mut().unwrap();\n\n let (mut body_velocity, mut body_position) = body_query.single_mut().unwrap();\n\n let (mut head_velocity, mut head_position) = head_query.single_mut().unwrap();\n\n reset_level(\n\n &level,\n\n (&mut wheel_velocity, &mut wheel_position),\n\n (&mut body_velocity, &mut body_position),\n\n (&mut head_velocity, &mut head_position),\n\n );\n\n }\n\n Interaction::Hovered => {\n\n *material = button_materials.hovered.clone();\n\n }\n", "file_path": "game_plugin/src/lost.rs", "rank": 60, "score": 11354.619032907949 }, { "content": " text: Text {\n\n sections: vec![TextSection {\n\n value: \"Again!\".to_string(),\n\n style: TextStyle {\n\n font: font_assets.fira_sans.clone(),\n\n font_size: 40.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n });\n\n}\n\n\n\npub type ButtonInteraction<'a> = (\n\n Entity,\n\n &'a Interaction,\n\n &'a mut Handle<ColorMaterial>,\n\n &'a Children,\n\n);\n", "file_path": "game_plugin/src/lost.rs", "rank": 61, "score": 11354.506578376227 }, { "content": " transform.scale = Vec3::new(0.5, 0.5, 0.5);\n\n\n\n transform\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(textures.tutorial_restart.clone().into()),\n\n transform: {\n\n let mut transform = Transform::from_translation(Vec3::new(\n\n level.finish_line() + 200.,\n\n 170.0,\n\n 0.0,\n\n ));\n\n transform.scale = Vec3::new(0.5, 0.5, 0.5);\n\n\n\n transform\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n }\n\n}\n", "file_path": 
"game_plugin/src/player.rs", "rank": 62, "score": 11354.462251321089 }, { "content": " }\n\n }\n\n\n\n pub fn colliders(&self) -> Vec<ColliderBundle> {\n\n let mut colliders = vec![];\n\n match self {\n\n Level::Tutorial => {\n\n colliders.push(build_collider(\n\n Isometry::from(Point2::from([800.0 / PHYSICS_SCALE, 2.])),\n\n ColliderShape::cuboid(2., 1.),\n\n ));\n\n }\n\n Level::First => {\n\n colliders.push(build_collider(\n\n Isometry::from(Point2::from([800.0 / PHYSICS_SCALE, 2.])),\n\n ColliderShape::cuboid(2., 1.),\n\n ));\n\n colliders.push(build_collider(\n\n Isometry::from(Point2::from([(800.0 / PHYSICS_SCALE) * 1.8, 2.])),\n\n ColliderShape::cuboid(2., 1.),\n", "file_path": "game_plugin/src/levels.rs", "rank": 63, "score": 11354.183704332292 }, { "content": " )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::InLevel)\n\n .before(LostSystem::Lost)\n\n .with_system(paddle_wheel.system())\n\n .with_system(move_head.system())\n\n .with_system(move_camera.system())\n\n .with_system(jump.system())\n\n .with_system(landing.system()),\n\n )\n\n .add_system_set(SystemSet::on_update(GameState::Lost).with_system(move_camera.system()))\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Finished).with_system(move_camera.system()),\n\n );\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 64, "score": 11354.044774429987 }, { "content": " Level::Second => 800. * 3.,\n\n Level::Third => 800. 
* 3.,\n\n }\n\n }\n\n\n\n pub fn holes(&self) -> Vec<[f32; 2]> {\n\n match self {\n\n Level::Tutorial => vec![[1600., 1800.]],\n\n Level::First => vec![[864., 1000.]],\n\n Level::Second => vec![[800., 1250.]],\n\n Level::Third => vec![[250., 450.], [800., 1250.]],\n\n }\n\n }\n\n\n\n pub fn next(&self) -> Level {\n\n match self {\n\n Level::Tutorial => Level::First,\n\n Level::First => Level::Second,\n\n Level::Second => Level::Third,\n\n Level::Third => Level::Tutorial,\n", "file_path": "game_plugin/src/levels.rs", "rank": 65, "score": 11353.567672605854 }, { "content": " std::f32::consts::FRAC_PI_2,\n\n )),\n\n ..Default::default()\n\n })\n\n .insert(ColliderDebugRender::default())\n\n .insert(ColliderPositionSync::Discrete)\n\n .insert(ForLevel);\n\n commands\n\n .spawn_bundle(ColliderBundle {\n\n shape: ColliderShape::cuboid(300.0 / PHYSICS_SCALE, BOULDER_HEIGTH),\n\n position: ColliderPosition(Isometry2::new(\n\n [ground_length - 400. / PHYSICS_SCALE, 300.0 / PHYSICS_SCALE].into(),\n\n std::f32::consts::FRAC_PI_2,\n\n )),\n\n ..Default::default()\n\n })\n\n .insert(ColliderDebugRender::default())\n\n .insert(ColliderPositionSync::Discrete)\n\n .insert(ForLevel);\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 66, "score": 11353.359135607341 }, { "content": " commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(textures.player_won.clone().into()),\n\n transform: {\n\n let mut transform =\n\n Transform::from_translation(Vec3::new(level.finish_line(), 250.0, 0.0));\n\n transform.scale = Vec3::new(0.25, 0.25, 0.25);\n\n\n\n transform\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(textures.thanks.clone().into()),\n\n transform: {\n\n let mut transform = Transform::from_translation(Vec3::new(\n\n level.finish_line() + 200.,\n\n 170.0,\n", "file_path": "game_plugin/src/player.rs", "rank": 67, "score": 11353.343832347216 }, { "content": " 0.5 * 
BOULDER_HEIGTH + 2. * WHEEL_RADIUS + 0.5 * BODY_LENGTH + BODY_RADIUS,\n\n ]\n\n .into(),\n\n head: [\n\n 0.,\n\n 0.5 * BOULDER_HEIGTH\n\n + 2. * WHEEL_RADIUS\n\n + BODY_LENGTH\n\n + 2. * BODY_RADIUS\n\n + HEAD_RADIUS,\n\n ]\n\n .into(),\n\n },\n\n }\n\n }\n\n\n\n pub fn finish_line(&self) -> f32 {\n\n match self {\n\n Level::Tutorial => 800. * 3.,\n\n Level::First => 800. * 3.,\n", "file_path": "game_plugin/src/levels.rs", "rank": 68, "score": 11353.18615803566 }, { "content": " .with_children(|parent| {\n\n parent.spawn_bundle(TextBundle {\n\n text: Text {\n\n sections: vec![TextSection {\n\n value: if is_last_level {\n\n \"Restart\".to_string()\n\n } else {\n\n \"Next!\".to_string()\n\n },\n\n style: TextStyle {\n\n font: font_assets.fira_sans.clone(),\n\n font_size: 40.0,\n\n color: Color::rgb(0.9, 0.9, 0.9),\n\n },\n\n }],\n\n alignment: Default::default(),\n\n },\n\n ..Default::default()\n\n });\n\n });\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 69, "score": 11353.01310175547 }, { "content": " 0.0,\n\n ));\n\n transform.scale = Vec3::new(0.25, 0.25, 0.25);\n\n\n\n transform\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n }\n\n if *level == Level::Tutorial {\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(textures.tutorial.clone().into()),\n\n transform: {\n\n let mut transform = Transform::from_translation(Vec3::new(-180.0, 250.0, 0.0));\n\n transform.scale = Vec3::new(0.5, 0.5, 0.5);\n\n\n\n transform\n\n },\n", "file_path": "game_plugin/src/player.rs", "rank": 70, "score": 11352.995800533683 }, { "content": " transform: Transform::from_translation(Vec3::new(slot as f32 * 800.0, 300.0, 0.0)),\n\n ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n }\n\n if *level != Level::last() {\n\n commands\n\n .spawn_bundle(SpriteBundle {\n\n material: materials.add(textures.finish.clone().into()),\n\n transform: {\n\n let mut transform =\n\n Transform::from_translation(Vec3::new(level.finish_line(), 
250.0, 0.0));\n\n transform.scale = Vec3::new(0.5, 0.5, 0.5);\n\n\n\n transform\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ForLevel);\n\n } else {\n", "file_path": "game_plugin/src/player.rs", "rank": 71, "score": 11352.827480324193 }, { "content": " wheel_id,\n\n body_id,\n\n ))\n\n .insert(ForLevel);\n\n\n\n let mut body_head_joint = BallJoint::new(\n\n Vec2::new(0.0, 0.5 * BODY_LENGTH + BODY_RADIUS).into(),\n\n Vec2::new(0.0, -0.5 * HEAD_RADIUS).into(),\n\n );\n\n body_head_joint.motor_model = SpringModel::Disabled;\n\n commands\n\n .spawn()\n\n .insert(JointBuilderComponent::new(\n\n body_head_joint,\n\n body_id,\n\n head_id,\n\n ))\n\n .insert(ForLevel);\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 72, "score": 11352.779130486859 }, { "content": " for (start, end) in borders {\n\n commands\n\n .spawn_bundle(ColliderBundle {\n\n shape: ColliderShape::cuboid((end - start) / 2., BOULDER_HEIGTH),\n\n position: ColliderPosition(Isometry::from(Point2::from([\n\n start + (end - start) / 2.,\n\n 0.,\n\n ]))),\n\n ..Default::default()\n\n })\n\n .insert(ColliderDebugRender::default())\n\n .insert(ColliderPositionSync::Discrete)\n\n .insert(Platform)\n\n .insert(ForLevel);\n\n }\n\n commands\n\n .spawn_bundle(ColliderBundle {\n\n shape: ColliderShape::cuboid(300.0 / PHYSICS_SCALE, BOULDER_HEIGTH),\n\n position: ColliderPosition(Isometry2::new(\n\n [-(400.0 / PHYSICS_SCALE), 300.0 / PHYSICS_SCALE].into(),\n", "file_path": "game_plugin/src/player.rs", "rank": 73, "score": 11352.702735864237 }, { "content": " SystemSet::on_enter(GameState::PrepareLevel).with_system(build_parcours.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::PrepareLevel).with_system(start_level.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_exit(GameState::InLevel).with_system(clear_level.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::InLevel)\n\n .with_system(restart.system())\n\n 
.with_system(cross_finish_line.system())\n\n .with_system(fall.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_enter(GameState::Finished).with_system(show_finished_button.system()),\n\n )\n\n .add_system_set(\n\n SystemSet::on_update(GameState::Finished).with_system(next_level.system()),\n\n );\n\n }\n\n}\n\n\n", "file_path": "game_plugin/src/levels.rs", "rank": 74, "score": 11352.429058239824 }, { "content": " ));\n\n }\n\n Level::Second => {\n\n colliders.push(build_collider(\n\n Isometry2::new(\n\n [800.0 / PHYSICS_SCALE, 2.].into(),\n\n std::f32::consts::FRAC_PI_4,\n\n ),\n\n ColliderShape::cuboid(4., 1.),\n\n ));\n\n }\n\n Level::Third => {\n\n colliders.push(build_collider(\n\n Isometry2::new(\n\n [800.0 / PHYSICS_SCALE, 2.].into(),\n\n std::f32::consts::FRAC_PI_4,\n\n ),\n\n ColliderShape::cuboid(4., 1.),\n\n ));\n\n colliders.push(build_collider(\n", "file_path": "game_plugin/src/levels.rs", "rank": 75, "score": 11351.428595782332 }, { "content": " [0., -0.5 * BODY_LENGTH].into(),\n\n [0., 0.5 * BODY_LENGTH].into(),\n\n BODY_RADIUS,\n\n ),\n\n ..Default::default()\n\n })\n\n .insert_bundle(SpriteBundle {\n\n material: materials.add(textures.body.clone().into()),\n\n transform: Transform {\n\n scale: Vec3::new(0.125, 0.125, 0.125),\n\n translation: Vec3::new(0., BOULDER_HEIGTH * PHYSICS_SCALE * 1.1, 0.),\n\n ..Transform::default()\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ColliderPositionSync::Discrete)\n\n .insert(Body)\n\n .insert(ForLevel)\n\n .id()\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 76, "score": 11351.428595782332 }, { "content": " ..Default::default()\n\n })\n\n .insert_bundle(SpriteBundle {\n\n material: materials.add(textures.head.clone().into()),\n\n transform: Transform {\n\n scale: Vec3::new(0.125, 0.125, 0.125),\n\n ..Transform::default()\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ColliderPositionSync::Discrete)\n\n .insert(Head)\n\n .insert(ForLevel)\n\n .id()\n\n}\n\n\n", "file_path": 
"game_plugin/src/player.rs", "rank": 77, "score": 11351.428595782332 }, { "content": " material: materials.add(textures.wheel.clone().into()),\n\n transform: Transform {\n\n scale: Vec3::new(0.25, 0.25, 0.25),\n\n ..Transform::default()\n\n },\n\n ..Default::default()\n\n })\n\n .insert(ColliderPositionSync::Discrete)\n\n .insert(Wheel)\n\n .insert(ForLevel)\n\n .id()\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 78, "score": 11351.428595782332 }, { "content": "#[derive(Clone, Eq, PartialEq, Debug, Hash)]\n\nenum GameState {\n\n Loading,\n\n Menu,\n\n Prepare,\n\n PrepareLevel,\n\n InLevel,\n\n Lost,\n\n Finished,\n\n}\n", "file_path": "game_plugin/src/lib.rs", "rank": 79, "score": 10351.607593850316 }, { "content": "#[derive(PartialEq)]\n\nenum LandBlock {\n\n Blocked,\n\n NotBlocked,\n\n}\n\n\n", "file_path": "game_plugin/src/player.rs", "rank": 80, "score": 10348.91767233008 }, { "content": "// disable console on windows for release builds\n\n#![cfg_attr(not(debug_assertions), windows_subsystem = \"windows\")]\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nuse bevy_webgl2;\n\n\n\nuse bevy::prelude::{App, ClearColor, Color, WindowDescriptor};\n\nuse bevy::DefaultPlugins;\n\nuse game_plugin::GamePlugin;\n\n\n", "file_path": "src/main.rs", "rank": 81, "score": 8.892853823005641 }, { "content": "extern crate embed_resource;\n\nuse std::env;\n\n\n", "file_path": "build.rs", "rank": 82, "score": 2.6567535259878055 } ]
Rust
src/librustc_incremental/persist/dirty_clean.rs
killerswan/rust
e703b33e3e03d1078c8825e1f64ecfb45884f5cb
use super::directory::RetracedDefIdDirectory; use super::load::DirtyNodes; use rustc::dep_graph::{DepGraphQuery, DepNode}; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::hir::intravisit; use rustc::ich::{Fingerprint, ATTR_DIRTY, ATTR_CLEAN, ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA}; use syntax::ast::{self, Attribute, NestedMetaItem}; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use syntax_pos::Span; use rustc::ty::TyCtxt; const LABEL: &'static str = "label"; const CFG: &'static str = "cfg"; pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, dirty_inputs: &DirtyNodes, retraced: &RetracedDefIdDirectory) { if !tcx.sess.features.borrow().rustc_attrs { return; } let _ignore = tcx.dep_graph.in_ignore(); let dirty_inputs: FxHashSet<DepNode<DefId>> = dirty_inputs.keys() .filter_map(|d| retraced.map(d)) .collect(); let query = tcx.dep_graph.query(); debug!("query-nodes: {:?}", query.nodes()); let krate = tcx.hir.krate(); let mut dirty_clean_visitor = DirtyCleanVisitor { tcx: tcx, query: &query, dirty_inputs: dirty_inputs, checked_attrs: FxHashSet(), }; krate.visit_all_item_likes(&mut dirty_clean_visitor); let mut all_attrs = FindAllAttrs { tcx: tcx, attr_names: vec![ATTR_DIRTY, ATTR_CLEAN], found_attrs: vec![], }; intravisit::walk_crate(&mut all_attrs, krate); all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs); } pub struct DirtyCleanVisitor<'a, 'tcx:'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, query: &'a DepGraphQuery<DefId>, dirty_inputs: FxHashSet<DepNode<DefId>>, checked_attrs: FxHashSet<ast::AttrId>, } impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(LABEL) { let value = expect_associated_value(self.tcx, &item); match DepNode::from_label_string(&value.as_str(), def_id) { Ok(def_id) => return def_id, Err(()) => { 
self.tcx.sess.span_fatal( item.span, &format!("dep-node label `{}` not recognized", value)); } } } } self.tcx.sess.span_fatal(attr.span, "no `label` found"); } fn dep_node_str(&self, dep_node: &DepNode<DefId>) -> DepNode<String> { dep_node.map_def(|&def_id| Some(self.tcx.item_path_str(def_id))).unwrap() } fn assert_dirty(&self, item_span: Span, dep_node: DepNode<DefId>) { debug!("assert_dirty({:?})", dep_node); match dep_node { DepNode::Krate | DepNode::Hir(_) | DepNode::HirBody(_) => { if !self.dirty_inputs.contains(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` not found in dirty set, but should be dirty", dep_node_str)); } } _ => { if self.query.contains_node(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str)); } } } } fn assert_clean(&self, item_span: Span, dep_node: DepNode<DefId>) { debug!("assert_clean({:?})", dep_node); match dep_node { DepNode::Krate | DepNode::Hir(_) | DepNode::HirBody(_) => { if self.dirty_inputs.contains(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` found in dirty-node set, but should be clean", dep_node_str)); } } _ => { if !self.query.contains_node(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` not found in dep graph, but should be clean", dep_node_str)); } } } } fn check_item(&mut self, item_id: ast::NodeId, item_span: Span) { let def_id = self.tcx.hir.local_def_id(item_id); for attr in self.tcx.get_attrs(def_id).iter() { if attr.check_name(ATTR_DIRTY) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_dirty(item_span, self.dep_node(attr, def_id)); } } else if attr.check_name(ATTR_CLEAN) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_clean(item_span, 
self.dep_node(attr, def_id)); } } } } } impl<'a, 'tcx> ItemLikeVisitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> { fn visit_item(&mut self, item: &'tcx hir::Item) { self.check_item(item.id, item.span); } fn visit_trait_item(&mut self, item: &hir::TraitItem) { self.check_item(item.id, item.span); } fn visit_impl_item(&mut self, item: &hir::ImplItem) { self.check_item(item.id, item.span); } } pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, prev_metadata_hashes: &FxHashMap<DefId, Fingerprint>, current_metadata_hashes: &FxHashMap<DefId, Fingerprint>) { if !tcx.sess.opts.debugging_opts.query_dep_graph { return; } tcx.dep_graph.with_ignore(||{ let krate = tcx.hir.krate(); let mut dirty_clean_visitor = DirtyCleanMetadataVisitor { tcx: tcx, prev_metadata_hashes: prev_metadata_hashes, current_metadata_hashes: current_metadata_hashes, checked_attrs: FxHashSet(), }; krate.visit_all_item_likes(&mut dirty_clean_visitor); let mut all_attrs = FindAllAttrs { tcx: tcx, attr_names: vec![ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA], found_attrs: vec![], }; intravisit::walk_crate(&mut all_attrs, krate); all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs); }); } pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> { tcx: TyCtxt<'a, 'tcx, 'tcx>, prev_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>, current_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>, checked_attrs: FxHashSet<ast::AttrId>, } impl<'a, 'tcx, 'm> ItemLikeVisitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> { fn visit_item(&mut self, item: &'tcx hir::Item) { self.check_item(item.id, item.span); if let hir::ItemEnum(ref def, _) = item.node { for v in &def.variants { self.check_item(v.node.data.id(), v.span); } } } fn visit_trait_item(&mut self, item: &hir::TraitItem) { self.check_item(item.id, item.span); } fn visit_impl_item(&mut self, item: &hir::ImplItem) { self.check_item(item.id, item.span); } } impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> { fn 
check_item(&mut self, item_id: ast::NodeId, item_span: Span) { let def_id = self.tcx.hir.local_def_id(item_id); for attr in self.tcx.get_attrs(def_id).iter() { if attr.check_name(ATTR_DIRTY_METADATA) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_state(false, def_id, item_span); } } else if attr.check_name(ATTR_CLEAN_METADATA) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_state(true, def_id, item_span); } } } } fn assert_state(&self, should_be_clean: bool, def_id: DefId, span: Span) { let item_path = self.tcx.item_path_str(def_id); debug!("assert_state({})", item_path); if let Some(&prev_hash) = self.prev_metadata_hashes.get(&def_id) { let hashes_are_equal = prev_hash == self.current_metadata_hashes[&def_id]; if should_be_clean && !hashes_are_equal { self.tcx.sess.span_err( span, &format!("Metadata hash of `{}` is dirty, but should be clean", item_path)); } let should_be_dirty = !should_be_clean; if should_be_dirty && hashes_are_equal { self.tcx.sess.span_err( span, &format!("Metadata hash of `{}` is clean, but should be dirty", item_path)); } } else { self.tcx.sess.span_err( span, &format!("Could not find previous metadata hash of `{}`", item_path)); } } } fn check_config(tcx: TyCtxt, attr: &Attribute) -> bool { debug!("check_config(attr={:?})", attr); let config = &tcx.sess.parse_sess.config; debug!("check_config: config={:?}", config); for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(CFG) { let value = expect_associated_value(tcx, &item); debug!("check_config: searching for cfg {:?}", value); return config.contains(&(value, None)); } } tcx.sess.span_fatal( attr.span, &format!("no cfg attribute")); } fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name { if let Some(value) = item.value_str() { value } else { let msg = if let Some(name) = item.name() { format!("associated value expected for `{}`", name) } else { "expected an associated 
value".to_string() }; tcx.sess.span_fatal(item.span, &msg); } } pub struct FindAllAttrs<'a, 'tcx:'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, attr_names: Vec<&'static str>, found_attrs: Vec<&'tcx Attribute>, } impl<'a, 'tcx> FindAllAttrs<'a, 'tcx> { fn is_active_attr(&mut self, attr: &Attribute) -> bool { for attr_name in &self.attr_names { if attr.check_name(attr_name) && check_config(self.tcx, attr) { return true; } } false } fn report_unchecked_attrs(&self, checked_attrs: &FxHashSet<ast::AttrId>) { for attr in &self.found_attrs { if !checked_attrs.contains(&attr.id) { self.tcx.sess.span_err(attr.span, &format!("found unchecked \ #[rustc_dirty]/#[rustc_clean] attribute")); } } } } impl<'a, 'tcx> intravisit::Visitor<'tcx> for FindAllAttrs<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> { intravisit::NestedVisitorMap::All(&self.tcx.hir) } fn visit_attribute(&mut self, attr: &'tcx Attribute) { if self.is_active_attr(attr) { self.found_attrs.push(attr); } } }
use super::directory::RetracedDefIdDirectory; use super::load::DirtyNodes; use rustc::dep_graph::{DepGraphQuery, DepNode}; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::hir::intravisit; use rustc::ich::{Fingerprint, ATTR_DIRTY, ATTR_CLEAN, ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA}; use syntax::ast::{self, Attribute, NestedMetaItem}; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use syntax_pos::Span; use rustc::ty::TyCtxt; const LABEL: &'static str = "label"; const CFG: &'static str = "cfg"; pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, dirty_inputs: &DirtyNodes, retraced: &RetracedDefIdDirectory) { if !tcx.sess.features.borrow().rustc_attrs { return; } let _ignore = tcx.dep_graph.in_ignore(); let dirty_inputs: FxHashSet<DepNode<DefId>> = dirty_inputs.keys() .filter_map(|d| retraced.map(d)) .collect(); let query = tcx.dep_graph.query(); debug!("query-nodes: {:?}", query.nodes()); let krate = tcx.hir.krate(); let mut dirty_clean_visitor = DirtyCleanVisitor { tcx: tcx, query: &query, dirty_inputs: dirty_inputs, checked_attrs: FxHashSet(), }; krate.visit_all_item_likes(&mut dirty_clean_visitor); let mut all_attrs = FindAllAttrs { tcx: tcx, attr_names: vec![ATTR_DIRTY, ATTR_CLEAN], found_attrs: vec![], }; intravisit::walk_crate(&mut all_attrs, krate); all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs); } pub struct DirtyCleanVisitor<'a, 'tcx:'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, query: &'a DepGraphQuery<DefId>, dirty_inputs: FxHashSet<DepNode<DefId>>, checked_attrs: FxHashSet<ast::AttrId>, } impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(LABEL) { let value = expect_associated_value(self.tcx, &item); match DepNode::from_label_string(&value.as_str(), def_id) { Ok(def_id) => return def_id, Err(()) => { 
self.tcx.sess.span_fatal( item.span, &format!("dep-node label `{}` not recognized", value)); } } } } self.tcx.sess.span_fatal(attr.span, "no `label` found"); } fn dep_node_str(&self, dep_node: &DepNode<DefId>) -> DepNode<String> { dep_node.map_def(|&def_id| Some(self.tcx.item_path_str(def_id))).unwrap() } fn assert_dirty(&self, item_span: Span, dep_node: DepNode<DefId>) { debug!("assert_dirty({:?})", dep_node); match dep_node { DepNode::Krate | DepNode::Hir(_) | DepNode::HirBody(_) => { if !self.dirty_inputs.contains(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` not found in dirty set, but should be dirty", dep_node_str)); } } _ => { if self.query.contains_node(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` found in dep graph, but should be dirty", dep_node_str)); } } } } fn assert_clean(&self, item_span: Span, dep_node: DepNode<DefId>) { debug!("assert_clean({:?})", dep_node); match dep_node { DepNode::Krate | DepNode::Hir(_) | DepNode::HirBody(_) => { if self.dirty_inputs.contains(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` found in dirty-node set, but should be clean", dep_node_str)); } } _ => { if !self.query.contains_node(&dep_node) { let dep_node_str = self.dep_node_str(&dep_node); self.tcx.sess.span_err( item_span, &format!("`{:?}` not found in dep graph, but should be clean", dep_node_str)); } } } } fn check_item(&mut self, item_id: ast::NodeId, item_span: Span) { let def_id = self.tcx.hir.local_def_id(item_id); for attr in self.tcx.get_attrs(def_id).iter() { if attr.check_name(ATTR_DIRTY) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_dirty(item_span, self.dep_node(attr, def_id)); } } else if attr.check_name(ATTR_CLEAN) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_clean(item_span, 
self.dep_node(attr, def_id)); } } } } } impl<'a, 'tcx> ItemLikeVisitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> { fn visit_item(&mut self, item: &'tcx hir::Item) { self.check_item(item.id, item.span); } fn visit_trait_item(&mut self, item: &hir::TraitItem) { self.check_item(item.id, item.span); } fn visit_impl_item(&mut self, item: &hir::ImplItem) { self.check_item(item.id, item.span); } } pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, prev_metadata_hashes: &FxHashMap<DefId, Fingerprint>, current_metadata_hashes: &FxHashMap<DefId, Fingerprint>) { if !tcx.sess.opts.debugging_opts.query_dep_graph { return; } tcx.dep_graph.with_ignore(||{ let krate = tcx.hir.krate(); let mut dirty_clean_visitor = DirtyCleanMetadataVisitor { tcx: tcx, prev_metadata_hashes: prev_metadata_hashes, current_metadata_hashes: current_metadata_hashes, checked_attrs: FxHashSet(), }; krate.visit_all_item_likes(&mut dirty_clean_visitor); let mut all_attrs = FindAllAttrs { tcx: tcx, attr_names: vec![ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA], found_attrs: vec![], }; intravisit::walk_crate(&mut all_attrs, krate); all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs); }); } pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> { tcx: TyCtxt<'a, 'tcx, 'tcx>, prev_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>, current_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>, checked_attrs: FxHashSet<ast::AttrId>, } impl<'a, 'tcx, 'm> ItemLikeVisitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> { fn visit_item(&mut self, item: &'tcx hir::Item) { self.check_item(item.id, item.span); if let hir::ItemEnum(ref def, _) = item.node { for v in &def.variants { self.check_item(v.node.data.id(), v.span); } } } fn visit_trait_item(&mut self, item: &hir::TraitItem) { self.check_item(item.id, item.span); } fn visit_impl_item(&mut self, item: &hir::ImplItem) { self.check_item(item.id, item.span); } } impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> { fn 
check_item(&mut self, item_id: ast::NodeId, item_span: Span) { let def_id = self.tcx.hir.local_def_id(item_id); for attr in self.tcx.get_attrs(def_id).iter() { if attr.check_name(ATTR_DIRTY_METADATA) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_state(false, def_id, item_span); } } else if attr.check_name(ATTR_CLEAN_METADATA) { if check_config(self.tcx, attr) { self.checked_attrs.insert(attr.id); self.assert_state(true, def_id, item_span); } } } } fn assert_state(&self, should_be_clean: bool, def_id: DefId, span: Span) { let item_path = self.tcx.item_path_str(def_id); debug!("assert_state({})", item_path); if let Some(&prev_hash) = self.prev_metadata_hashes.get(&def_id) { let hashes_are_equal = prev_hash == self.current_metadata_hashes[&def_id]; if should_be_clean && !hashes_are_equal { self.tcx.sess.span_err( span, &format!("Metadata hash of `{}` is dirty, but should be clean", item_path)); } let should_be_dirty = !should_be_clean; if should_be_dirty && hashes_are_equal { self.tcx.sess.span_err( span, &format!("Metadata hash of `{}` is clean, but should be dirty", item_path)); } } else { self.tcx.sess.span_err( span, &format!("Could not find previous metadata hash of `{}`", item_path)); } } } fn check_config(tcx: TyCtxt, attr: &Attribute) -> bool { debug!("check_config(attr={:?})", attr); let config = &tcx.sess.parse_sess.config; debug!("check_config: config={:?}", config); for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(CFG) { let value = expect_associated_value(tcx, &item); debug!("check_config: searching for cfg {:?}", value); return config.contains(&(value, None)); } } tcx.sess.span_fatal( attr.span, &format!("no cfg attribute")); } fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name {
} pub struct FindAllAttrs<'a, 'tcx:'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, attr_names: Vec<&'static str>, found_attrs: Vec<&'tcx Attribute>, } impl<'a, 'tcx> FindAllAttrs<'a, 'tcx> { fn is_active_attr(&mut self, attr: &Attribute) -> bool { for attr_name in &self.attr_names { if attr.check_name(attr_name) && check_config(self.tcx, attr) { return true; } } false } fn report_unchecked_attrs(&self, checked_attrs: &FxHashSet<ast::AttrId>) { for attr in &self.found_attrs { if !checked_attrs.contains(&attr.id) { self.tcx.sess.span_err(attr.span, &format!("found unchecked \ #[rustc_dirty]/#[rustc_clean] attribute")); } } } } impl<'a, 'tcx> intravisit::Visitor<'tcx> for FindAllAttrs<'a, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'tcx> { intravisit::NestedVisitorMap::All(&self.tcx.hir) } fn visit_attribute(&mut self, attr: &'tcx Attribute) { if self.is_active_attr(attr) { self.found_attrs.push(attr); } } }
if let Some(value) = item.value_str() { value } else { let msg = if let Some(name) = item.name() { format!("associated value expected for `{}`", name) } else { "expected an associated value".to_string() }; tcx.sess.span_fatal(item.span, &msg); }
if_condition
[]
Rust
src/graphics/camera.rs
fossegutten/tetra
ebdccc242680786482a8622dbb3f76f1fc0ab00c
use super::Rectangle; use crate::input; use crate::math::{Mat4, Vec2, Vec3}; use crate::window; use crate::Context; #[derive(Debug, Clone)] pub struct Camera { pub position: Vec2<f32>, pub rotation: f32, pub zoom: f32, pub viewport_width: f32, pub viewport_height: f32, matrix: Mat4<f32>, } impl Camera { pub fn new(viewport_width: f32, viewport_height: f32) -> Camera { Camera { position: Vec2::zero(), rotation: 0.0, zoom: 1.0, viewport_width, viewport_height, matrix: Mat4::translation_2d(Vec2::new(viewport_width / 2.0, viewport_height / 2.0)), } } pub fn with_window_size(ctx: &Context) -> Camera { let (width, height) = window::get_size(ctx); Camera::new(width as f32, height as f32) } pub fn set_viewport_size(&mut self, width: f32, height: f32) { self.viewport_width = width; self.viewport_height = height; } pub fn update(&mut self) { self.matrix = Mat4::translation_2d(-self.position); self.matrix.rotate_z(self.rotation); self.matrix.scale_3d(Vec3::new(self.zoom, self.zoom, 1.0)); self.matrix.translate_2d(Vec2::new( self.viewport_width / 2.0, self.viewport_height / 2.0, )); } pub fn as_matrix(&self) -> Mat4<f32> { self.matrix } pub fn project(&self, point: Vec2<f32>) -> Vec2<f32> { let mut proj = Vec2::new( (point.x - self.viewport_width / 2.0) / self.zoom, (point.y - self.viewport_height / 2.0) / self.zoom, ); proj.rotate_z(-self.rotation); proj += self.position; proj } pub fn unproject(&self, point: Vec2<f32>) -> Vec2<f32> { let mut unproj = point - self.position; unproj.rotate_z(self.rotation); unproj.x = unproj.x * self.zoom + self.viewport_width / 2.0; unproj.y = unproj.y * self.zoom + self.viewport_height / 2.0; unproj } pub fn mouse_position(&self, ctx: &Context) -> Vec2<f32> { self.project(input::get_mouse_position(ctx)) } pub fn mouse_x(&self, ctx: &Context) -> f32 { self.mouse_position(ctx).x } pub fn mouse_y(&self, ctx: &Context) -> f32 { self.mouse_position(ctx).y } pub fn visible_rect(&self) -> Rectangle { let viewport_width = self.viewport_width / 
self.zoom; let viewport_height = self.viewport_height / self.zoom; let half_viewport_width = viewport_width / 2.0; let half_viewport_height = viewport_height / 2.0; if self.rotation.abs() > f32::EPSILON { let mut top_left = Vec2::new(-half_viewport_width, -half_viewport_height); let mut bottom_left = Vec2::new(-half_viewport_width, half_viewport_height); top_left.rotate_z(self.rotation); bottom_left.rotate_z(self.rotation); let largest_x = f32::max(top_left.x.abs(), bottom_left.x.abs()); let largest_y = f32::max(top_left.y.abs(), bottom_left.y.abs()); let left = self.position.x - largest_x; let top = self.position.y - largest_y; let width = largest_x * 2.0; let height = largest_y * 2.0; Rectangle { x: left, y: top, width, height, } } else { let left = self.position.x - half_viewport_width; let top = self.position.y - half_viewport_height; Rectangle { x: left, y: top, width: viewport_width, height: viewport_height, } } } } #[cfg(test)] mod tests { use super::*; #[test] fn point_projections() { let mut camera = Camera::new(128.0, 256.0); let proj_initial = camera.project(Vec2::zero()); let unproj_initial = camera.unproject(proj_initial); assert_eq!(proj_initial, Vec2::new(-64.0, -128.0)); assert_eq!(unproj_initial, Vec2::zero()); camera.position = Vec2::new(16.0, 16.0); let proj_positioned = camera.project(Vec2::zero()); let unproj_positioned = camera.unproject(proj_positioned); assert_eq!(proj_positioned, Vec2::new(-48.0, -112.0)); assert_eq!(unproj_positioned, Vec2::zero()); camera.zoom = 2.0; let proj_zoomed = camera.project(Vec2::zero()); let unproj_zoomed = camera.unproject(proj_zoomed); assert_eq!(proj_zoomed, Vec2::new(-16.0, -48.0)); assert_eq!(unproj_zoomed, Vec2::zero()); camera.rotation = std::f32::consts::FRAC_PI_2; let proj_rotated = camera.project(Vec2::zero()); let unproj_rotated = camera.unproject(proj_rotated); assert!(proj_rotated.x + 48.0 <= 0.001); assert!(proj_rotated.y - 48.0 <= 0.001); assert!(unproj_rotated.x.abs() <= 0.001); 
assert!(unproj_rotated.y.abs() <= 0.001); } #[test] fn validate_camera_visible_rect() { let mut camera = Camera::new(800.0, 600.0); assert_eq!( camera.visible_rect(), Rectangle { x: -400.0, y: -300.0, width: 800.0, height: 600.0 } ); camera.zoom = 2.0; assert_eq!( camera.visible_rect(), Rectangle { x: -200.0, y: -150.0, width: 400.0, height: 300.0 } ); camera.position = Vec2::new(-100.0, 100.0); assert_eq!( camera.visible_rect(), Rectangle { x: -300.0, y: -50.0, width: 400.0, height: 300.0 } ); camera.rotation = std::f32::consts::FRAC_PI_2; let rect = camera.visible_rect(); assert!(rect.x + 250.0 < 0.001); assert!(rect.y + 100.0 < 0.001); assert!(rect.width - 300.0 < 0.001); assert!(rect.height - 400.0 < 0.001); } }
use super::Rectangle; use crate::input; use crate::math::{Mat4, Vec2, Vec3}; use crate::window; use crate::Context; #[derive(Debug, Clone)] pub struct Camera { pub position: Vec2<f32>, pub rotation: f32, pub zoom: f32, pub viewport_width: f32, pub viewport_height: f32, matrix: Mat4<f32>, } impl Camera { pub fn new(viewport_width: f32, viewport_height: f32) -> Camera { Camera { position: Vec2::zero(), rotation: 0.0, zoom: 1.0, viewport_width, viewport_height, matrix: Mat4::translation_2d(Vec2::new(viewport_width / 2.0, viewport_height / 2.0)), } } pub fn with_window_size(ctx: &Context) -> Camera { let (width, height) = window::get_size(ctx); Camera::new(width as f32, height as f32) } pub fn set_viewport_size(&mut self, width: f32, height: f32) { self.viewport_width = width; self.viewport_height = height; } pub fn update(&mut self) { self.matrix = Mat4::translation_2d(-self.position); self.matrix.rotate_z(self.rotation); self.matrix.scale_3d(Vec3::new(self.zoom, self.zoom, 1.0)); self.matrix.translate_2d(Vec2::new( self.viewport_width / 2.0, self.viewport_height / 2.0, )); } pub fn as_matrix(&self) -> Mat4<f32> { self.matrix } pub fn project(&self, point: Vec2<f32>) -> Vec2<f32> { let mut proj = Vec2::new( (point.x - self.viewport_width / 2.0) / self.zoom, (point.y - self.viewport_height / 2.0) / self.zoom, ); proj.rotate_z(-self.rotation); proj += self.position; proj } pub fn unproject(&self, point: Vec2<f32>) -> Vec2<f32> { let mut unproj = point - self.position; unproj.rotate_z(self.rotation); unproj.x = unproj.x * self.zoom + self.viewport_width / 2.0; unproj.y = unproj.y * self.zoom + self.viewport_height / 2.0; unproj } pub fn mouse_position(&self, ctx: &Context) -> Vec2<f32> { self.project(input::get_mouse_position(ctx)) } pub fn mouse_x(&self, ctx: &Context) -> f32 { self.mouse_position(ctx).x } pub fn mouse_y(&self, ctx: &Context) -> f32 { self.mouse_position(ctx).y } pub fn visible_rect(&self) -> Rectangle { let viewport_width = self.viewport_width / 
self.zoom; let viewport_height = self.viewport_height / self.zoom; let half_viewport_width = viewport_width / 2.0; let half_viewport_height = viewport_height / 2.0; if self.rotation.abs
= camera.unproject(proj_zoomed); assert_eq!(proj_zoomed, Vec2::new(-16.0, -48.0)); assert_eq!(unproj_zoomed, Vec2::zero()); camera.rotation = std::f32::consts::FRAC_PI_2; let proj_rotated = camera.project(Vec2::zero()); let unproj_rotated = camera.unproject(proj_rotated); assert!(proj_rotated.x + 48.0 <= 0.001); assert!(proj_rotated.y - 48.0 <= 0.001); assert!(unproj_rotated.x.abs() <= 0.001); assert!(unproj_rotated.y.abs() <= 0.001); } #[test] fn validate_camera_visible_rect() { let mut camera = Camera::new(800.0, 600.0); assert_eq!( camera.visible_rect(), Rectangle { x: -400.0, y: -300.0, width: 800.0, height: 600.0 } ); camera.zoom = 2.0; assert_eq!( camera.visible_rect(), Rectangle { x: -200.0, y: -150.0, width: 400.0, height: 300.0 } ); camera.position = Vec2::new(-100.0, 100.0); assert_eq!( camera.visible_rect(), Rectangle { x: -300.0, y: -50.0, width: 400.0, height: 300.0 } ); camera.rotation = std::f32::consts::FRAC_PI_2; let rect = camera.visible_rect(); assert!(rect.x + 250.0 < 0.001); assert!(rect.y + 100.0 < 0.001); assert!(rect.width - 300.0 < 0.001); assert!(rect.height - 400.0 < 0.001); } }
() > f32::EPSILON { let mut top_left = Vec2::new(-half_viewport_width, -half_viewport_height); let mut bottom_left = Vec2::new(-half_viewport_width, half_viewport_height); top_left.rotate_z(self.rotation); bottom_left.rotate_z(self.rotation); let largest_x = f32::max(top_left.x.abs(), bottom_left.x.abs()); let largest_y = f32::max(top_left.y.abs(), bottom_left.y.abs()); let left = self.position.x - largest_x; let top = self.position.y - largest_y; let width = largest_x * 2.0; let height = largest_y * 2.0; Rectangle { x: left, y: top, width, height, } } else { let left = self.position.x - half_viewport_width; let top = self.position.y - half_viewport_height; Rectangle { x: left, y: top, width: viewport_width, height: viewport_height, } } } } #[cfg(test)] mod tests { use super::*; #[test] fn point_projections() { let mut camera = Camera::new(128.0, 256.0); let proj_initial = camera.project(Vec2::zero()); let unproj_initial = camera.unproject(proj_initial); assert_eq!(proj_initial, Vec2::new(-64.0, -128.0)); assert_eq!(unproj_initial, Vec2::zero()); camera.position = Vec2::new(16.0, 16.0); let proj_positioned = camera.project(Vec2::zero()); let unproj_positioned = camera.unproject(proj_positioned); assert_eq!(proj_positioned, Vec2::new(-48.0, -112.0)); assert_eq!(unproj_positioned, Vec2::zero()); camera.zoom = 2.0; let proj_zoomed = camera.project(Vec2::zero()); let unproj_zoomed
random
[ { "content": "/// Sets the transform matrix.\n\n///\n\n/// This can be used to apply global transformations to subsequent draw calls.\n\npub fn set_transform_matrix(ctx: &mut Context, matrix: Mat4<f32>) {\n\n flush(ctx);\n\n\n\n ctx.graphics.transform_matrix = matrix;\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 0, "score": 363071.05536686844 }, { "content": "/// Get the position of the mouse.\n\npub fn get_mouse_position(ctx: &Context) -> Vec2<f32> {\n\n ctx.input.mouse_position\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 1, "score": 350097.7954629049 }, { "content": "/// Sets the size of the window.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::FailedToChangeDisplayMode`](crate::TetraError::FailedToChangeDisplayMode)\n\n/// will be returned if the game was unable to change the window size.\n\npub fn set_size(ctx: &mut Context, width: i32, height: i32) -> Result {\n\n ctx.window.set_window_size(width, height)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 2, "score": 333367.23660764843 }, { "content": "/// Gets the master volume for the game.\n\npub fn get_master_volume(ctx: &mut Context) -> f32 {\n\n ctx.audio.master_volume()\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 3, "score": 323407.9917309761 }, { "content": "/// Sets the width of the window.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::FailedToChangeDisplayMode`](crate::TetraError::FailedToChangeDisplayMode)\n\n/// will be returned if the game was unable to change the window size.\n\npub fn set_width(ctx: &mut Context, width: i32) -> Result {\n\n set_size(ctx, width, ctx.window.get_window_height())\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 4, "score": 315856.2106497827 }, { "content": "/// Sets the height of the window.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::FailedToChangeDisplayMode`](crate::TetraError::FailedToChangeDisplayMode)\n\n/// will be returned if the game was unable to change the window size.\n\npub fn set_height(ctx: &mut 
Context, height: i32) -> Result {\n\n set_size(ctx, ctx.window.get_window_width(), height)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 5, "score": 315856.2140465401 }, { "content": "/// Resets the transform matrix.\n\n///\n\n/// This is a shortcut for calling [`graphics::set_transform_matrix(ctx, Mat4::identity())`](set_transform_matrix).\n\npub fn reset_transform_matrix(ctx: &mut Context) {\n\n set_transform_matrix(ctx, Mat4::identity());\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 6, "score": 311380.92755291273 }, { "content": "/// Sets the master volume for the game.\n\n///\n\n/// The parameter is used as a multiplier - for example, `1.0` would result in\n\n/// sounds being played back at their original volume.\n\npub fn set_master_volume(ctx: &mut Context, volume: f32) {\n\n ctx.audio.set_master_volume(volume);\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 7, "score": 310790.70729675354 }, { "content": "/// Returns the current transform matrix.\n\npub fn get_transform_matrix(ctx: &Context) -> Mat4<f32> {\n\n ctx.graphics.transform_matrix\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 8, "score": 299590.7077061682 }, { "content": "/// Quits the game, if it is currently running.\n\n///\n\n/// Note that quitting the game does not take effect until the end of the current\n\n/// cycle of the game loop. This will probably change later.\n\npub fn quit(ctx: &mut Context) {\n\n ctx.running = false;\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 9, "score": 288911.11683846434 }, { "content": "/// Sends queued data to the graphics hardware.\n\n///\n\n/// You usually will not have to call this manually, as the graphics API will\n\n/// automatically flush when necessary. 
Try to keep flushing to a minimum,\n\n/// as this will reduce the number of draw calls made to the\n\n/// graphics device.\n\npub fn flush(ctx: &mut Context) {\n\n if !ctx.graphics.vertex_data.is_empty() {\n\n let texture = match &ctx.graphics.texture {\n\n ActiveTexture::Default => return,\n\n ActiveTexture::User(t) => t,\n\n };\n\n\n\n let shader = match &ctx.graphics.shader {\n\n ActiveShader::Default => &ctx.graphics.default_shader,\n\n ActiveShader::User(s) => s,\n\n };\n\n\n\n // TODO: Failing to apply the defaults should be handled more gracefully than this,\n\n // but we can't do that without breaking changes.\n\n let _ = shader.set_default_uniforms(\n\n &mut ctx.device,\n\n ctx.graphics.projection_matrix * ctx.graphics.transform_matrix,\n\n Color::WHITE,\n\n );\n\n\n", "file_path": "src/graphics.rs", "rank": 10, "score": 288911.1168384643 }, { "content": "/// Presents the result of drawing commands to the screen.\n\n///\n\n/// If any custom shaders/canvases are set, this function will unset them -\n\n/// don't rely on the state of one render carrying over to the next!\n\n///\n\n/// You usually will not have to call this manually, as it is called for you at the end of every\n\n/// frame. 
Note that calling it will trigger a [`flush`] to the graphics hardware.\n\npub fn present(ctx: &mut Context) {\n\n flush(ctx);\n\n\n\n ctx.window.swap_buffers();\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 11, "score": 288911.11683846434 }, { "content": "/// Sets the renderer back to using the default shader.\n\npub fn reset_shader(ctx: &mut Context) {\n\n set_shader_ex(ctx, ActiveShader::Default);\n\n}\n\n\n\npub(crate) fn set_shader_ex(ctx: &mut Context, shader: ActiveShader) {\n\n if shader != ctx.graphics.shader {\n\n flush(ctx);\n\n ctx.graphics.shader = shader;\n\n }\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 12, "score": 283353.9154856884 }, { "content": "/// Sets the renderer back to drawing to the screen directly.\n\npub fn reset_canvas(ctx: &mut Context) {\n\n set_canvas_ex(ctx, ActiveCanvas::Window);\n\n}\n\n\n\npub(crate) fn set_canvas_ex(ctx: &mut Context, canvas: ActiveCanvas) {\n\n if canvas != ctx.graphics.canvas {\n\n flush(ctx);\n\n ctx.graphics.canvas = canvas;\n\n\n\n match &ctx.graphics.canvas {\n\n ActiveCanvas::Window => {\n\n let (width, height) = window::get_size(ctx);\n\n\n\n ctx.graphics.projection_matrix = ortho(width as f32, height as f32, false);\n\n\n\n ctx.device.bind_framebuffer(None);\n\n ctx.device.front_face(ctx.graphics.winding);\n\n ctx.device.viewport(0, 0, width, height);\n\n }\n\n ActiveCanvas::User(r) => {\n", "file_path": "src/graphics.rs", "rank": 13, "score": 283349.4754053156 }, { "content": "/// Sets the specified gamepad's motors to vibrate indefinitely.\n\npub fn set_gamepad_vibration(ctx: &mut Context, gamepad_id: usize, strength: f32) {\n\n if let Some(platform_id) = get_gamepad(ctx, gamepad_id).map(|g| g.platform_id) {\n\n ctx.window.set_gamepad_vibration(platform_id, strength);\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 14, "score": 279371.6402395121 }, { "content": "/// Returns a value between 0.0 and 1.0, representing how far between updates the game loop\n\n/// currently 
is.\n\n///\n\n/// For example, if the value is 0.01, an update just happened; if the value is 0.99,\n\n/// an update is about to happen.\n\n///\n\n/// This can be used to interpolate when rendering.\n\n///\n\n/// This function returns an [`f32`], which is usually what you want when blending - however,\n\n/// if you need a more precise representation of the blend factor, you can call\n\n/// [`get_blend_factor_precise`].\n\npub fn get_blend_factor(ctx: &Context) -> f32 {\n\n match &ctx.time.timestep {\n\n Some(fixed) => fixed.accumulator.as_secs_f32() / fixed.tick_rate.as_secs_f32(),\n\n None => 0.0,\n\n }\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 15, "score": 278743.895185008 }, { "content": "/// Get the X co-ordinate of the mouse.\n\npub fn get_mouse_x(ctx: &Context) -> f32 {\n\n ctx.input.mouse_position.x\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 16, "score": 278737.20095504675 }, { "content": "/// Get the Y co-ordinate of the mouse.\n\npub fn get_mouse_y(ctx: &Context) -> f32 {\n\n ctx.input.mouse_position.y\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 17, "score": 278737.20095504675 }, { "content": "/// Clears the screen (or a canvas, if one is enabled) to the specified color.\n\npub fn clear(ctx: &mut Context, color: Color) {\n\n ctx.device.clear(color.r, color.g, color.b, color.a);\n\n}\n\n\n\n#[allow(clippy::too_many_arguments)]\n\npub(crate) fn push_quad(\n\n ctx: &mut Context,\n\n x1: f32,\n\n y1: f32,\n\n x2: f32,\n\n y2: f32,\n\n mut u1: f32,\n\n mut v1: f32,\n\n mut u2: f32,\n\n mut v2: f32,\n\n params: &DrawParams,\n\n) {\n\n // This function is a bit hairy, but it's more performant than doing the matrix math every\n\n // frame by a *lot* (at least going by the BunnyMark example). 
The logic is roughly based\n\n // on how FNA and LibGDX implement their spritebatches.\n", "file_path": "src/graphics.rs", "rank": 18, "score": 265906.8225439057 }, { "content": "/// Sets the specified gamepad's motors to vibrate for a set duration, specified in milliseconds.\n\n/// After this time has passed, the vibration will automatically stop.\n\npub fn start_gamepad_vibration(ctx: &mut Context, gamepad_id: usize, strength: f32, duration: u32) {\n\n if let Some(platform_id) = get_gamepad(ctx, gamepad_id).map(|g| g.platform_id) {\n\n ctx.window\n\n .start_gamepad_vibration(platform_id, strength, duration);\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 19, "score": 261166.98584927016 }, { "content": "/// Sets the shader that is currently being used for rendering.\n\n///\n\n/// If the shader is different from the one that is currently in use, this will trigger a\n\n/// [`flush`] to the graphics hardware - try to avoid shader swapping as\n\n/// much as you can.\n\npub fn set_shader(ctx: &mut Context, shader: &Shader) {\n\n set_shader_ex(ctx, ActiveShader::User(shader.clone()));\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 20, "score": 261010.58211601526 }, { "content": "/// Sets the renderer to redirect all drawing commands to the specified canvas.\n\n///\n\n/// If the canvas is different from the one that is currently in use, this will trigger a\n\n/// [`flush`] to the graphics hardware.\n\npub fn set_canvas(ctx: &mut Context, canvas: &Canvas) {\n\n set_canvas_ex(ctx, ActiveCanvas::User(canvas.clone()));\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 21, "score": 261009.83358725053 }, { "content": "/// Sets the timestep of the application.\n\npub fn set_timestep(ctx: &mut Context, timestep: Timestep) {\n\n ctx.time.timestep = create_timestep_state(timestep);\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 22, "score": 261005.85634209792 }, { "content": "/// Get the amount that the mouse wheel moved since the last 
update.\n\n///\n\n/// Most 'normal' mice can only scroll vertically, but some devices can also scroll horizontally.\n\n/// Use the Y component of the returned vector if you don't care about horizontal scroll.\n\n///\n\n/// Positive values correspond to scrolling up/right, negative values correspond to scrolling\n\n/// down/left.\n\npub fn get_mouse_wheel_movement(ctx: &Context) -> Vec2<i32> {\n\n ctx.input.mouse_wheel_movement\n\n}\n\n\n\npub(crate) fn set_mouse_button_down(ctx: &mut Context, btn: MouseButton) -> bool {\n\n let was_up = ctx.input.mouse_buttons_down.insert(btn);\n\n\n\n if was_up {\n\n ctx.input.mouse_buttons_pressed.insert(btn);\n\n }\n\n\n\n was_up\n\n}\n\n\n\npub(crate) fn set_mouse_button_up(ctx: &mut Context, btn: MouseButton) -> bool {\n\n let was_down = ctx.input.mouse_buttons_down.remove(&btn);\n\n\n\n if was_down {\n\n ctx.input.mouse_buttons_released.insert(btn);\n\n }\n", "file_path": "src/input/mouse.rs", "rank": 23, "score": 258763.81903698054 }, { "content": "/// Gets the width of the window.\n\npub fn get_width(ctx: &Context) -> i32 {\n\n ctx.window.get_window_width()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 24, "score": 258110.2291492553 }, { "content": "/// Gets the height of the window.\n\npub fn get_height(ctx: &Context) -> i32 {\n\n ctx.window.get_window_height()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 25, "score": 258110.23332537973 }, { "content": "/// Returns whether clockwise or counter-clockwise ordered vertices are currently considered front-facing.\n\n///\n\n/// Back-facing geometry will be culled (not rendered) by default.\n\n///\n\n/// The default winding order is counter-clockwise. 
This is correct for all of the geometry that Tetra\n\n/// generates, but if you are rendering a `Mesh` with clockwise ordered data, you will need to change\n\n/// this setting via [`set_front_face_winding`].\n\npub fn get_front_face_winding(ctx: &mut Context) -> VertexWinding {\n\n ctx.graphics.winding\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 26, "score": 257812.8976000484 }, { "content": "/// Sets whether or not the mouse is grabbed by the window.\n\n///\n\n/// When this is active, the cursor will not be able to leave the window while it\n\n/// is focused.\n\npub fn set_mouse_grabbed(ctx: &mut Context, mouse_grabbed: bool) {\n\n ctx.window.set_mouse_grabbed(mouse_grabbed);\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 27, "score": 252040.5334896184 }, { "content": "/// Sets whether the window should be in fullscreen mode.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::FailedToChangeDisplayMode`](crate::TetraError::FailedToChangeDisplayMode)\n\n/// will be returned if the game was unable to enter or exit fullscreen.\n\npub fn set_fullscreen(ctx: &mut Context, fullscreen: bool) -> Result {\n\n ctx.window.set_fullscreen(fullscreen)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 28, "score": 251199.0913631424 }, { "content": "/// Sets the title of the window.\n\npub fn set_title<S>(ctx: &mut Context, title: S)\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n ctx.window.set_window_title(title)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 29, "score": 251199.09136314242 }, { "content": "/// Sets whether the window should be vsynced.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::FailedToChangeDisplayMode`](crate::TetraError::FailedToChangeDisplayMode)\n\n/// will be returned if the game was unable to change vsync mode.\n\npub fn set_vsync(ctx: &mut Context, vsync: bool) -> Result {\n\n ctx.window.set_vsync(vsync)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 30, "score": 251199.0913631424 }, { "content": "/// Returns the current position 
of the specified gamepad axis.\n\n///\n\n/// If the gamepad is disconnected, this will always return `0.0`.\n\npub fn get_gamepad_axis_position(ctx: &Context, gamepad_id: usize, axis: GamepadAxis) -> f32 {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n if let Some(value) = pad.current_axis_state.get(&axis) {\n\n *value\n\n } else {\n\n 0.0\n\n }\n\n } else {\n\n 0.0\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 31, "score": 250751.77566035898 }, { "content": "/// Stops the specified gamepad's motors from vibrating.\n\npub fn stop_gamepad_vibration(ctx: &mut Context, gamepad_id: usize) {\n\n if let Some(platform_id) = get_gamepad(ctx, gamepad_id).map(|g| g.platform_id) {\n\n ctx.window.stop_gamepad_vibration(platform_id);\n\n }\n\n}\n\n\n\npub(crate) fn add_gamepad(ctx: &mut Context, platform_id: u32) -> usize {\n\n for (i, slot) in ctx.input.pads.iter_mut().enumerate() {\n\n if slot.is_none() {\n\n *slot = Some(GamepadState::new(platform_id));\n\n return i;\n\n }\n\n }\n\n\n\n // There wasn't an existing free slot...\n\n let i = ctx.input.pads.len();\n\n ctx.input.pads.push(Some(GamepadState::new(platform_id)));\n\n i\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 32, "score": 247929.88981248336 }, { "content": "/// Sets whether clockwise or counter-clockwise ordered vertices should be considered front-facing.\n\n///\n\n/// Back-facing geometry will be culled (not rendered) by default.\n\n///\n\n/// The default winding order is counter-clockwise. 
This is correct for all of the geometry that Tetra\n\n/// generates, but if you are rendering a `Mesh` with clockwise ordered data, you will need to change\n\n/// this setting.\n\npub fn set_front_face_winding(ctx: &mut Context, winding: VertexWinding) {\n\n if ctx.graphics.winding != winding {\n\n flush(ctx);\n\n ctx.graphics.winding = winding;\n\n\n\n // Because canvas rendering is effectively done upside-down, the winding order is the opposite\n\n // of what you'd expect in that case.\n\n ctx.device.front_face(match &ctx.graphics.canvas {\n\n ActiveCanvas::Window => winding,\n\n ActiveCanvas::User(_) => winding.flipped(),\n\n });\n\n }\n\n}\n\n\n\npub(crate) fn set_viewport_size(\n\n ctx: &mut Context,\n\n width: i32,\n\n height: i32,\n\n pixel_width: i32,\n\n pixel_height: i32,\n", "file_path": "src/graphics.rs", "rank": 33, "score": 247929.88981248336 }, { "content": "/// Returns an iterator of the keys that are currently down.\n\npub fn get_keys_down(ctx: &Context) -> impl Iterator<Item = &Key> {\n\n ctx.input.keys_down.iter()\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", "rank": 34, "score": 247673.58660742894 }, { "content": "/// Sets whether or not the mouse cursor should be visible.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned if\n\n/// the cursor state was inaccessible.\n\npub fn set_mouse_visible(ctx: &mut Context, visible: bool) -> Result {\n\n ctx.window.set_mouse_visible(visible)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 35, "score": 246847.69854027953 }, { "content": "/// Sets whether or not relative mouse mode is enabled.\n\n///\n\n/// While the mouse is in relative mode, the cursor is hidden and can move beyond the\n\n/// bounds of the window. The `delta` field of [`Event::MouseMoved`](crate::Event::MouseMoved)\n\n/// can then be used to track the cursor's changes in position. 
This is useful when\n\n/// implementing control schemes that require the mouse to be able to move infinitely\n\n/// in any direction (for example, FPS-style movement).\n\n///\n\n/// While this mode is enabled, the absolute position of the mouse may not be updated -\n\n/// as such, you should not rely on it.\n\npub fn set_relative_mouse_mode(ctx: &mut Context, relative_mouse_mode: bool) {\n\n ctx.window.set_relative_mouse_mode(relative_mouse_mode);\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 36, "score": 244048.66075362664 }, { "content": "/// Sets the filter mode that will be used by newly created textures and canvases.\n\npub fn set_default_filter_mode(ctx: &mut Context, filter_mode: FilterMode) {\n\n ctx.graphics.default_filter_mode = filter_mode;\n\n}\n\n\n\n/// Information about the device currently being used to render graphics.\n\n#[derive(Debug, Clone)]\n\npub struct GraphicsDeviceInfo {\n\n /// The name of the company responsible for the OpenGL implementation.\n\n pub vendor: String,\n\n\n\n /// The name of the renderer. This usually corresponds to the name\n\n /// of the physical device.\n\n pub renderer: String,\n\n\n\n /// The version of OpenGL that is being used.\n\n pub opengl_version: String,\n\n\n\n /// The version of GLSL that is being used.\n\n pub glsl_version: String,\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 37, "score": 244044.85675124708 }, { "content": "/// Sets whether or not key repeat should be enabled.\n\n///\n\n/// Normally, a [`KeyPressed`](crate::Event::KeyPressed) event will only be fired once, when\n\n/// the key is initially pressed. 
Enabling key repeat causes `KeyPressed` events to be fired\n\n/// continuously while the key is held down.\n\npub fn set_key_repeat_enabled(ctx: &mut Context, key_repeat_enabled: bool) {\n\n ctx.window.set_key_repeat_enabled(key_repeat_enabled);\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 38, "score": 244040.55325311303 }, { "content": "/// Returns an iterator of the keys that were released since the last update.\n\npub fn get_keys_released(ctx: &Context) -> impl Iterator<Item = &Key> {\n\n ctx.input.keys_released.iter()\n\n}\n\n\n\npub(crate) fn set_key_down(ctx: &mut Context, key: Key) -> bool {\n\n let was_up = ctx.input.keys_down.insert(key);\n\n\n\n if was_up {\n\n ctx.input.keys_pressed.insert(key);\n\n }\n\n\n\n was_up\n\n}\n\n\n\npub(crate) fn set_key_up(ctx: &mut Context, key: Key) -> bool {\n\n let was_down = ctx.input.keys_down.remove(&key);\n\n\n\n if was_down {\n\n ctx.input.keys_released.insert(key);\n\n }\n", "file_path": "src/input/keyboard.rs", "rank": 39, "score": 243558.15279668116 }, { "content": "/// Returns an iterator of the keys that were pressed since the last update.\n\npub fn get_keys_pressed(ctx: &Context) -> impl Iterator<Item = &Key> {\n\n ctx.input.keys_pressed.iter()\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", "rank": 40, "score": 243558.15279668116 }, { "content": "/// Gets the height of the monitor that the window is currently on.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_current_monitor_height(ctx: &Context) -> Result<i32> {\n\n get_current_monitor_size(ctx).map(|(_, h)| h)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 41, "score": 238326.6855156753 }, { "content": "/// Gets the width of the monitor that the window is currently on.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state 
was inaccessible.\n\npub fn get_current_monitor_width(ctx: &Context) -> Result<i32> {\n\n get_current_monitor_size(ctx).map(|(w, _)| w)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 42, "score": 238326.68199372123 }, { "content": "/// Returns whether or not the window is currently in fullscreen mode.\n\npub fn is_fullscreen(ctx: &Context) -> bool {\n\n ctx.window.is_fullscreen()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 43, "score": 228246.8011516332 }, { "content": "pub fn handle_events<S, E>(ctx: &mut Context, state: &mut S) -> result::Result<(), E>\n\nwhere\n\n S: State<E>,\n\n E: From<TetraError>,\n\n{\n\n while let Some(event) = ctx.window.event_pump.poll_event() {\n\n match event {\n\n SdlEvent::Quit { .. } => ctx.running = false, // TODO: Add a way to override this\n\n\n\n SdlEvent::Window { win_event, .. } => match win_event {\n\n WindowEvent::SizeChanged(width, height) => {\n\n ctx.window.window_width = width;\n\n ctx.window.window_height = height;\n\n\n\n let (pixel_width, pixel_height) = ctx.window.sdl_window.drawable_size();\n\n\n\n graphics::set_viewport_size(\n\n ctx,\n\n width,\n\n height,\n", "file_path": "src/platform/window_sdl.rs", "rank": 44, "score": 225214.9389567675 }, { "content": "/// Returns the amount of time that has accumulated between updates.\n\n///\n\n/// When using a fixed time step, as time passes, this value will increase;\n\n/// as updates occur, it will decrease.\n\n///\n\n/// When using a variable time step, this function always returns `Duration::from_secs(0)`.\n\npub fn get_accumulator(ctx: &Context) -> Duration {\n\n match &ctx.time.timestep {\n\n Some(fixed) => fixed.accumulator,\n\n None => Duration::from_secs(0),\n\n }\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 45, "score": 223820.59446813085 }, { "content": "/// Gets the current title of the window.\n\npub fn get_title(ctx: &Context) -> &str {\n\n ctx.window.get_window_title()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 46, "score": 
223816.0231128431 }, { "content": "/// Returns whethere or not vsync is enabled.\n\npub fn is_vsync_enabled(ctx: &Context) -> bool {\n\n ctx.window.is_vsync_enabled()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 47, "score": 223816.0231128431 }, { "content": "/// Returns whether or not the mouse is currently grabbed by the window.\n\n///\n\n/// When this is active, the cursor will not be able to leave the window while it\n\n/// is focused.\n\npub fn is_mouse_grabbed(ctx: &Context) -> bool {\n\n ctx.window.is_mouse_grabbed()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 48, "score": 223816.02311284308 }, { "content": "/// Returns whether or not the mouse cursor is currently visible.\n\npub fn is_mouse_visible(ctx: &Context) -> bool {\n\n ctx.window.is_mouse_visible()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 49, "score": 223816.0231128431 }, { "content": "/// Gets the current timestep of the application.\n\npub fn get_timestep(ctx: &Context) -> Timestep {\n\n match &ctx.time.timestep {\n\n Some(fixed) => Timestep::Fixed(fixed.ticks_per_second),\n\n None => Timestep::Variable,\n\n }\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 50, "score": 223816.0231128431 }, { "content": "/// Returns the current frame rate, averaged out over the last 200 frames.\n\npub fn get_fps(ctx: &Context) -> f64 {\n\n 1.0 / (ctx.time.fps_tracker.iter().sum::<f64>() / ctx.time.fps_tracker.len() as f64)\n\n}\n", "file_path": "src/time.rs", "rank": 51, "score": 223816.0231128431 }, { "content": "/// Gets the height of a monitor connected to the device.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_monitor_height(ctx: &Context, monitor_index: i32) -> Result<i32> {\n\n get_monitor_size(ctx, monitor_index).map(|(_, h)| h)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 52, "score": 221301.10727556073 }, { "content": "/// Gets the width of a 
monitor connected to the device.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_monitor_width(ctx: &Context, monitor_index: i32) -> Result<i32> {\n\n get_monitor_size(ctx, monitor_index).map(|(w, _)| w)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 53, "score": 221301.10375360667 }, { "content": "/// Returns the amount of time that has passed since the last frame was rendered.\n\n///\n\n/// When using a variable time step, you should use this to integrate the amount of time that\n\n/// has passed into your game's calculations. For example, if you wanted to move a\n\n/// [`Vec2`](crate::math::Vec2) 32 units to the right per second, you would do\n\n/// `foo.y += 32.0 * time::get_delta_time(ctx).as_secs_f32()`\n\n///\n\n/// When using a fixed time step, the above still applies, but only to rendering - you should\n\n/// not integrate the delta time into your update calculations.\n\npub fn get_delta_time(ctx: &Context) -> Duration {\n\n ctx.time.elapsed\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 54, "score": 219675.73629793944 }, { "content": "/// Returns whether or not relative mouse mode is currently enabled.\n\n///\n\n/// While the mouse is in relative mode, the cursor is hidden and can move beyond the\n\n/// bounds of the window. The `delta` field of [`Event::MouseMoved`](crate::Event::MouseMoved)\n\n/// can then be used to track the cursor's changes in position. 
This is useful when\n\n/// implementing control schemes that require the mouse to be able to move infinitely\n\n/// in any direction (for example, FPS-style movement).\n\n///\n\n/// While this mode is enabled, the absolute position of the mouse may not be updated -\n\n/// as such, you should not rely on it.\n\npub fn is_relative_mouse_mode(ctx: &Context) -> bool {\n\n ctx.window.is_relative_mouse_mode()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 55, "score": 219668.9562259494 }, { "content": "/// Returns true if the user scrolled up since the last update.\n\npub fn is_mouse_scrolled_up(ctx: &Context) -> bool {\n\n get_mouse_wheel_movement(ctx).y > 0\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 56, "score": 219660.88721858477 }, { "content": "/// Returns whether or not the user's screen saver can be displayed while the game is running.\n\npub fn is_screen_saver_enabled(ctx: &Context) -> bool {\n\n ctx.window.is_screen_saver_enabled()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 57, "score": 219660.8872185848 }, { "content": "/// Returns true if the user scrolled down since the last update.\n\npub fn is_mouse_scrolled_down(ctx: &Context) -> bool {\n\n get_mouse_wheel_movement(ctx).y < 0\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 58, "score": 219660.88721858477 }, { "content": "/// Returns whether or not key repeat is enabled.\n\n///\n\n/// Normally, a [`KeyPressed`](crate::Event::KeyPressed) event will only be fired once, when\n\n/// the key is initially pressed. 
Enabling key repeat causes `KeyPressed` events to be fired\n\n/// continuously while the key is held down.\n\npub fn is_key_repeat_enabled(ctx: &Context) -> bool {\n\n ctx.window.is_key_repeat_enabled()\n\n}\n", "file_path": "src/window.rs", "rank": 59, "score": 219660.88721858477 }, { "content": "/// Returns a precise value between 0.0 and 1.0, representing how far between updates the game loop\n\n/// currently is.\n\n///\n\n/// For example, if the value is 0.01, an update just happened; if the value is 0.99,\n\n/// an update is about to happen.\n\n///\n\n/// This can be used to interpolate when rendering.\n\n///\n\n/// This function returns an [`f64`], which is a very precise representation of the blend factor,\n\n/// but often difficult to use in game logic without casting. If you need an [`f32`], call\n\n/// [`get_blend_factor`] instead.\n\npub fn get_blend_factor_precise(ctx: &Context) -> f64 {\n\n match &ctx.time.timestep {\n\n Some(fixed) => fixed.accumulator.as_secs_f64() / fixed.tick_rate.as_secs_f64(),\n\n None => 0.0,\n\n }\n\n}\n\n\n", "file_path": "src/time.rs", "rank": 60, "score": 215763.9472917081 }, { "content": "/// Gets the size of the window.\n\npub fn get_size(ctx: &Context) -> (i32, i32) {\n\n ctx.window.get_window_size()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 61, "score": 214625.78093962715 }, { "content": "/// Retrieves information about the device currently being used to render graphics.\n\n///\n\n/// This may be useful for debugging/logging purposes.\n\npub fn get_device_info(ctx: &Context) -> GraphicsDeviceInfo {\n\n GraphicsDeviceInfo {\n\n vendor: ctx.device.get_vendor(),\n\n renderer: ctx.device.get_renderer(),\n\n opengl_version: ctx.device.get_version(),\n\n glsl_version: ctx.device.get_shading_language_version(),\n\n }\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 62, "score": 212085.6582359324 }, { "content": "/// Returns the filter mode that will be used by newly created textures and canvases.\n\npub fn 
get_default_filter_mode(ctx: &Context) -> FilterMode {\n\n ctx.graphics.default_filter_mode\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 63, "score": 212084.98372741393 }, { "content": "/// Returns the text that the user entered since the last update.\n\n/// This will match the user's keyboard and OS settings.\n\npub fn get_text_input(ctx: &Context) -> Option<&str> {\n\n ctx.input.current_text_input.as_deref()\n\n}\n\n\n", "file_path": "src/input.rs", "rank": 64, "score": 210721.34127431625 }, { "content": "/// Gets the text currently stored in the system's clipboard.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be\n\n/// returned if the text could not be retrieved from the clipboard.\n\npub fn get_clipboard_text(ctx: &Context) -> Result<String> {\n\n ctx.window.get_clipboard_text()\n\n}\n\n\n", "file_path": "src/input.rs", "rank": 65, "score": 210721.34127431625 }, { "content": "/// Gets the index of the monitor that the window is currently on.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_current_monitor(ctx: &Context) -> Result<i32> {\n\n ctx.window.get_current_monitor()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 66, "score": 210721.34127431625 }, { "content": "/// Gets the number of monitors connected to the device.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_monitor_count(ctx: &Context) -> Result<i32> {\n\n ctx.window.get_monitor_count()\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 67, "score": 210721.34127431625 }, { "content": "/// Draws an object to the screen (or to a canvas, if one is enabled).\n\n///\n\n/// This function simply calls [`Drawable::draw`] on the passed object - it is\n\n/// provided to allow 
you to avoid having to import the [`Drawable`] trait as well\n\n/// as the `graphics` module.\n\npub fn draw<D: Drawable, P: Into<DrawParams>>(ctx: &mut Context, drawable: &D, params: P) {\n\n drawable.draw(ctx, params);\n\n}\n\n\n\n/// Sets the texture that is currently being used for rendering.\n\n///\n\n/// If the texture is different from the one that is currently in use, this will trigger a\n\n/// [`flush`] to the graphics hardware - try to avoid texture swapping as\n\n/// much as you can.\n\npub(crate) fn set_texture(ctx: &mut Context, texture: &Texture) {\n\n set_texture_ex(ctx, ActiveTexture::User(texture.clone()));\n\n}\n\n\n\npub(crate) fn set_texture_ex(ctx: &mut Context, texture: ActiveTexture) {\n\n if texture != ctx.graphics.texture {\n\n flush(ctx);\n\n ctx.graphics.texture = texture;\n\n }\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 68, "score": 208922.15282783116 }, { "content": "/// Gets the name of the monitor that the window is currently on.\n\n///\n\n/// # Errors\n\n///\n\n/// * [[`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_current_monitor_name(ctx: &Context) -> Result<String> {\n\n let monitor_index = ctx.window.get_current_monitor()?;\n\n ctx.window.get_monitor_name(monitor_index)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 69, "score": 207045.5739503222 }, { "content": "/// Returns true if the specified key is currently down.\n\npub fn is_key_down(ctx: &Context, key: Key) -> bool {\n\n ctx.input.keys_down.contains(&key)\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", "rank": 70, "score": 206218.46261875937 }, { "content": "/// Returns true if the specified key is currently up.\n\npub fn is_key_up(ctx: &Context, key: Key) -> bool {\n\n !ctx.input.keys_down.contains(&key)\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", "rank": 71, "score": 206218.46261875937 }, { "content": "/// Returns true if the specified key was released 
since the last update.\n\npub fn is_key_released(ctx: &Context, key: Key) -> bool {\n\n ctx.input.keys_released.contains(&key)\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", "rank": 72, "score": 202542.69529476535 }, { "content": "/// Sets the contents of the system's clipboard.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be\n\n/// returned if the clipboard could not be modified.\n\npub fn set_clipboard_text(ctx: &Context, text: &str) -> Result {\n\n ctx.window.set_clipboard_text(text)\n\n}\n\n\n\npub(crate) fn push_text_input(ctx: &mut Context, text: &str) {\n\n match &mut ctx.input.current_text_input {\n\n Some(existing) => existing.push_str(text),\n\n x @ None => *x = Some(text.to_string()),\n\n }\n\n}\n", "file_path": "src/input.rs", "rank": 73, "score": 202542.69529476535 }, { "content": "/// Returns true if the specified key was pressed since the last update.\n\npub fn is_key_pressed(ctx: &Context, key: Key) -> bool {\n\n ctx.input.keys_pressed.contains(&key)\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", "rank": 74, "score": 202542.69529476535 }, { "content": "/// Sets whether or not the user's screen saver can be displayed while the game is running.\n\npub fn set_screen_saver_enabled(ctx: &Context, screen_saver_enabled: bool) {\n\n ctx.window.set_screen_saver_enabled(screen_saver_enabled);\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 75, "score": 200304.14591359894 }, { "content": "/// Returns true if the specified gamepad is currently connected.\n\npub fn is_gamepad_connected(ctx: &Context, gamepad_id: usize) -> bool {\n\n get_gamepad(ctx, gamepad_id).is_some()\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 76, "score": 199076.08278667092 }, { "content": "/// Returns true if the specified mouse button is currently down.\n\npub fn is_mouse_button_down(ctx: &Context, button: MouseButton) -> bool {\n\n ctx.input.mouse_buttons_down.contains(&button)\n\n}\n\n\n", 
"file_path": "src/input/mouse.rs", "rank": 77, "score": 199076.08278667092 }, { "content": "/// Returns true if the specified mouse button is currently up.\n\npub fn is_mouse_button_up(ctx: &Context, button: MouseButton) -> bool {\n\n !ctx.input.mouse_buttons_down.contains(&button)\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 78, "score": 199076.08278667092 }, { "content": "/// Gets the size of the monitor that the window is currently on.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_current_monitor_size(ctx: &Context) -> Result<(i32, i32)> {\n\n let monitor_index = ctx.window.get_current_monitor()?;\n\n ctx.window.get_monitor_size(monitor_index)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 79, "score": 199076.08278667092 }, { "content": "/// Returns true if the specified gamepad supports vibration.\n\n///\n\n/// If the gamepad is disconnected, this will always return `false`.\n\npub fn is_gamepad_vibration_supported(ctx: &Context, gamepad_id: usize) -> bool {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n ctx.window.is_gamepad_vibration_supported(pad.platform_id)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 80, "score": 195801.26725804206 }, { "content": "/// Returns true if the specified mouse button was pressed since the last update.\n\npub fn is_mouse_button_pressed(ctx: &Context, button: MouseButton) -> bool {\n\n ctx.input.mouse_buttons_pressed.contains(&button)\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 81, "score": 195801.26725804206 }, { "content": "/// Returns true if the specified key modifier is currently up.\n\npub fn is_key_modifier_up(ctx: &Context, key_modifier: KeyModifier) -> bool {\n\n let (a, b) = get_modifier_keys(key_modifier);\n\n\n\n is_key_up(ctx, a) && is_key_up(ctx, b)\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", 
"rank": 82, "score": 195801.26725804206 }, { "content": "/// Returns true if the specified key modifier is currently down.\n\npub fn is_key_modifier_down(ctx: &Context, key_modifier: KeyModifier) -> bool {\n\n let (a, b) = get_modifier_keys(key_modifier);\n\n\n\n is_key_down(ctx, a) || is_key_down(ctx, b)\n\n}\n\n\n", "file_path": "src/input/keyboard.rs", "rank": 83, "score": 195801.26725804206 }, { "content": "/// Returns true if the specified mouse button was released since the last update.\n\npub fn is_mouse_button_released(ctx: &Context, button: MouseButton) -> bool {\n\n ctx.input.mouse_buttons_released.contains(&button)\n\n}\n\n\n", "file_path": "src/input/mouse.rs", "rank": 84, "score": 195801.26725804206 }, { "content": "/// Gets the name of a monitor connected to the device.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_monitor_name(ctx: &Context, monitor_index: i32) -> Result<String> {\n\n ctx.window.get_monitor_name(monitor_index)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 85, "score": 191750.46498571872 }, { "content": "/// Returns the name of the specified gamepad, or [`None`] if it is not connected.\n\npub fn get_gamepad_name(ctx: &Context, gamepad_id: usize) -> Option<String> {\n\n get_gamepad(ctx, gamepad_id)\n\n .map(|g| g.platform_id)\n\n .map(|id| ctx.window.get_gamepad_name(id))\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 86, "score": 188651.95760802086 }, { "content": "/// Gets the size of a monitor connected to the device.\n\n///\n\n/// # Errors\n\n///\n\n/// * [`TetraError::PlatformError`](crate::TetraError::PlatformError) will be returned\n\n/// if the monitor state was inaccessible.\n\npub fn get_monitor_size(ctx: &Context, monitor_index: i32) -> Result<(i32, i32)> {\n\n ctx.window.get_monitor_size(monitor_index)\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 87, "score": 
184988.40512241347 }, { "content": "/// Returns true if the specified gamepad button is currently down.\n\n///\n\n/// If the gamepad is disconnected, this will always return `false`.\n\npub fn is_gamepad_button_down(ctx: &Context, gamepad_id: usize, button: GamepadButton) -> bool {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n pad.buttons_down.contains(&button)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 88, "score": 182052.34111195354 }, { "content": "/// Returns true if the specified gamepad button is currently up.\n\n///\n\n/// If the gamepad is disconnected, this will always return `true`.\n\npub fn is_gamepad_button_up(ctx: &Context, gamepad_id: usize, button: GamepadButton) -> bool {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n !pad.buttons_down.contains(&button)\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 89, "score": 182052.34111195354 }, { "content": "/// Returns true if the specified gamepad button was released since the last update.\n\n///\n\n/// If the gamepad is disconnected, this will always return `false`.\n\npub fn is_gamepad_button_released(ctx: &Context, gamepad_id: usize, button: GamepadButton) -> bool {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n pad.buttons_released.contains(&button)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 90, "score": 179266.27231023245 }, { "content": "/// Returns true if the specified gamepad button was pressed since the last update.\n\n///\n\n/// If the gamepad is disconnected, this will always return `false`.\n\npub fn is_gamepad_button_pressed(ctx: &Context, gamepad_id: usize, button: GamepadButton) -> bool {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n pad.buttons_pressed.contains(&button)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 91, "score": 179266.27231023245 }, { "content": "fn 
project_impl(window_pos: f32, rect_pos: f32, rect_size: f32, real_size: f32) -> f32 {\n\n (real_size * (window_pos - rect_pos)) / rect_size\n\n}\n\n\n", "file_path": "src/graphics/scaling.rs", "rank": 92, "score": 157043.94296475296 }, { "content": "fn unproject_impl(screen_pos: f32, rect_pos: f32, rect_size: f32, real_size: f32) -> f32 {\n\n rect_pos + ((rect_size * screen_pos) / real_size)\n\n}\n\n\n\nimpl Drawable for ScreenScaler {\n\n fn draw<P>(&self, ctx: &mut Context, params: P)\n\n where\n\n P: Into<DrawParams>,\n\n {\n\n graphics::set_texture(ctx, &self.canvas.texture);\n\n\n\n graphics::push_quad(\n\n ctx,\n\n self.screen_rect.x,\n\n self.screen_rect.y,\n\n self.screen_rect.x + self.screen_rect.width,\n\n self.screen_rect.y + self.screen_rect.height,\n\n 0.0,\n\n 0.0,\n\n 1.0,\n", "file_path": "src/graphics/scaling.rs", "rank": 93, "score": 157043.94296475296 }, { "content": "/// Returns the current position of the specified gamepad control stick.\n\n///\n\n/// If the gamepad is disconnected, this will always return `(0.0, 0.0)`.\n\npub fn get_gamepad_stick_position(\n\n ctx: &Context,\n\n gamepad_id: usize,\n\n stick: GamepadStick,\n\n) -> Vec2<f32> {\n\n let (x_axis, y_axis) = match stick {\n\n GamepadStick::LeftStick => (GamepadAxis::LeftStickX, GamepadAxis::LeftStickY),\n\n GamepadStick::RightStick => (GamepadAxis::RightStickX, GamepadAxis::RightStickY),\n\n };\n\n\n\n Vec2::new(\n\n get_gamepad_axis_position(ctx, gamepad_id, x_axis),\n\n get_gamepad_axis_position(ctx, gamepad_id, y_axis),\n\n )\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 94, "score": 154406.44636773152 }, { "content": "fn clamp(val: f32) -> f32 {\n\n f32::min(f32::max(0.0, val), 1.0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Color;\n\n\n\n #[test]\n\n fn rgb8_creation() {\n\n assert!(same_color(\n\n Color::rgba(0.2, 0.4, 0.6, 1.0),\n\n Color::rgb8(51, 102, 153)\n\n ));\n\n }\n\n\n\n #[test]\n\n fn hex_creation() {\n\n let expected = Color::rgba(0.2, 0.4, 
0.6, 1.0);\n\n\n", "file_path": "src/graphics/color.rs", "rank": 95, "score": 138717.34355418338 }, { "content": "/// Converts a screen's dimensions into a rectangle that is scaled to fit in the given bounds.\n\n///\n\n/// This function may be useful if you want to use Tetra's scaling algorithms, but\n\n/// the built-in [`ScreenScaler`] abstraction does not fit your needs.\n\npub fn get_screen_rect(\n\n mode: ScalingMode,\n\n inner_width: i32,\n\n inner_height: i32,\n\n outer_width: i32,\n\n outer_height: i32,\n\n) -> Rectangle {\n\n let f_inner_width = inner_width as f32;\n\n let f_inner_height = inner_height as f32;\n\n let f_outer_width = outer_width as f32;\n\n let f_outer_height = outer_height as f32;\n\n\n\n let internal_aspect_ratio = f_inner_width / f_inner_height;\n\n let screen_aspect_ratio = f_outer_width / f_outer_height;\n\n\n\n match mode {\n\n ScalingMode::Fixed => {\n\n let screen_x = (outer_width - inner_width) / 2;\n\n let screen_y = (outer_height - inner_height) / 2;\n\n\n", "file_path": "src/graphics/scaling.rs", "rank": 96, "score": 122322.38602169434 }, { "content": "/// Returns an iterator of the buttons that are currently down on the specified gamepad.\n\n///\n\n/// If the gamepad is disconnected, an empty iterator will be returned.\n\npub fn get_gamepad_buttons_down(\n\n ctx: &Context,\n\n gamepad_id: usize,\n\n) -> impl Iterator<Item = &GamepadButton> {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n GamepadIterator::Connected(pad.buttons_down.iter())\n\n } else {\n\n GamepadIterator::Disconnected\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 97, "score": 122313.77921466538 }, { "content": "/// Returns an iterator of the buttons that were pressed on the specified gamepad since the last update.\n\n///\n\n/// If the gamepad is disconnected, an empty iterator will be returned.\n\npub fn get_gamepad_buttons_pressed(\n\n ctx: &Context,\n\n gamepad_id: usize,\n\n) -> impl Iterator<Item = &GamepadButton> {\n\n if let 
Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n GamepadIterator::Connected(pad.buttons_pressed.iter())\n\n } else {\n\n GamepadIterator::Disconnected\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 98, "score": 120097.56313849436 }, { "content": "/// Returns an iterator of the buttons that were released on the specified gamepad since the last update .\n\n///\n\n/// If the gamepad is disconnected, an empty iterator will be returned.\n\npub fn get_gamepad_buttons_released(\n\n ctx: &Context,\n\n gamepad_id: usize,\n\n) -> impl Iterator<Item = &GamepadButton> {\n\n if let Some(pad) = get_gamepad(ctx, gamepad_id) {\n\n GamepadIterator::Connected(pad.buttons_released.iter())\n\n } else {\n\n GamepadIterator::Disconnected\n\n }\n\n}\n\n\n", "file_path": "src/input/gamepad.rs", "rank": 99, "score": 120097.56313849436 } ]
Rust
src/rtl/tree.rs
GuillaumeDIDIER/C-teel
4e46a6623dc3ce669e7525cff7069dbdeaa4043b
use common::ops; pub use parse::ast::Ident; use std::collections::{HashMap, HashSet}; use std::fmt::Display; use std::fmt; use common::register::Register; use common::label::Label; use common::label::LabelAllocator; use common::register::RegisterAllocator; #[derive(Debug)] pub enum Instruction { Const(i64, Register, Label), AccessGlobal(Ident, Register, Label), AssignGlobal(Register, Ident, Label), Load(Register, i64, Register, Label), Store(Register, Register, i64, Label), UnaryOp(ops::x64UnaryOp, Register, Label), BinaryOp(ops::x64BinaryOp, Register, Register, Label), Branch(ops::x64Branch, Label, Label), Call(Register, Ident, Vec<Register>, Label), Goto(Label), } impl Instruction { pub fn successors(&self) -> Vec<Label>{ match *self { Instruction::Const(_,_, ref label) | Instruction::AccessGlobal(_, _, ref label) | Instruction::AssignGlobal(_, _, ref label) | Instruction::Load(_, _, _, ref label) | Instruction::Store(_, _, _, ref label) | Instruction::UnaryOp(_, _, ref label) | Instruction::BinaryOp(_, _, _, ref label) | Instruction::Call(_, _, _, ref label) | Instruction::Goto(ref label) => { vec![label.clone()] }, Instruction::Branch(_, ref label1, ref label2) => { vec![label2.clone(), label1.clone()] } } } } impl Display for Instruction { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Instruction::Const(ref value, ref reg, ref label) => { write!(f, "mov ${} {} --> {}", value, reg, label) }, Instruction::AccessGlobal(ref name, ref reg, ref label) => { write!(f, "mov {} {} --> {}", name, reg, label) }, Instruction::AssignGlobal(ref reg, ref name, ref label) => { write!(f, "mov {} {} --> {}", reg, name, label) }, Instruction::Load(ref sreg, ref offset, ref dreg, ref label) => { write!(f, "mov {}({}) {} --> {}", offset, sreg, dreg, label) }, Instruction::Store(ref sreg, ref dreg, ref offset, ref label) => { write!(f, "mov {} {}({}) --> {}", sreg, offset, dreg, label) }, Instruction::UnaryOp(ref op, ref reg, ref label) => { 
write!(f, "{} {} --> {}", op, reg, label) }, Instruction::BinaryOp(ref op, ref sreg, ref dreg, ref label) => { write!(f, "{} {} {} --> {}", op, sreg, dreg, label) }, Instruction::Call(ref reg, ref name, ref args, ref label) => { try!(write!(f, "{} <- call {}(", reg, name)); let mut i = args.iter(); if let Some(arg0) = i.next() { try!(write!(f, "{}", arg0)); for arg in i { try!(write!(f, ", {}", arg)); } } write!(f, ") --> {}", label) }, Instruction::Goto(ref label) => { write!(f, "goto {}", label) }, Instruction::Branch(ref branch_op, ref label1, ref label2) => { write!(f, "{} --> {}, {}", branch_op, label1, label2) }, } } } #[derive(Debug)] pub struct FuncDefinition { pub name: Ident, pub formals: Vec<Register>, pub result: Register, pub locals: HashSet<Register>, pub entry: Label, pub exit: Label, pub body: HashMap<Label, Instruction>, pub label_allocator: LabelAllocator, pub register_allocator: RegisterAllocator, } impl FuncDefinition { fn print_body(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { let mut visited = HashSet::<Label>::new(); self.visit(&mut visited, self.entry, f) } fn visit(& self, visited: & mut HashSet<Label>, l: Label, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { if visited.contains(&l) { return Ok(()); } visited.insert(l); if let Some(instruction) = self.body.get(&l) { try!(write!(f, " {}: {}\n", l, instruction)); for s in instruction.successors() { try!(self.visit(visited, s, f)); } return Ok(()) } else { return Ok(()); } } } impl Display for FuncDefinition { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { try!(write!(f, "{} {}(", self.result, self.name)); let mut i = self.formals.iter(); if let Some(formal0) = i.next() { try!(write!(f, "{}", formal0)); for formal in i { try!(write!(f, ", {}", formal)); } } try!(write!(f, ")\n")); try!(write!(f, " entry : {}\n", self.entry)); try!(write!(f, " exit : {}\n", self.exit)); try!(write!(f, " locals : ")); let mut i = self.locals.iter(); if let Some(local0) = 
i.next() { try!(write!(f, "{}", local0)); for local in i { try!(write!(f, ", {}", local)); } } try!(write!(f, "\n")); self.print_body(f) } } #[derive(Debug)] pub struct File { pub globals : Vec<Ident>, pub functions: Vec<FuncDefinition>, } impl Display for File { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { try!(write!(f, "== RTL ==================================================\n")); for i in 0..self.functions.len() { try!(write!(f, "{}", self.functions[i])); } write!(f, "== END ==================================================\n") } }
use common::ops; pub use parse::ast::Ident; use std::collections::{HashMap, HashSet}; use std::fmt::Display; use std::fmt; use common::register::Register; use common::label::Label; use common::label::LabelAllocator; use common::register::RegisterAllocator; #[derive(Debug)] pub enum Instruction { Const(i64, Register, Label), AccessGlobal(Ident, Register, Label), AssignGlobal(Register, Ident, Label), Load(Register, i64, Register, Label), Store(Register, Register, i64, Label), UnaryOp(ops::x64UnaryOp, Register, Label), BinaryOp(ops::x64BinaryOp, Register, Register, Label), Branch(ops::x64Branch, Label, Label), Call(Register, Ident, Vec<Register>, Label), Goto(Label), } impl Instruction { pub fn successors(&self) -> Vec<Label>{ match *self { Instruction::Const(_,_, ref label) | Instruction::AccessGlobal(_, _, ref label) | Instruction::AssignGlobal(_, _, ref label) | Instruction::Load(_, _, _, ref label) | Instruction::Store(_, _, _, ref label) | Instruction::UnaryOp(_, _, ref label) | Instruction::BinaryOp(_, _, _, ref label) | Instruction::Call(_, _, _, ref label) | Instruction::Goto(ref label) => { vec![label.clone()] }, Instruction::Branch(_, ref label1, ref label2) => { vec![label2.clone(), label1.clone()] } } } } impl Display for Instruction { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Instruction::Const(ref value, ref reg, ref label) => { write!(f, "mov ${} {} --> {}", value, reg, label) }, Instruction::AccessGlobal(ref name, ref reg, ref label) => { write!(f, "mov {} {} --> {}", name, reg, label) }, Instruction::AssignGlobal(ref reg, ref name, ref label) => { write!(f, "mov {} {} --> {}", reg, name, label) }, Instruction::Load(ref sreg, ref offset, ref dreg, ref label) => { write!(f, "mov {}({}) {} --> {}", offset, sreg, dreg, label) }, Instruction::Store(ref sreg, ref dreg, ref offset, ref label) => { write!(f, "mov {} {}({}) --> {}", sreg, offset, dreg, label) }, Instruction::UnaryOp(ref op, ref reg, ref label) => { 
write!(f, "{} {} --> {}", op, reg, label) }, Instruction::BinaryOp(ref op, ref sreg, ref dreg, ref label) => { write!(f, "{} {} {} --> {}", op, sreg, dreg, label) }, Instruction::Call(ref reg, ref name, ref args, ref label) => { try!(write!(f, "{} <- call {}(", reg, name)); let mut i = args.iter(); if let Some(arg0) = i.next() { try!(write!(f, "{}", arg0)); for arg in i { try!(write!(f, ", {}", arg)); } } write!(f, ") --> {}", label) }, Instruction::Goto(ref label) => { write!(f, "
ruct FuncDefinition { pub name: Ident, pub formals: Vec<Register>, pub result: Register, pub locals: HashSet<Register>, pub entry: Label, pub exit: Label, pub body: HashMap<Label, Instruction>, pub label_allocator: LabelAllocator, pub register_allocator: RegisterAllocator, } impl FuncDefinition { fn print_body(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { let mut visited = HashSet::<Label>::new(); self.visit(&mut visited, self.entry, f) } fn visit(& self, visited: & mut HashSet<Label>, l: Label, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { if visited.contains(&l) { return Ok(()); } visited.insert(l); if let Some(instruction) = self.body.get(&l) { try!(write!(f, " {}: {}\n", l, instruction)); for s in instruction.successors() { try!(self.visit(visited, s, f)); } return Ok(()) } else { return Ok(()); } } } impl Display for FuncDefinition { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { try!(write!(f, "{} {}(", self.result, self.name)); let mut i = self.formals.iter(); if let Some(formal0) = i.next() { try!(write!(f, "{}", formal0)); for formal in i { try!(write!(f, ", {}", formal)); } } try!(write!(f, ")\n")); try!(write!(f, " entry : {}\n", self.entry)); try!(write!(f, " exit : {}\n", self.exit)); try!(write!(f, " locals : ")); let mut i = self.locals.iter(); if let Some(local0) = i.next() { try!(write!(f, "{}", local0)); for local in i { try!(write!(f, ", {}", local)); } } try!(write!(f, "\n")); self.print_body(f) } } #[derive(Debug)] pub struct File { pub globals : Vec<Ident>, pub functions: Vec<FuncDefinition>, } impl Display for File { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { try!(write!(f, "== RTL ==================================================\n")); for i in 0..self.functions.len() { try!(write!(f, "{}", self.functions[i])); } write!(f, "== END ==================================================\n") } }
goto {}", label) }, Instruction::Branch(ref branch_op, ref label1, ref label2) => { write!(f, "{} --> {}, {}", branch_op, label1, label2) }, } } } #[derive(Debug)] pub st
random
[ { "content": "pub fn convert_char(ch: &str) -> Option<i64> {\n\n if ch.len() == 1 {\n\n if let Some(c) = ch.chars().next() {\n\n Some(c as i64)\n\n } else {\n\n None\n\n }\n\n } else if ch.len() == 2 {\n\n match ch {\n\n \"\\\\\\\\\" => Some(92),\n\n \"\\\\\\'\" => Some(39),\n\n \"\\\\\\\"\" => Some(34),\n\n _ => None,\n\n }\n\n } else if ch.len() == 4 {\n\n i64::from_str_radix(&ch[2..], 16).ok()\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/parse/lexer.rs", "rank": 0, "score": 77924.83636509418 }, { "content": "pub fn build_deref_tree_aux(first: Node<Expression>, v: &mut Vec<Node<Ident>>) -> Node<Expression> {\n\n if let Some(i) = v.pop() {\n\n let second = build_deref_tree_aux(first.clone(), v);\n\n\n\n Node {\n\n start: first.start,\n\n stop: second.stop.clone(),\n\n t: Expression::MembDeref(Box::new(second), i),\n\n }\n\n } else {\n\n first\n\n }\n\n}\n", "file_path": "src/parse/parser.rs", "rank": 1, "score": 75567.43670046444 }, { "content": "pub fn build_deref_tree(first: Node<Expression>, v: &[Node<Ident>]) -> Node<Expression> {\n\n let mut vm = Vec::from(v);\n\n build_deref_tree_aux(first, &mut vm)\n\n}\n\n\n", "file_path": "src/parse/parser.rs", "rank": 2, "score": 63316.70223534066 }, { "content": "fn type_parameters(\n\n params: &[past::Node<past::Param>],\n\n types: &HashMap<String, tast::Struct>\n\n) -> Result<Vec<tast::Type>, String> {\n\n let mut res = Vec::new();\n\n for n_param in params {\n\n match n_param.t {\n\n past::Param::Int(_) => {res.push(tast::Type::Int);},\n\n past::Param::Struct(ref typ, _) => {\n\n if types.contains_key(&typ.t){\n\n res.push(tast::Type::Struct(typ.t.clone()))\n\n } else {\n\n return Err(String::from(\"Undefined parameter type\"));\n\n }\n\n },\n\n }\n\n }\n\n Ok(res)\n\n}\n\n\n", "file_path": "src/typing/typer.rs", "rank": 3, "score": 38757.03832333347 }, { "content": "#[derive(Debug, Default)]\n\npub struct LabelAllocator {\n\n count: i64,\n\n}\n\n\n\npub type Label = i64;\n\n\n\n\n\nimpl LabelAllocator 
{\n\n pub fn new() -> Self {\n\n LabelAllocator{count: 0}\n\n }\n\n pub fn fresh(& mut self) -> Label {\n\n let res : Label = self.count;//format!(\"L{}\", self.count);\n\n self.count += 1;\n\n res\n\n }\n\n}\n", "file_path": "src/common/label.rs", "rank": 4, "score": 28018.435116542583 }, { "content": " R15,\n\n Pseudo(i64),\n\n}\n\n\n\nimpl Register {\n\n pub fn result() -> Register {\n\n Register::Rax\n\n }\n\n\n\n pub fn parameters() -> Vec<Register> {\n\n vec![Register::Rdi, Register::Rsi, Register::Rdx, Register::Rcx, Register::R8, Register::R9]\n\n }\n\n\n\n pub fn caller_saved() -> Vec<Register> {\n\n let mut v = Vec::new();\n\n v.push(Register::result());\n\n //v.push(Register::R10);\n\n //v.push(Register::R11);\n\n v.append(& mut Register::parameters());\n\n v\n", "file_path": "src/common/register.rs", "rank": 5, "score": 27900.814784305105 }, { "content": "/* Opération x86_64\n\n\n\n*/\n\nuse std::fmt::Display;\n\nuse std::fmt;\n\n\n\n// Opérations unaires\n\n#[allow(non_camel_case_types)]\n\n#[derive(Debug)]\n\npub enum x64UnaryOp {\n\n addi(i64),\n\n sete,\n\n setne,\n\n setl,\n\n setle,\n\n setg,\n\n setge,\n\n}\n\n\n\nimpl Display for x64UnaryOp {\n", "file_path": "src/common/ops.rs", "rank": 6, "score": 27900.14785702162 }, { "content": " div,\n\n cmp, // dest - src\n\n test,// dest & src\n\n}\n\n\n\nimpl Display for x64BinaryOp {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n x64BinaryOp::mov => {\n\n write!(f, \"movq\")\n\n },\n\n x64BinaryOp::add => {\n\n write!(f, \"addq\")\n\n },\n\n x64BinaryOp::sub => {\n\n write!(f, \"subq\")\n\n },\n\n x64BinaryOp::mul => {\n\n write!(f, \"imulq\")\n\n },\n", "file_path": "src/common/ops.rs", "rank": 7, "score": 27900.083866954694 }, { "content": " }\n\n }\n\n\n\n pub fn is_hardware(&self) -> bool {\n\n !self.is_pseudo()\n\n }\n\n}\n\n\n\nimpl Display for Register {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n 
Register::Rax => {return write!(f, \"%rax\");},\n\n Register::Rbx => {return write!(f, \"%rbx\");},\n\n Register::Rcx => {return write!(f, \"%rcx\");},\n\n Register::Rdx => {return write!(f, \"%rdx\");},\n\n Register::Rdi => {return write!(f, \"%rdi\");},\n\n Register::Rsi => {return write!(f, \"%rsi\");},\n\n Register::Rsp => {return write!(f, \"%rsp\");},\n\n Register::Rbp => {return write!(f, \"%rbp\");},\n\n Register::R8 => {return write!(f, \"%r8 \");},\n", "file_path": "src/common/register.rs", "rank": 8, "score": 27899.54594337497 }, { "content": " Register::R9 => {return write!(f, \"%r9 \");},\n\n Register::R10 => {return write!(f, \"%r10\");},\n\n Register::R11 => {return write!(f, \"%r11\");},\n\n Register::R12 => {return write!(f, \"%r12\");},\n\n Register::R13 => {return write!(f, \"%r13\");},\n\n Register::R14 => {return write!(f, \"%r14\");},\n\n Register::R15 => {return write!(f, \"%r15\");},\n\n Register::Pseudo(i) => {return write!(f, \"#{:?}\", i);},\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct RegisterAllocator {\n\n count: i64,\n\n}\n\n\n\n\n\nimpl RegisterAllocator {\n\n pub fn new() -> Self {\n\n RegisterAllocator{count: 0}\n\n }\n\n pub fn fresh(& mut self) -> Register {\n\n let res = Register::Pseudo(self.count);\n\n self.count += 1;\n\n res\n\n }\n\n}\n", "file_path": "src/common/register.rs", "rank": 9, "score": 27898.36570018147 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n x64UnaryOp::addi(ival) => {\n\n write!(f, \"addq ${}, \", ival)\n\n },\n\n _ => {\n\n write!(f, \"{:?}\", self)\n\n }\n\n }\n\n }\n\n}\n\n\n\n// Opérations binaires\n\n#[allow(non_camel_case_types)]\n\n#[derive(Debug)]\n\npub enum x64BinaryOp {\n\n mov,\n\n add,\n\n sub,\n\n mul,\n", "file_path": "src/common/ops.rs", "rank": 10, "score": 27898.184011432 }, { "content": " jle,\n\n jl,\n\n jge,\n\n jg,\n\n}\n\n\n\nimpl Display for x64Branch {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> 
Result<(), fmt::Error> {\n\n match self {\n\n _ => {\n\n write!(f, \"{:?}\", self)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/common/ops.rs", "rank": 11, "score": 27897.895394986692 }, { "content": " }\n\n\n\n pub fn callee_saved() -> Vec<Register> {\n\n vec![Register::Rbx, Register::R12, Register::R13, Register::R14, Register::R15]\n\n }\n\n\n\n pub fn allocatable() -> Vec<Register> {\n\n let mut v = Register::caller_saved();\n\n v.append(& mut Register::callee_saved());\n\n v\n\n }\n\n\n\n pub fn tmp() -> (Register, Register) {\n\n (Register::R10, Register::R11)\n\n }\n\n\n\n pub fn is_pseudo(&self) -> bool {\n\n match *self {\n\n Register::Pseudo(_) => true,\n\n _ => false,\n", "file_path": "src/common/register.rs", "rank": 12, "score": 27896.142222589537 }, { "content": "use std::fmt;\n\nuse std::fmt::Display;\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]\n\npub enum Register {\n\n Rax,\n\n Rbx,\n\n Rcx,\n\n Rdx,\n\n Rdi,\n\n Rsi,\n\n Rsp,\n\n Rbp,\n\n R8,\n\n R9,\n\n R10,\n\n R11,\n\n R12,\n\n R13,\n\n R14,\n", "file_path": "src/common/register.rs", "rank": 13, "score": 27893.98376090414 }, { "content": " x64BinaryOp::div => {\n\n write!(f, \"idivq\")\n\n },\n\n x64BinaryOp::cmp => {\n\n write!(f, \"cmpq\")\n\n },\n\n x64BinaryOp::test => {\n\n write!(f, \"testq\")\n\n },\n\n }\n\n }\n\n}\n\n\n\n// Operations de branchement\n\n#[allow(non_camel_case_types)]\n\n#[derive(Debug)]\n\npub enum x64Branch {\n\n #[allow(non_camel_case_types)]\n\n je,\n\n jne,\n", "file_path": "src/common/ops.rs", "rank": 14, "score": 27888.885333784052 }, { "content": "// Identifiers\n\nfn check_keyword(id: &str) -> Option<String> {\n\n let kwds = vec![\"int\", \"struct\", \"sizeof\", \"if\", \"else\", \"while\", \"return\"];\n\n if kwds.contains(&id) {\n\n None\n\n } else {\n\n Some(String::from(id))\n\n }\n\n}\n", "file_path": "src/parse/lexer.rs", "rank": 15, "score": 27395.468195357073 }, { "content": "fn build_binop_left_assoc_tree(first: Node<Expression>,\n\n v: 
&[(Node<BinaryOp>, Node<Expression>)])\n\n -> Node<Expression> {\n\n let mut vm = Vec::from(v);\n\n build_binop_left_assoc_tree_aux(first, &mut vm)\n\n}\n\n\n", "file_path": "src/parse/parser.rs", "rank": 16, "score": 26711.935381987438 }, { "content": "fn build_binop_left_assoc_tree_aux(first: Node<Expression>,\n\n v: &mut Vec<(Node<BinaryOp>, Node<Expression>)>)\n\n -> Node<Expression> {\n\n if let Some((op, second)) = v.pop() {\n\n let first = build_binop_left_assoc_tree_aux(first, v);\n\n Node {\n\n start: first.start.clone(),\n\n stop: second.stop.clone(),\n\n t: Expression::Binary(Box::new(first), op, Box::new(second)),\n\n }\n\n } else {\n\n first\n\n }\n\n}\n", "file_path": "src/parse/parser.rs", "rank": 17, "score": 25971.785400485907 }, { "content": " }\n\n\n\n Instruction::Return => {\n\n let mut v = vec![Register::Rax];\n\n v.append(& mut Register::callee_saved());\n\n (vec![],v)\n\n }\n\n }\n\n\n\n }\n\n}\n\n\n\nimpl Display for Instruction {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n Instruction::Const(ref value, ref reg, ref label) => {\n\n write!(f, \"mov ${} {} --> {}\", value, reg, label)\n\n },\n\n Instruction::AccessGlobal(ref name, ref reg, ref label) => {\n\n write!(f, \"mov {} {} --> {}\", name, reg, label)\n", "file_path": "src/ertl/tree.rs", "rank": 20, "score": 38.10228484862655 }, { "content": " },\n\n Instruction::AssignGlobal(ref reg, ref name, ref label) => {\n\n write!(f, \"mov {} {} --> {}\", reg, name, label)\n\n },\n\n Instruction::Load(ref sreg, ref offset, ref dreg, ref label) => {\n\n write!(f, \"mov {}({}) {} --> {}\", offset, sreg, dreg, label)\n\n },\n\n Instruction::Store(ref sreg, ref dreg, ref offset, ref label) => {\n\n write!(f, \"mov {} {}({}) --> {}\", sreg, offset, dreg, label)\n\n },\n\n Instruction::UnaryOp(ref op, ref reg, ref label) => {\n\n write!(f, \"{} {} --> {}\", op, reg, label)\n\n },\n\n Instruction::BinaryOp(ref op, ref sreg, ref dreg, ref label) => {\n\n 
write!(f, \"{} {} {} --> {}\", op, sreg, dreg, label)\n\n },\n\n Instruction::Call(ref name, ref argc, ref label) => {\n\n write!(f, \"call {}({}) --> {}\", name, argc, label)\n\n },\n\n Instruction::Goto(ref label) => {\n", "file_path": "src/ertl/tree.rs", "rank": 21, "score": 37.89219811263879 }, { "content": " },\n\n Instruction::AccessGlobal(ref name, ref reg, ref label) => {\n\n write!(f, \"mov {} {} --> {}\", name, reg, label)\n\n },\n\n Instruction::AssignGlobal(ref reg, ref name, ref label) => {\n\n write!(f, \"mov {} {} --> {}\", reg, name, label)\n\n },\n\n Instruction::Load(ref sreg, ref offset, ref dreg, ref label) => {\n\n write!(f, \"mov {}({}) {} --> {}\", offset, sreg, dreg, label)\n\n },\n\n Instruction::Store(ref sreg, ref dreg, ref offset, ref label) => {\n\n write!(f, \"mov {} {}({}) --> {}\", sreg, offset, dreg, label)\n\n },\n\n Instruction::UnaryOp(ref op, ref reg, ref label) => {\n\n write!(f, \"{} {} --> {}\", op, reg, label)\n\n },\n\n Instruction::BinaryOp(ref op, ref sreg, ref dreg, ref label) => {\n\n write!(f, \"{} {} {} --> {}\", op, sreg, dreg, label)\n\n },\n\n Instruction::Call(ref name, ref label) => {\n", "file_path": "src/ltl/tree.rs", "rank": 22, "score": 37.84905015235777 }, { "content": " | Instruction::Leave(ref label)\n\n | Instruction::GetParam(_, _, ref label)\n\n | Instruction::PushParam(_, ref label) => {\n\n vec![label.clone()]\n\n },\n\n Instruction::Branch(_, ref label1, ref label2) => {\n\n vec![label2.clone(), label1.clone()]\n\n },\n\n Instruction::Return => {\n\n Vec::new()\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Instruction {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n Instruction::Const(ref value, ref reg, ref label) => {\n\n write!(f, \"mov ${} {} --> {}\", value, reg, label)\n", "file_path": "src/ltl/tree.rs", "rank": 23, "score": 35.35092527138801 }, { "content": "use common::ops;\n\npub use parse::ast::Ident;\n\n//use std::vec;\n\nuse 
std::collections::{HashMap, HashSet};\n\nuse std::fmt::Display;\n\nuse std::fmt;\n\npub use common::register::Register;\n\npub use common::label::Label;\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum Operand {\n\n Reg(Register),\n\n Spilled(usize),\n\n}\n\n\n\nimpl Display for Operand {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match *self {\n\n Operand::Reg(r) => {\n\n r.fmt(f)\n", "file_path": "src/ltl/tree.rs", "rank": 26, "score": 29.816770621067384 }, { "content": " | Instruction::UnaryOp(_, _, ref label)\n\n | Instruction::BinaryOp(_, _, _, ref label)\n\n | Instruction::Call(_, _, ref label)\n\n | Instruction::Goto(ref label)\n\n | Instruction::AllocFrame(ref label)\n\n | Instruction::DeleteFrame(ref label)\n\n | Instruction::GetParam(_, _, ref label)\n\n | Instruction::PushParam(_, ref label) => {\n\n vec![label.clone()]\n\n },\n\n Instruction::Branch(_, ref label1, ref label2) => {\n\n vec![label2.clone(), label1.clone()]\n\n },\n\n Instruction::Return => {\n\n Vec::new()\n\n }\n\n }\n\n }\n\n\n\n pub fn define_use(&self) -> (Vec<Register>, Vec<Register>) {\n", "file_path": "src/ertl/tree.rs", "rank": 27, "score": 28.704657069849404 }, { "content": " match op {\n\n tast::UnaryOp::Not => {\n\n if let Some(result) = result_reg {\n\n let label1 = self.label_allocator.fresh();\n\n let label2 = self.label_allocator.fresh();\n\n let src_register = self.register_allocator.fresh();\n\n self.instructions.insert(label1, Instruction::BinaryOp(x64BinaryOp::test, src_register, src_register, label2));\n\n self.instructions.insert(label2, Instruction::UnaryOp(x64UnaryOp::sete, result, exit));\n\n self.expression(expr, label1, Some(src_register))\n\n } else {\n\n self.expression(expr, exit, None)\n\n }\n\n },\n\n tast::UnaryOp::Minus => {\n\n if let Some(result) = result_reg {\n\n let label1 = self.label_allocator.fresh();\n\n let label2 = self.label_allocator.fresh();\n\n let src_register = self.register_allocator.fresh();\n\n 
self.instructions.insert(label1, Instruction::Const(0, result, label2));\n\n self.instructions.insert(label2, Instruction::BinaryOp(x64BinaryOp::sub, src_register, result, exit));\n", "file_path": "src/rtl/builder.rs", "rank": 28, "score": 28.315272129907534 }, { "content": " self.instructions.insert(label3, Instruction::BinaryOp(x64BinaryOp::test, cnd_reg, cnd_reg, label2));\n\n loop_begin\n\n },\n\n //_ => Err(String::from(\"Unimplemented\")),\n\n }\n\n }\n\n\n\n fn expr_const(& mut self, ivalue: i64, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let Some(result) = result_reg {\n\n let label = self.label_allocator.fresh();\n\n self.instructions.insert(label, Instruction::Const(ivalue, result, exit));\n\n label\n\n } else {\n\n exit // Noop !\n\n }\n\n }\n\n\n\n fn expr_lvalue(& mut self, name: String, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let Some(result) = result_reg {\n\n let label = self.label_allocator.fresh();\n", "file_path": "src/rtl/builder.rs", "rank": 29, "score": 27.727631872172218 }, { "content": " self.expression(expr, label1, Some(src_register))\n\n } else {\n\n self.expression(expr, exit, None)\n\n }\n\n }\n\n }\n\n }\n\n\n\n // This function is used for lvalue used as destination of affectation.\n\n fn expr_affect_dest(& mut self, expr: tast::Expression, exit: Label, source_reg: Register) -> Label {\n\n if expr.kind.lvalue() {\n\n match expr.kind {\n\n tast::ExprKind::Lvalue(name) => {\n\n let label = self.label_allocator.fresh();\n\n if let Some(dest_register) = self.find_var(&name) {\n\n self.instructions.insert(label, Instruction::BinaryOp(x64BinaryOp::mov, source_reg, dest_register, exit));\n\n } else { // Global Variable\n\n self.instructions.insert(label, Instruction::AssignGlobal(source_reg, name, exit));\n\n }\n\n label\n", "file_path": "src/rtl/builder.rs", "rank": 30, "score": 27.671053678202856 }, { "content": " let label = self.label_allocator.fresh();\n\n self.instructions.insert(label, 
Instruction::Branch(op, exit_true, exit_false));\n\n let label2 = self.label_allocator.fresh();\n\n let l_reg = self.register_allocator.fresh();\n\n let r_reg = self.register_allocator.fresh();\n\n self.instructions.insert(label2, Instruction::BinaryOp(x64BinaryOp::cmp, r_reg, l_reg, label));\n\n let label3 = self.expression(rhs, label2, Some(r_reg));\n\n self.expression(lhs, label3, Some(l_reg))\n\n }\n\n\n\n fn expr_bin_and(& mut self, lhs: tast::Expression, rhs: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n let (result, label) = if let Some(result) = result_reg {\n\n let cnd_reg = self.register_allocator.fresh();\n\n let label1 = self.label_allocator.fresh();\n\n let label2 = self.label_allocator.fresh();\n\n let label3 = self.label_allocator.fresh();\n\n self.instructions.insert(label1, Instruction::Const(1, result, exit));\n\n self.instructions.insert(label2, Instruction::Branch(x64Branch::je, exit, label1));\n\n self.instructions.insert(label3, Instruction::BinaryOp(x64BinaryOp::test, cnd_reg, cnd_reg, label2));\n\n (Some(result), self.expression(rhs, label3, Some(cnd_reg)))\n", "file_path": "src/rtl/builder.rs", "rank": 31, "score": 27.359146586408986 }, { "content": " UnaryOp(ops::x64UnaryOp, Register, Label),\n\n BinaryOp(ops::x64BinaryOp, Register, Register, Label),\n\n Branch(ops::x64Branch, Label, Label),\n\n Call(Ident, usize, Label),\n\n Goto(Label),\n\n AllocFrame(Label),\n\n DeleteFrame(Label),\n\n GetParam(usize, Register, Label), // Parameters will be indexed starting from 1 in reverse order on the stack. 
This convention makes for an easier use of index, as it gives the index when starting from %rbp.\n\n PushParam(Register, Label),\n\n Return,\n\n}\n\n\n\nimpl Instruction {\n\n pub fn successors(&self) -> Vec<Label> { // Fixme !!\n\n match *self {\n\n Instruction::Const(_,_, ref label)\n\n | Instruction::AccessGlobal(_, _, ref label)\n\n | Instruction::AssignGlobal(_, _, ref label)\n\n | Instruction::Load(_, _, _, ref label)\n\n | Instruction::Store(_, _, _, ref label)\n", "file_path": "src/ertl/tree.rs", "rank": 32, "score": 27.042161514741338 }, { "content": " }\n\n\n\n fn expr_bin_mult(& mut self, lhs: tast::Expression, rhs: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let Some(result) = result_reg {\n\n let label2 = self.label_allocator.fresh();\n\n let src_register = self.register_allocator.fresh();\n\n let label1 = self.expression(rhs, label2.clone(), Some(src_register));\n\n self.instructions.insert(label2, Instruction::BinaryOp(x64BinaryOp::mul, src_register, result, exit));\n\n self.expression(lhs, label1, Some(result))\n\n } else {\n\n let label = self.expression(lhs, exit, None);\n\n self.expression(rhs, label, None)\n\n }\n\n }\n\n\n\n fn expr_bin_minus(& mut self, lhs: tast::Expression, rhs: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let Some(result) = result_reg {\n\n let label2 = self.label_allocator.fresh();\n\n let src_register = self.register_allocator.fresh();\n\n let label1 = self.expression(rhs, label2, Some(src_register));\n", "file_path": "src/rtl/builder.rs", "rank": 34, "score": 26.783365047178027 }, { "content": " self.instructions.insert(label2, Instruction::BinaryOp(x64BinaryOp::sub, src_register, result, exit));\n\n self.expression(lhs, label1, Some(result))\n\n } else {\n\n let label = self.expression(lhs, exit, None);\n\n self.expression(rhs, label, None)\n\n }\n\n }\n\n\n\n fn expr_bin_div(& mut self, lhs: tast::Expression, rhs: tast::Expression, exit: Label, 
result_reg: Option<Register>) -> Label {\n\n if let Some(result) = result_reg {\n\n let label2 = self.label_allocator.fresh();\n\n let src_register = self.register_allocator.fresh();\n\n let label1 = self.expression(rhs, label2, Some(src_register));\n\n self.instructions.insert(label2, Instruction::BinaryOp(x64BinaryOp::div, src_register, result, exit));\n\n self.expression(lhs, label1, Some(result))\n\n } else {\n\n let label = self.expression(lhs, exit, None);\n\n self.expression(rhs, label, None)\n\n }\n\n }\n", "file_path": "src/rtl/builder.rs", "rank": 35, "score": 26.765119079648375 }, { "content": " assert_eq!(dreg, ltl::Operand::Reg(Register::Rax), \"division destination should be %rax\");\n\n self.instructions.push((Some(label), format!(\" cqto\\n {} {}\", x64BinaryOp::div, sreg))); // cqto is used to sign extend the source register.\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::BinaryOp(op, sreg, dreg, next) => {\n\n self.instructions.push((Some(label), format!(\" {} {}, {}\", op, sreg, dreg)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::Call(name, next) => {\n\n self.instructions.push((Some(label), format!(\" call {}\", name)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::Goto(next) => {\n\n self.instructions.push((Some(label), String::new()));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::Branch(op, jump_label, next) => {\n\n self.instructions.push((Some(label), format!(\" {} .{}.{}\", op, self.name, jump_label)));\n\n self.visit(visited, next, instructions);\n\n self.labels.insert(jump_label);\n", "file_path": "src/output/builder.rs", "rank": 36, "score": 26.580530295621607 }, { "content": "use common::ops;\n\npub use parse::ast::Ident;\n\n//use std::vec;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::fmt::Display;\n\nuse std::fmt;\n\nuse common::register::Register;\n\nuse common::label::Label;\n\nuse 
common::label::LabelAllocator;\n\nuse common::ops::*;\n\nuse ertl::liveness::LivenessInfo;\n\n\n\n\n\n#[derive(Debug)]\n\npub enum Instruction {\n\n Const(i64, Register, Label),\n\n AccessGlobal(Ident, Register, Label),\n\n AssignGlobal(Register, Ident, Label),\n\n Load(Register, i64, Register, Label),\n\n Store(Register, Register, i64, Label),\n", "file_path": "src/ertl/tree.rs", "rank": 37, "score": 26.5693674255527 }, { "content": " if let Some(src_register) = self.find_var(&name) {\n\n self.instructions.insert(label, Instruction::BinaryOp(x64BinaryOp::mov, src_register, result, exit));\n\n } else { // Global Variable\n\n self.instructions.insert(label, Instruction::AccessGlobal(name, result, exit));\n\n }\n\n label\n\n } else {\n\n exit // Noop !\n\n }\n\n }\n\n\n\n fn expr_sizeof(& mut self, typename: String, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let Some(result) = result_reg {\n\n let label = self.label_allocator.fresh();\n\n let ivalue = self.types[&typename].size();\n\n self.instructions.insert(label, Instruction::Const(ivalue, result, exit));\n\n label\n\n } else {\n\n exit // Noop !\n\n }\n", "file_path": "src/rtl/builder.rs", "rank": 38, "score": 26.524464119191407 }, { "content": " try!(write!(f, \"{}\", glob));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct FunctionOutput {\n\n pub name: String,\n\n pub instructions: Vec<(Option<Label>, String)>,\n\n pub labels: HashSet<Label>,\n\n}\n\n\n\nimpl Display for FunctionOutput {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n try!(write!(f, \"{}:\\n\", self.name));\n\n for &(ref label, ref instruction) in &self.instructions {\n\n if let Some(l) = *label {\n\n if self.labels.contains(&l) {\n\n try!(write!(f, \".{}.{}:\\n\", self.name, l));\n\n }\n", "file_path": "src/output/asm.rs", "rank": 39, "score": 26.192073200545327 }, { "content": " label5\n\n } else {\n\n label4\n\n }\n\n }\n\n\n\n fn expr_call(& mut self, name: String, parameters: Vec<tast::Expression>, 
exit: Label, result_reg: Option<Register>) -> Label {\n\n let result = if let Some(result) = result_reg {\n\n result\n\n } else {\n\n self.register_allocator.fresh()\n\n };\n\n let mut registers = Vec::new();\n\n let mut label : Label = self.label_allocator.fresh();\n\n for _ in 0..parameters.len() {\n\n registers.push(self.register_allocator.fresh());\n\n }\n\n self.instructions.insert(label, Instruction::Call(result, name, registers.clone(), exit));\n\n for (reg, parameter) in registers.into_iter().zip(parameters).rev() {\n\n label = self.expression(parameter, label, Some(reg));\n", "file_path": "src/rtl/builder.rs", "rank": 40, "score": 25.71513504274937 }, { "content": " write!(f, \"call {}() --> {}\", name, label)\n\n },\n\n Instruction::Goto(ref label) => {\n\n write!(f, \"goto {}\", label)\n\n },\n\n Instruction::Branch(ref branch_op, ref label1, ref label2) => {\n\n write!(f, \"{} --> {}, {}\", branch_op, label1, label2)\n\n },\n\n Instruction::Enter(size, ref label) => {\n\n write!(f, \"enter ${} --> {}\", size, label)\n\n },\n\n Instruction::Leave(ref label) => {\n\n write!(f, \"leave --> {}\", label)\n\n },\n\n Instruction::GetParam(ref index, ref dest, ref label) => {\n\n write!(f, \"mov stackp({}) {} --> {}\", index, dest, label)\n\n },\n\n Instruction::PushParam(ref src, ref label) => {\n\n write!(f, \"push {} --> {}\", src, label)\n\n },\n", "file_path": "src/ltl/tree.rs", "rank": 41, "score": 25.569341277814498 }, { "content": " fn expr_bin_affect(& mut self, lhs: tast::Expression, rhs: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n let source_reg = if let Some(result) = result_reg {\n\n result\n\n } else {\n\n self.register_allocator.fresh()\n\n };\n\n\n\n let label = self.expr_affect_dest(lhs, exit, source_reg);\n\n self.expression(rhs, label, Some(source_reg))\n\n\n\n }\n\n\n\n fn expr_bin_plus_const(& mut self, ivalue: i64, expr: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let 
Some(result) = result_reg {\n\n let label1 = self.label_allocator.fresh();\n\n self.instructions.insert(label1, Instruction::UnaryOp(x64UnaryOp::addi(ivalue), result, exit));\n\n self.expression(expr, label1, Some(result))\n\n } else {\n\n self.expression(expr, exit, None)\n\n }\n", "file_path": "src/rtl/builder.rs", "rank": 42, "score": 25.564468654080645 }, { "content": "\n\n }\n\n\n\n fn expr_bin_plus(& mut self, lhs: tast::Expression, rhs: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n match (&lhs.kind, &rhs.kind) {\n\n (&tast::ExprKind::Const(ivalue), _) => {self.expr_bin_plus_const(ivalue, rhs, exit, result_reg)},\n\n (_, &tast::ExprKind::Const(ivalue)) => {self.expr_bin_plus_const(ivalue, lhs, exit, result_reg)},\n\n _ => {\n\n if let Some(result) = result_reg {\n\n let label2 = self.label_allocator.fresh();\n\n let src_register = self.register_allocator.fresh();\n\n let label1 = self.expression(rhs, label2, Some(src_register));\n\n self.instructions.insert(label2, Instruction::BinaryOp(x64BinaryOp::add, src_register, result, exit));\n\n self.expression(lhs, label1, Some(result))\n\n } else {\n\n let label = self.expression(lhs, exit, None);\n\n self.expression(rhs, label, None)\n\n }\n\n },\n\n }\n", "file_path": "src/rtl/builder.rs", "rank": 43, "score": 25.556025631121976 }, { "content": " let cnd_reg = self.register_allocator.fresh();\n\n let label1 = self.label_allocator.fresh();\n\n let label2 = self.label_allocator.fresh();\n\n let label3 = self.label_allocator.fresh();\n\n self.instructions.insert(label1, Instruction::Const(0, result, exit));\n\n self.instructions.insert(label2, Instruction::Branch(x64Branch::jne, exit, label1));\n\n self.instructions.insert(label3, Instruction::BinaryOp(x64BinaryOp::test, cnd_reg, cnd_reg, label2));\n\n (Some(result), self.expression(rhs, label3, Some(cnd_reg)))\n\n } else {\n\n (None, self.expression(rhs, exit.clone(), None))\n\n };\n\n let cnd_reg = self.register_allocator.fresh();\n\n 
let label2 = self.label_allocator.fresh();\n\n let label3 = self.label_allocator.fresh();\n\n self.instructions.insert(label2, Instruction::Branch(x64Branch::jne, exit, label));\n\n self.instructions.insert(label3, Instruction::BinaryOp(x64BinaryOp::test, cnd_reg, cnd_reg, label2));\n\n let label4 = self.expression(lhs, label3, Some(cnd_reg));\n\n if let Some(result) = result {\n\n let label5 = self.label_allocator.fresh();\n\n self.instructions.insert(label5, Instruction::Const(1, result, label4));\n", "file_path": "src/rtl/builder.rs", "rank": 44, "score": 24.723014182224993 }, { "content": " Enter(usize, Label), // Use the usual x86_64 conventions (r10 an r11 as temporaries), rbp as base pointer.\n\n Leave(Label),\n\n GetParam(usize, Register, Label),\n\n PushParam(Operand, Label),\n\n Return,\n\n}\n\n\n\nimpl Instruction {\n\n pub fn successors(&self) -> Vec<Label> { // Fixme !!\n\n match *self {\n\n Instruction::Const(_,_, ref label)\n\n | Instruction::AccessGlobal(_, _, ref label)\n\n | Instruction::AssignGlobal(_, _, ref label)\n\n | Instruction::Load(_, _, _, ref label)\n\n | Instruction::Store(_, _, _, ref label)\n\n | Instruction::UnaryOp(_, _, ref label)\n\n | Instruction::BinaryOp(_, _, _, ref label)\n\n | Instruction::Call(_, ref label)\n\n | Instruction::Goto(ref label)\n\n | Instruction::Enter(_, ref label)\n", "file_path": "src/ltl/tree.rs", "rank": 45, "score": 24.695974184715435 }, { "content": "\n\n\n\n\n\n fn expr_bin_cmp(& mut self, op: x64UnaryOp, lhs: tast::Expression, rhs: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let Some(result) = result_reg {\n\n let label = self.label_allocator.fresh();\n\n self.instructions.insert(label, Instruction::UnaryOp(op, result, exit));\n\n let label2 = self.label_allocator.fresh();\n\n let l_reg = self.register_allocator.fresh();\n\n let r_reg = self.register_allocator.fresh();\n\n self.instructions.insert(label2, Instruction::BinaryOp(x64BinaryOp::cmp, r_reg, l_reg, 
label));\n\n let label3 = self.expression(rhs, label2, Some(r_reg));\n\n self.expression(lhs, label3, Some(l_reg))\n\n } else {\n\n let label = self.expression(rhs, exit, None);\n\n self.expression(lhs, label, None)\n\n }\n\n }\n\n\n\n fn cnd_cmp(& mut self, op: x64Branch, lhs: tast::Expression, rhs: tast::Expression, exit_false: Label, exit_true: Label) -> Label {\n", "file_path": "src/rtl/builder.rs", "rank": 46, "score": 24.606494270839363 }, { "content": " }\n\n\n\n fn expr_memb_deref(& mut self, expr: tast::Expression, membername: String, exit: Label, result_reg: Option<Register>) -> Label {\n\n if let tast::Type::Struct(typename) = expr.typ.clone() {\n\n let typ = &self.types[&typename];\n\n let index = typ.index[&membername];\n\n if let Some(result) = result_reg {\n\n let label = self.label_allocator.fresh();\n\n let src_register = self.register_allocator.fresh();\n\n self.instructions.insert(label, Instruction::Load(src_register, index as i64 * 8 , result, exit));\n\n self.expression(expr, label, Some(src_register))\n\n } else {\n\n self.expression(expr, exit, None)\n\n }\n\n } else {\n\n panic!(\"Dereferencing of non struct type, typer failed.\");\n\n }\n\n }\n\n\n\n fn expr_unary(& mut self, op: tast::UnaryOp, expr: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n", "file_path": "src/rtl/builder.rs", "rank": 47, "score": 24.541366097228472 }, { "content": " },\n\n Operand::Spilled(index) => {\n\n write!(f, \"{}(%rbp)\", -(index as i64 * 8 + 8))\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Instruction {\n\n Const(i64, Operand, Label),\n\n AccessGlobal(Ident, Register, Label),\n\n AssignGlobal(Register, Ident, Label),\n\n Load(Register, i64, Register, Label),\n\n Store(Register, Register, i64, Label),\n\n UnaryOp(ops::x64UnaryOp, Operand, Label),\n\n BinaryOp(ops::x64BinaryOp, Operand, Operand, Label),\n\n Branch(ops::x64Branch, Label, Label),\n\n Call(Ident, Label),\n\n Goto(Label),\n", "file_path": 
"src/ltl/tree.rs", "rank": 48, "score": 23.882291075830906 }, { "content": " } else {\n\n (None, self.expression(rhs, exit.clone(), None))\n\n };\n\n let cnd_reg = self.register_allocator.fresh();\n\n let label2 = self.label_allocator.fresh();\n\n let label3 = self.label_allocator.fresh();\n\n self.instructions.insert(label2, Instruction::Branch(x64Branch::je, exit, label));\n\n self.instructions.insert(label3, Instruction::BinaryOp(x64BinaryOp::test, cnd_reg, cnd_reg, label2));\n\n let label4 = self.expression(lhs, label3, Some(cnd_reg));\n\n if let Some(result) = result {\n\n let label5 = self.label_allocator.fresh();\n\n self.instructions.insert(label5, Instruction::Const(0, result, label4));\n\n label5\n\n } else {\n\n label4\n\n }\n\n }\n\n\n\n fn expr_bin_or(& mut self, lhs: tast::Expression, rhs: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n let (result, label) = if let Some(result) = result_reg {\n", "file_path": "src/rtl/builder.rs", "rank": 49, "score": 23.382004649436148 }, { "content": " if visited.contains(&label) {\n\n self.labels.insert(label);\n\n self.instructions.push((None, format!(\" jmp .{}.{}\", self.name, label)));\n\n } else {\n\n visited.insert(label);\n\n self.output(visited, label, instructions);\n\n }\n\n }\n\n\n\n fn output(& mut self, visited : & mut HashSet<Label>, label: Label, instructions: &mut HashMap<Label, ltl::Instruction>) {\n\n if let Some(i) = instructions.remove(&label) {\n\n match i {\n\n ltl::Instruction::Const(value, op, next) => {\n\n self.instructions.push((Some(label), format!(\" movq ${}, {}\", value, op)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::AccessGlobal(name, reg, next) => {\n\n self.instructions.push((Some(label), format!(\" movq {}(%rip), {}\", name, reg)));\n\n self.visit(visited, next, instructions);\n\n },\n", "file_path": "src/output/builder.rs", "rank": 50, "score": 23.034634533782537 }, { "content": " fn lookup(&self, reg: Register) -> 
Operand {\n\n self.register_affectations[&reg]\n\n }\n\n\n\n fn fix_dest(&mut self, dest: Operand, tmp_reg: Register, next: Label) -> (Register, Label) {\n\n match dest {\n\n Operand::Spilled(i) => {\n\n let tmp = self.label_allocator.fresh();\n\n self.body.insert(tmp, Instruction::BinaryOp(x64BinaryOp::mov, Operand::Reg(tmp_reg), Operand::Spilled(i), next));\n\n (tmp_reg, tmp)\n\n },\n\n Operand::Reg(r) => {\n\n (r, next)\n\n },\n\n }\n\n }\n\n\n\n fn fix_src(&mut self, entry: Label, src: Operand, tmp_reg: Register) -> (Label, Register) {\n\n match src {\n\n Operand::Spilled(i) => {\n", "file_path": "src/ltl/builder.rs", "rank": 51, "score": 22.65963240715528 }, { "content": " ltl::Instruction::AssignGlobal(reg, name, next) => {\n\n self.instructions.push((Some(label), format!(\" movq {}, {}(%rip)\", reg, name)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::Load(src, offset, dest, next) => {\n\n self.instructions.push((Some(label), format!(\" movq {}({}), {}\", offset, src, dest)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::Store(src, dest, offset, next) => {\n\n self.instructions.push((Some(label), format!(\" movq {}, {}({})\", src, offset, dest)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::UnaryOp(x64UnaryOp::addi(i), reg, next) => {\n\n self.instructions.push((Some(label), format!(\" addq ${}, {}\", i, reg)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::UnaryOp(op, reg, next) => { // setcc instruction\n\n let byte_reg = match reg {\n\n ltl::Operand::Reg(Register::Rax) => Cow::from(\"%al\"),\n\n ltl::Operand::Reg(Register::Rbx) => Cow::from(\"%bl\"),\n", "file_path": "src/output/builder.rs", "rank": 52, "score": 22.407692406736658 }, { "content": " write!(f, \"goto {}\", label)\n\n },\n\n Instruction::Branch(ref branch_op, ref label1, ref label2) => {\n\n write!(f, \"{} --> {}, {}\", branch_op, label1, label2)\n\n },\n\n 
Instruction::AllocFrame(ref label) => {\n\n write!(f, \"alloc_frame --> {}\", label)\n\n },\n\n Instruction::DeleteFrame(ref label) => {\n\n write!(f, \"delete_frame --> {}\", label)\n\n },\n\n Instruction::GetParam(ref index, ref dest, ref label) => {\n\n write!(f, \"mov stackp({}) {} --> {}\", index, dest, label)\n\n },\n\n Instruction::PushParam(ref src, ref label) => {\n\n write!(f, \"push {} --> {}\", src, label)\n\n },\n\n Instruction::Return => {\n\n write!(f, \"return\")\n\n },\n", "file_path": "src/ertl/tree.rs", "rank": 53, "score": 22.07929973027951 }, { "content": " Instruction::Return => {\n\n write!(f, \"return\")\n\n },\n\n //_ => write!(f, \"{:#?}\\n\", self)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct FuncDefinition {\n\n pub name: Ident,\n\n pub entry: Label,\n\n pub body: HashMap<Label, Instruction>,\n\n}\n\n\n\nimpl FuncDefinition {\n\n fn print_body(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let mut visited = HashSet::<Label>::new();\n\n self.visit(&mut visited, self.entry, f)\n\n }\n", "file_path": "src/ltl/tree.rs", "rank": 54, "score": 22.066193461035354 }, { "content": " intfs: HashSet::new(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Graph { //Contains the graph and useful metadata.\n\n graph: HashMap<Register, Arcs>,\n\n result: HashMap<Register, Operand>,\n\n possible_colors : HashMap<Register, HashSet<Register>>,\n\n spilled: usize,\n\n todo: HashSet<Register>,\n\n}\n\n\n\nimpl Graph {\n\n\n\n // Initialise the graph and compute interferences, possible colors and preferences.\n\n pub fn new(instructions: &HashMap<Label, ertl::Instruction>, liveness: &HashMap<Label, liveness::LivenessInfo>) -> Self {\n\n let mut graph = HashMap::new();\n\n let mut movs = HashMap::new();\n", "file_path": "src/ltl/interference.rs", "rank": 55, "score": 22.048364530018546 }, { "content": " 0 => Instruction::BinaryOp(x64BinaryOp::mov, Register::Rdi, self.formals[i], next), // Rdi\n\n 1 => 
Instruction::BinaryOp(x64BinaryOp::mov, Register::Rsi, self.formals[i], next), // Rsi\n\n 2 => Instruction::BinaryOp(x64BinaryOp::mov, Register::Rdx, self.formals[i], next), // Rdx\n\n 3 => Instruction::BinaryOp(x64BinaryOp::mov, Register::Rcx, self.formals[i], next), // Rcx\n\n 4 => Instruction::BinaryOp(x64BinaryOp::mov, Register::R8, self.formals[i], next), // R8\n\n 5 => Instruction::BinaryOp(x64BinaryOp::mov, Register::R9, self.formals[i], next), // R9\n\n _ => Instruction::GetParam(self.formals.len() - i, self.formals[i], next), // Stack\n\n });\n\n }\n\n\n\n // Save the callee_saved registers.\n\n // This is inserted before fetching the arguments\n\n let mut saved_registers = Vec::new();\n\n let calle_saved_reg = Register::callee_saved();\n\n for i in 0..calle_saved_reg.len() {\n\n let next = prev;\n\n prev = self.label_allocator.fresh();\n\n saved_registers.push(self.register_allocator.fresh());\n\n self.new_body.insert(prev, Instruction::BinaryOp(x64BinaryOp::mov, calle_saved_reg[i], saved_registers[i] , next));\n\n }\n", "file_path": "src/ertl/builder.rs", "rank": 56, "score": 21.57959247756846 }, { "content": " // Inseted before backing up the registers.\n\n let new_entry = self.label_allocator.fresh();\n\n self.new_body.insert(new_entry, Instruction::AllocFrame(prev));\n\n\n\n // end of the function\n\n let mut new_exit = self.label_allocator.fresh();\n\n // move result\n\n self.new_body.insert(self.exit , Instruction::BinaryOp(x64BinaryOp::mov, self.result, Register::Rax, new_exit));\n\n // restore registers\n\n for i in 0..calle_saved_reg.len() {\n\n let exit = new_exit;\n\n new_exit = self.label_allocator.fresh();\n\n self.new_body.insert(exit, Instruction::BinaryOp(x64BinaryOp::mov, saved_registers[i], calle_saved_reg[i], new_exit));\n\n }\n\n // delete frame and return\n\n let ret = self.label_allocator.fresh();\n\n self.new_body.insert(new_exit, Instruction::DeleteFrame(ret));\n\n self.new_body.insert(ret, Instruction::Return);\n\n\n\n // Compute 
lifetimes and returns a freshly built function. Lifetimes are included because the are meaningless without the corresponding function body.\n", "file_path": "src/ertl/builder.rs", "rank": 57, "score": 21.50790122666477 }, { "content": " fn add_parameter(& mut self, index: usize, source: Register, entry: Label) -> Label /*exit*/ {\n\n let label = self.label_allocator.fresh();\n\n self.new_body.insert(entry, match index {\n\n 0 => Instruction::BinaryOp(x64BinaryOp::mov, source, Register::Rdi, label),\n\n 1 => Instruction::BinaryOp(x64BinaryOp::mov, source, Register::Rsi, label), // Rsi\n\n 2 => Instruction::BinaryOp(x64BinaryOp::mov, source, Register::Rdx, label), // Rdx\n\n 3 => Instruction::BinaryOp(x64BinaryOp::mov, source, Register::Rcx, label), // Rcx\n\n 4 => Instruction::BinaryOp(x64BinaryOp::mov, source, Register::R8, label), // R8\n\n 5 => Instruction::BinaryOp(x64BinaryOp::mov, source, Register::R9, label), // R9\n\n _ => Instruction::PushParam(source, label), // Stack\n\n });\n\n label\n\n }\n\n}\n", "file_path": "src/ertl/builder.rs", "rank": 58, "score": 21.208294709297924 }, { "content": " let tmp = self.label_allocator.fresh();\n\n self.body.insert(entry, Instruction::BinaryOp(x64BinaryOp::mov, Operand::Spilled(i), Operand::Reg(tmp_reg), tmp));\n\n (tmp, tmp_reg)\n\n },\n\n Operand::Reg(r) => {\n\n (entry, r)\n\n },\n\n }\n\n }\n\n\n\n fn instruction(& mut self, entry: Label, ertl_instr: ertl::Instruction) {\n\n match ertl_instr {\n\n ertl::Instruction::Const(val, dest, next) => {\n\n let dest = self.lookup(dest);\n\n self.body.insert(entry, Instruction::Const(val, dest , next));\n\n },\n\n ertl::Instruction::AccessGlobal(name, dest, next) => {\n\n let dest_op = self.lookup(dest);\n\n let (dest, next) = self.fix_dest(dest_op, Register::R11, next);\n\n self.body.insert(entry, Instruction::AccessGlobal(name, dest, next));\n", "file_path": "src/ltl/builder.rs", "rank": 59, "score": 20.885286318426644 }, { "content": "\n\nimpl FuncDefinition {\n\n fn 
new(builder: FuncDefinitionBuilder, name: String, entry_label: Label,) -> Self{\n\n FuncDefinition{\n\n name: name,\n\n formals: builder.formals,\n\n result: builder.result,\n\n locals: builder.locals,\n\n entry: entry_label,\n\n exit: builder.exit,\n\n body: builder.instructions,\n\n label_allocator: builder.label_allocator,\n\n register_allocator: builder.register_allocator,\n\n }\n\n }\n\n\n\n pub fn from_typer_function(function: tast::Function, types: &HashMap<String, tast::Struct>, /*globals: &Vec<Ident>*//* Not needed, can infer that a variable is global*/) -> FuncDefinition {\n\n let mut builder = FuncDefinitionBuilder::new(&function, types);\n\n let exit = builder.exit;\n\n let entry_label = builder.bloc(function.blk, exit);\n\n let entry = builder.label_allocator.fresh();\n\n builder.instructions.insert(entry, Instruction::Const(0, builder.result, entry_label));\n\n FuncDefinition::new(builder, function.name, entry_label)\n\n }\n\n}\n", "file_path": "src/rtl/builder.rs", "rank": 60, "score": 20.545186071714276 }, { "content": " self.new_body.insert(entry, Instruction::BinaryOp(op, reg1, reg2, next));\n\n },\n\n rtl::Instruction::Branch(op, label1, next) => {\n\n self.new_body.insert(entry, Instruction::Branch(op, label1, next));\n\n },\n\n rtl::Instruction::Call(result, name, params, next) => { // function calls\n\n let mut entry = entry;\n\n // Add parameters\n\n for (i, param) in params.iter().cloned().enumerate(){\n\n entry = self.add_parameter(i, param, entry)\n\n }\n\n let tmp = self.label_allocator.fresh();\n\n // The call itself\n\n self.new_body.insert(entry, Instruction::Call(name, params.len(), tmp));\n\n // clean up stack if needed\n\n let tmp = if params.len() > 6{\n\n let tmp2 = self.label_allocator.fresh();\n\n self.new_body.insert(\n\n tmp,\n\n Instruction::UnaryOp(\n", "file_path": "src/ertl/builder.rs", "rank": 62, "score": 20.353164986669604 }, { "content": " x64UnaryOp::addi(((params.len() - 6) * 8) as i64),\n\n Register::Rsp, tmp2\n\n 
)\n\n );\n\n tmp2\n\n } else {\n\n tmp\n\n };\n\n // Move result to its destination\n\n self.new_body.insert(\n\n tmp,\n\n Instruction::BinaryOp(x64BinaryOp::mov, Register::Rax, result, next)\n\n );\n\n },\n\n rtl::Instruction::Goto(label) => {\n\n self.new_body.insert(entry, Instruction::Goto(label));\n\n },\n\n }\n\n }\n\n // This must be synchronised with common::register.\n", "file_path": "src/ertl/builder.rs", "rank": 63, "score": 20.291297989991538 }, { "content": " }\n\n label\n\n }\n\n\n\n fn expression(& mut self, expr: tast::Expression, exit: Label, result_reg: Option<Register>) -> Label {\n\n match expr.kind {\n\n tast::ExprKind::Const(ivalue) => {\n\n self.expr_const(ivalue, exit, result_reg)\n\n },\n\n tast::ExprKind::Lvalue(name) => {\n\n self.expr_lvalue(name, exit, result_reg)\n\n },\n\n tast::ExprKind::Sizeof(typename) => {\n\n self.expr_sizeof(typename, exit, result_reg)\n\n },\n\n tast::ExprKind::MembDeref(box_expr, membername) => {\n\n self.expr_memb_deref(*box_expr, membername, exit, result_reg)\n\n },\n\n tast::ExprKind::Unary(op, box_expr) => {\n\n self.expr_unary(op, *box_expr, exit, result_reg)\n", "file_path": "src/rtl/builder.rs", "rank": 64, "score": 20.275845013705037 }, { "content": " }\n\n try!(write!(f, \"{}\\n\", instruction));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct GlobalDef {\n\n pub name: String,\n\n}\n\n\n\nimpl Display for GlobalDef {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(f, \"{}:\\n .quad 0\\n\", self.name)\n\n }\n\n}\n", "file_path": "src/output/asm.rs", "rank": 65, "score": 20.258522476400636 }, { "content": " rtl::Instruction::AssignGlobal(src, name, next) => {\n\n self.new_body.insert(entry, Instruction::AssignGlobal(src, name, next));\n\n },\n\n rtl::Instruction::Load(addr, offset, dest, next) => {\n\n self.new_body.insert(entry, Instruction::Load(addr, offset, dest, next));\n\n },\n\n rtl::Instruction::Store(src, addr, offset, next) => {\n\n self.new_body.insert(entry, 
Instruction::Store(src, addr, offset, next));\n\n },\n\n rtl::Instruction::UnaryOp(op, reg, next) => {\n\n self.new_body.insert(entry, Instruction::UnaryOp(op, reg, next));\n\n },\n\n rtl::Instruction::BinaryOp(x64BinaryOp::div, reg1, reg2, next) => { // Special case for the division\n\n let tmp2 = self.label_allocator.fresh();\n\n let tmp3 = self.label_allocator.fresh();\n\n self.new_body.insert(entry, Instruction::BinaryOp(x64BinaryOp::mov, reg2, Register::Rax, tmp2));\n\n self.new_body.insert(tmp2, Instruction::BinaryOp(x64BinaryOp::div, reg1, Register::Rax, tmp3));\n\n self.new_body.insert(tmp3, Instruction::BinaryOp(x64BinaryOp::mov, Register::Rax, reg2, next));\n\n },\n\n rtl::Instruction::BinaryOp(op, reg1, reg2, next) => {\n", "file_path": "src/ertl/builder.rs", "rank": 66, "score": 19.84293885084069 }, { "content": " //_ => write!(f, \"{:#?}\\n\", self)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct FuncDefinition {\n\n pub label_allocator: LabelAllocator,\n\n pub name: Ident,\n\n pub formals: usize,\n\n pub entry: Label,\n\n pub body: HashMap<Label, Instruction>,\n\n pub liveness : HashMap<Label, LivenessInfo>,\n\n}\n\n\n\nimpl FuncDefinition { // Implemented by visiting teh graph, inspired from OCaml provided code.\n\n fn print_body(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let mut visited = HashSet::<Label>::new();\n\n self.visit(&mut visited, self.entry, f)\n\n }\n", "file_path": "src/ertl/tree.rs", "rank": 67, "score": 19.566307693237764 }, { "content": " functions: functions,\n\n }\n\n }\n\n}\n\n\n\nimpl FunctionOutput {\n\n pub fn from_ltl(mut ltl_function: ltl::FuncDefinition) -> Self {\n\n let instructions = Vec::new();\n\n let labels = HashSet::new();\n\n let mut visited : HashSet<Label> = HashSet::new();\n\n let mut f = FunctionOutput{\n\n name: ltl_function.name,\n\n instructions: instructions,\n\n labels: labels,\n\n };\n\n f.visit(& mut visited, ltl_function.entry, &mut ltl_function.body);\n\n f\n\n }\n\n\n\n fn 
visit(& mut self, visited : & mut HashSet<Label>, label: Label, instructions: &mut HashMap<Label, ltl::Instruction>) {\n", "file_path": "src/output/builder.rs", "rank": 68, "score": 19.162363998645603 }, { "content": "//pub use self::register::{Register, RegisterAllocator};\n\n//pub use self::label::{Label, LabelAllocator};\n\n\n\n/*\n\nThis contains common primitives that are used in most codegen passes.\n\n\n\nRegister deals with physicla and pseudo registers. RegisterAllocator is the structure that allocates pseudo register numbers.\n\nLabel deals with allocating labels in the code. The lables are simply integers.\n\nOps represent the names of operations in intel assembly splitted in three categories (Unary, Binary and Branch)\n\n\n\n*/\n\n\n\npub mod register;\n\npub mod label;\n\npub mod ops;\n", "file_path": "src/common/mod.rs", "rank": 69, "score": 19.058959884024095 }, { "content": "use common::label::Label;\n\nuse std::collections::HashSet;\n\nuse std::fmt::Display;\n\nuse std::fmt;\n\n\n\n\n\n\n\npub struct Output {\n\n pub functions: Vec<FunctionOutput>,\n\n pub globals: Vec<GlobalDef>\n\n}\n\n\n\nimpl Display for Output {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n try!(write!(f,\" .text\\n .globl main\\n\"));\n\n for func in &self.functions {\n\n try!(write!(f, \"{}\", func));\n\n }\n\n try!(write!(f,\" .data\\n\"));\n\n for glob in &self.globals {\n", "file_path": "src/output/asm.rs", "rank": 70, "score": 18.997057069361123 }, { "content": "use std::collections::{HashSet, HashMap};\n\nuse ltl::Register;\n\nuse ltl::Label;\n\nuse ertl::liveness;\n\nuse ertl;\n\nuse common::ops;\n\nuse ltl::tree::Operand;\n\n\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct Arcs {\n\n pub prefs: HashSet<Register>,\n\n pub intfs: HashSet<Register>,\n\n}\n\n\n\n\n\nimpl Arcs {\n\n pub fn new() -> Self {\n\n Arcs{\n\n prefs: HashSet::new(),\n", "file_path": "src/ltl/interference.rs", "rank": 71, "score": 18.985316509158288 }, { "content": 
" tast::Statement::Return(expr) => {\n\n let result = self.result;\n\n let exit = self.exit;\n\n self.expression(expr, exit, Some(result))\n\n },\n\n tast::Statement::Expr(expr) => {\n\n self.expression(expr, exit, None)\n\n },\n\n tast::Statement::Bloc(blk) => {\n\n self.bloc(blk, exit)\n\n },\n\n tast::Statement::Noop => exit,\n\n tast::Statement::If(condition, box_stmt) => {\n\n let label = self.statement(*box_stmt, exit.clone());\n\n // Naïve implementation !\n\n let label2 = self.label_allocator.fresh();\n\n let cnd_reg = self.register_allocator.fresh();\n\n let label3 = self.label_allocator.fresh();\n\n self.instructions.insert(label2, Instruction::Branch(x64Branch::je, exit, label));\n\n self.instructions.insert(label3, Instruction::BinaryOp(x64BinaryOp::test, cnd_reg, cnd_reg, label2));\n", "file_path": "src/rtl/builder.rs", "rank": 72, "score": 18.897526338124543 }, { "content": "use output::*;\n\nuse ltl;\n\nuse common::ops::*;\n\nuse std::collections::{HashSet, HashMap};\n\nuse common::label::Label;\n\nuse common::register::Register;\n\nuse std::borrow::Cow;\n\n\n\nimpl Output {\n\n pub fn from_ltl(ltl_file: ltl::File) -> Self {\n\n let mut globals = Vec::new();\n\n for global in ltl_file.globals {\n\n globals.push(GlobalDef::new(global));\n\n }\n\n let mut functions = Vec::new();\n\n for function in ltl_file.functions {\n\n functions.push(FunctionOutput::from_ltl(function));\n\n }\n\n Output{\n\n globals: globals,\n", "file_path": "src/output/builder.rs", "rank": 73, "score": 18.655786133792194 }, { "content": " self.instructions.push((Some(label), String::from(\" retq\")));\n\n },\n\n //_ => {}\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl GlobalDef {\n\n pub fn new(name: String) -> Self {\n\n GlobalDef{\n\n name: name\n\n }\n\n }\n\n}\n", "file_path": "src/output/builder.rs", "rank": 74, "score": 18.531815961462 }, { "content": "use common::label::Label;\n\nuse common::register::Register;\n\nuse ertl::Instruction;\n\nuse std::collections::{HashMap, 
HashSet};\n\nuse std::iter::FromIterator;\n\nuse std::collections::VecDeque;\n\n\n\n\n\n#[derive(Debug)]\n\npub struct LivenessInfo {\n\n pub predecessors: HashSet<Label>,\n\n pub successors: HashSet<Label>,\n\n pub defs: HashSet<Register>,\n\n pub uses: HashSet<Register>,\n\n pub ins: HashSet<Register>,\n\n pub outs: HashSet<Register>,\n\n}\n\n\n\n// Implementation of kildal's algorithm\n\nimpl LivenessInfo {\n", "file_path": "src/ertl/liveness.rs", "rank": 75, "score": 18.255959421454342 }, { "content": " self.expression(condition, label3, Some(cnd_reg))\n\n },\n\n tast::Statement::IfElse(condition, box_stmt_if, box_stmt_else) => {\n\n let label_else = self.statement(*box_stmt_else, exit.clone());\n\n let label_if = self.statement(*box_stmt_if, exit.clone());\n\n // Naïve implementation !\n\n let label2 = self.label_allocator.fresh();\n\n let cnd_reg = self.register_allocator.fresh();\n\n let label3 = self.label_allocator.fresh();\n\n self.instructions.insert(label2, Instruction::Branch(x64Branch::je, label_else, label_if));\n\n self.instructions.insert(label3, Instruction::BinaryOp(x64BinaryOp::test, cnd_reg, cnd_reg, label2));\n\n self.expression(condition, label3, Some(cnd_reg))\n\n },\n\n tast::Statement::While(condition, box_stmt) => {\n\n let label3 = self.label_allocator.fresh();\n\n let label2 = self.label_allocator.fresh();\n\n let cnd_reg = self.register_allocator.fresh();\n\n let loop_begin = self.expression(condition, label3, Some(cnd_reg));\n\n let label_loop_body = self.statement(*box_stmt, loop_begin);\n\n self.instructions.insert(label2, Instruction::Branch(x64Branch::je, exit, label_loop_body));\n", "file_path": "src/rtl/builder.rs", "rank": 76, "score": 17.51556255692772 }, { "content": "\n\n fn visit(& self, visited: & mut HashSet<Label>, l: Label, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n if visited.contains(&l) {\n\n return Ok(());\n\n }\n\n visited.insert(l);\n\n if let Some(instruction) = self.body.get(&l) {\n\n try!(write!(f, 
\" {}: {}\\n\", l, instruction));\n\n for s in instruction.successors() {\n\n try!(self.visit(visited, s, f));\n\n }\n\n return Ok(())\n\n } else {\n\n return Ok(());\n\n }\n\n }\n\n}\n\n\n\nimpl Display for FuncDefinition {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n", "file_path": "src/ltl/tree.rs", "rank": 77, "score": 17.353660931302773 }, { "content": " try!(write!(f, \"{}()\\n\", self.name));\n\n try!(write!(f, \" entry : {}\\n\", self.entry));\n\n self.print_body(f)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct File {\n\n pub globals : Vec<Ident>,\n\n pub functions: Vec<FuncDefinition>,\n\n}\n\nimpl Display for File {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n try!(write!(f, \"== LTL ==================================================\\n\"));\n\n for i in 0..self.functions.len() {\n\n try!(write!(f, \"{}\", self.functions[i]));\n\n }\n\n write!(f, \"== END ==================================================\\n\")\n\n }\n\n}\n", "file_path": "src/ltl/tree.rs", "rank": 78, "score": 17.2277484750644 }, { "content": " try!(write!(f, \"{}({})\\n\", self.name, self.formals));\n\n try!(write!(f, \" entry : {}\\n\", self.entry));\n\n self.print_body(f)\n\n }\n\n}\n\n\n\n\n\n\n\n#[derive(Debug)]\n\npub struct File {\n\n pub globals : Vec<Ident>,\n\n pub functions: Vec<FuncDefinition>,\n\n}\n\n\n\nimpl Display for File {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n try!(write!(f, \"== ERTL ==================================================\\n\"));\n\n for i in 0..self.functions.len() {\n\n try!(write!(f, \"{}\", self.functions[i]));\n\n }\n\n write!(f, \"== END ==================================================\\n\")\n\n }\n\n}\n", "file_path": "src/ertl/tree.rs", "rank": 79, "score": 17.070311208633562 }, { "content": " new_body: HashMap::new(),\n\n result: rtl_func.result,\n\n }, rtl_func.body)\n\n }\n\n\n\n fn build(mut self, old_body: HashMap<Label, rtl::Instruction>) -> 
FuncDefinition {\n\n\n\n // Build the body\n\n for (entry, instr) in old_body {\n\n self.instruction(entry, instr);\n\n }\n\n\n\n // Beginning of the function.\n\n\n\n let mut prev = self.entry; // Label used as the start for the next instruction throughout the loop.\n\n // fetch the parameters from register and stack. The instructions at teh beginning of the function body.\n\n for i in 0..self.formals.len() {\n\n let next = prev;\n\n prev = self.label_allocator.fresh();\n\n self.new_body.insert(prev, match i {\n", "file_path": "src/ertl/builder.rs", "rank": 80, "score": 16.74111691251203 }, { "content": " possible_colors: possible_colors,\n\n result: HashMap::new(),\n\n spilled: 0,\n\n todo: todo,\n\n }\n\n }\n\n\n\n // Simple coloring algorithm.\n\n\n\n pub fn color_simple(mut self) -> (HashMap<Register, Operand>, usize) {\n\n\n\n while ! self.todo.is_empty() {\n\n // Select a register to color or spill.\n\n let (register, color) : (Register, Option<Register>) =\n\n // First case : single possible color with preference\n\n if let Some(reg) = self.todo.iter().find(|&reg_ref| {\n\n if self.possible_colors[reg_ref].len() == 1 {\n\n let color = self.possible_colors[reg_ref].iter().nth(0).unwrap();\n\n let prefs = &self.graph[reg_ref].prefs;\n\n prefs.iter().any(|&pref_ref| {self.result.get(&pref_ref) == Some(&Operand::Reg(*color))})\n", "file_path": "src/ltl/interference.rs", "rank": 81, "score": 16.677136546239364 }, { "content": "\n\n fn visit(& self, visited: & mut HashSet<Label>, l: Label, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n if visited.contains(&l) {\n\n return Ok(());\n\n }\n\n visited.insert(l);\n\n if let (Some(instruction), Some(live_info)) = (self.body.get(&l),self.liveness.get(&l)) {\n\n try!(write!(f, \" {}: {} {:?}\\n\", l, instruction, live_info));\n\n for s in instruction.successors() {\n\n try!(self.visit(visited, s, f));\n\n }\n\n return Ok(())\n\n } else {\n\n return Ok(());\n\n }\n\n }\n\n}\n\n\n\nimpl Display for FuncDefinition 
{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n", "file_path": "src/ertl/tree.rs", "rank": 82, "score": 16.65131868811938 }, { "content": " ltl::Operand::Reg(Register::Rcx) => Cow::from(\"%cl\"),\n\n ltl::Operand::Reg(Register::Rdx) => Cow::from(\"%dl\"),\n\n ltl::Operand::Reg(Register::Rsi) => Cow::from(\"%sil\"),\n\n ltl::Operand::Reg(Register::Rdi) => Cow::from(\"%dil\"),\n\n ltl::Operand::Reg(Register::Rsp) => Cow::from(\"%spl\"),\n\n ltl::Operand::Reg(Register::Rbp) => Cow::from(\"%bpl\"),\n\n ltl::Operand::Reg(Register::R8 ) => Cow::from(\"%r8b\"),\n\n ltl::Operand::Reg(Register::R9 ) => Cow::from(\"%r9b\"),\n\n ltl::Operand::Reg(Register::R10) => Cow::from(\"%r10b\"),\n\n ltl::Operand::Reg(Register::R11) => Cow::from(\"%r11b\"),\n\n ltl::Operand::Reg(Register::R12) => Cow::from(\"%r12b\"),\n\n ltl::Operand::Reg(Register::R13) => Cow::from(\"%r13b\"),\n\n ltl::Operand::Reg(Register::R14) => Cow::from(\"%r14b\"),\n\n ltl::Operand::Reg(Register::R15) => Cow::from(\"%r15b\"),\n\n _ => Cow::from(format!(\"{}\", reg)),\n\n };\n\n self.instructions.push((Some(label), format!(\" movq $0, {}\\n {} {}\", reg, op, byte_reg)));\n\n self.visit(visited, next, instructions);\n\n },\n\n ltl::Instruction::BinaryOp(x64BinaryOp::div, sreg, dreg, next) => {\n", "file_path": "src/output/builder.rs", "rank": 83, "score": 16.601378141419943 }, { "content": " self.expr_bin_or(*b_lhs, *b_rhs, exit, result_reg)\n\n },\n\n }\n\n },\n\n tast::ExprKind::Call(name, parameters) => {\n\n self.expr_call(name, parameters, exit, result_reg)\n\n },\n\n }\n\n }\n\n\n\n\n\n fn find_var(& self, name: &str) -> Option<Register> {\n\n for scope in self.variables.iter().rev() {\n\n if let Some(reg) = scope.get(name) {\n\n return Some(*reg);\n\n }\n\n }\n\n None\n\n }\n\n}\n", "file_path": "src/rtl/builder.rs", "rank": 84, "score": 16.398624441841566 }, { "content": " let liveness = LivenessInfo::compute(&self.new_body);\n\n FuncDefinition{\n\n label_allocator: 
self.label_allocator,\n\n name: self.name,\n\n formals: self.formals.len(),\n\n entry: new_entry,\n\n body: self.new_body,\n\n liveness: liveness,\n\n }\n\n }\n\n\n\n // Instruction translation\n\n fn instruction(& mut self, entry: Label, rtl_instrution: rtl::Instruction) {\n\n match rtl_instrution {\n\n rtl::Instruction::Const(val, reg, next) => {\n\n self.new_body.insert(entry, Instruction::Const(val, reg, next));\n\n },\n\n rtl::Instruction::AccessGlobal(name, dest, next) => {\n\n self.new_body.insert(entry, Instruction::AccessGlobal(name, dest, next));\n\n },\n", "file_path": "src/ertl/builder.rs", "rank": 85, "score": 16.04109836149264 }, { "content": "}\n\n\n\nimpl ExprKind {\n\n pub fn is_kind_pure(&self) -> bool {\n\n match *self {\n\n ExprKind::Const(_) | ExprKind::Lvalue(_) | ExprKind::Sizeof(_) => true,\n\n ExprKind::MembDeref(ref be, _) | ExprKind::Unary(_, ref be) => be.is_expr_pure(),\n\n ExprKind::Call(..) => false, // This could change.\n\n ExprKind::Binary(ref be1, ref op, ref be2) => match *op {\n\n BinaryOp::Affect => false,\n\n _ => be1.is_expr_pure() && be2.is_expr_pure(),\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/typing/ast.rs", "rank": 86, "score": 15.697594538174846 }, { "content": " },\n\n ertl::Instruction::AssignGlobal(src, name, next) => {\n\n let src_op = self.lookup(src);\n\n let (entry, src) = self.fix_src(entry, src_op, Register::R11);\n\n self.body.insert(entry, Instruction::AssignGlobal(src, name, next));\n\n },\n\n ertl::Instruction::Load(adr, offset, dest, next) => {\n\n let adr_op = self.lookup(adr);\n\n let dest_op = self.lookup(dest);\n\n let (entry, adr) = self.fix_src(entry, adr_op, Register::R11);\n\n let (dest, next) = self.fix_dest(dest_op, Register::R10, next);\n\n self.body.insert(entry, Instruction::Load(adr, offset, dest, next));\n\n },\n\n ertl::Instruction::Store(src, adr, offset, next) => {\n\n let adr_op = self.lookup(adr);\n\n let src_op = self.lookup(src);\n\n let (entry, src) = self.fix_src(entry, src_op, 
Register::R11);\n\n let (entry, adr) = self.fix_src(entry, adr_op, Register::R10);\n\n self.body.insert(entry, Instruction::Store(src, adr, offset, next));\n\n },\n", "file_path": "src/ltl/builder.rs", "rank": 87, "score": 15.33229662025169 }, { "content": " for (label, instruction) in instructions {\n\n if let ertl::Instruction::BinaryOp(ops::x64BinaryOp::mov, register1, register2, _) = *instruction {\n\n {\n\n let arc1 = graph.entry(register1).or_insert_with(Arcs::new);\n\n arc1.prefs.insert(register2);\n\n }\n\n {\n\n let arc2 = graph.entry(register2).or_insert_with(Arcs::new);\n\n arc2.prefs.insert(register1);\n\n }\n\n movs.insert(label, register1);\n\n }\n\n }\n\n for (label, live_info) in liveness {\n\n let mut outs = live_info.outs.clone();\n\n let defs = live_info.defs.clone();\n\n if movs.contains_key(label) {\n\n outs.remove(&movs[&label]);\n\n }\n\n\n", "file_path": "src/ltl/interference.rs", "rank": 88, "score": 15.25546018128626 }, { "content": " match *self {\n\n Instruction::Const(_, register, _)\n\n | Instruction::AccessGlobal(_, register, _)\n\n | Instruction::GetParam(_, register, _) => {\n\n (vec![register], vec![])\n\n },\n\n Instruction::AssignGlobal(register, _, _)\n\n | Instruction::PushParam(register, _) => {\n\n (vec![], vec![register])\n\n },\n\n Instruction::UnaryOp(_, register_dest, _) =>{\n\n (vec![register_dest], vec![register_dest])\n\n },\n\n Instruction::BinaryOp(x64BinaryOp::mov, register_source, register_dest, _)\n\n | Instruction::Load(register_source, _, register_dest, _) => {\n\n (vec![register_dest], vec![register_source])\n\n },\n\n Instruction::BinaryOp(x64BinaryOp::test, register1, register2, _)\n\n | Instruction::BinaryOp(x64BinaryOp::cmp, register1, register2, _)\n\n | Instruction::Store(register1, register2, _, _) => {\n", "file_path": "src/ertl/tree.rs", "rank": 89, "score": 15.060467613367862 }, { "content": "use ertl::*;\n\nuse rtl;\n\nuse common::label::{Label, LabelAllocator};\n\nuse common::ops::*;\n\nuse 
std::collections::HashMap;\n\nuse ertl::liveness::LivenessInfo;\n\nuse common::register::{Register, RegisterAllocator};\n\n\n\n/*\n\n The interesting methods and structires are here.\n\n\n\n The FunctionDefinitionBuilder data structure conatins teh temporary dat needed while doing the transformation.\n\n*/\n\n\n\nimpl File {\n\n pub fn from_rtl(rtl_file: rtl::File) -> File { // Is there a possibilty of error ?\n\n let functions = rtl_file.functions.into_iter().map(\n\n |rtl_func: rtl::FuncDefinition| {\n\n FuncDefinition::from_rtl(rtl_func)\n\n }\n", "file_path": "src/ertl/builder.rs", "rank": 90, "score": 14.887612732114139 }, { "content": " },\n\n tast::ExprKind::MembDeref(box_expr, membername) => {\n\n if let tast::Type::Struct(typename) = box_expr.typ.clone() {\n\n let typ = &self.types[&typename];\n\n let index = typ.index[&membername];\n\n let label = self.label_allocator.fresh();\n\n let adress = self.register_allocator.fresh();\n\n self.instructions.insert(label, Instruction::Store(source_reg, adress, index as i64 * 8 , exit));\n\n self.expression(*box_expr, label, Some(adress))\n\n } else {\n\n panic!(\"Dereferencing of non struct type, typer failed.\");\n\n }\n\n },\n\n _ => {panic!(\"Unkown lvalue type for affectation\")}, // Unreachable in theory.\n\n }\n\n } else {\n\n panic!(\"Unkown lvalue type for affectation\")\n\n }\n\n }\n\n\n", "file_path": "src/rtl/builder.rs", "rank": 91, "score": 14.4593243910264 }, { "content": "use rtl::*;\n\nuse common::label::*;\n\nuse common::register::*;\n\nuse common::ops::*;\n\nuse std::iter::FromIterator;\n\nuse typing::ast as tast;\n\nuse std::collections::{HashMap, HashSet};\n\n//use std::boxed::Box;\n\n//use std::ops::Deref;\n\n\n\nimpl File {\n\n pub fn from_typer_ast(typer_ast: tast::File) -> File {\n\n let mut file = File{globals: Vec::new(), functions: Vec::new()};\n\n file.globals = Vec::from_iter(typer_ast.variables.keys().cloned());\n\n for function in typer_ast.function_definitions {\n\n let func_def = 
FuncDefinition::from_typer_function(function, &typer_ast.types/*, &file.globals*/);\n\n file.functions.push(func_def);\n\n }\n\n file\n\n\n\n }\n\n}\n\n\n", "file_path": "src/rtl/builder.rs", "rank": 92, "score": 14.34128310061453 }, { "content": " (entry, Operand::Reg(src))\n\n } else {\n\n (entry, src_op)\n\n };\n\n self.body.insert(entry, Instruction::BinaryOp(op, src_op, dest_op, next));\n\n },\n\n ertl::Instruction::Call(name, _, next) => {\n\n self.body.insert(entry, Instruction::Call(name, next));\n\n },\n\n ertl::Instruction::Branch(op, jmp_dest, next) => {\n\n self.body.insert(entry, Instruction::Branch(op, jmp_dest, next));\n\n },\n\n ertl::Instruction::Goto(next) => {\n\n self.body.insert(entry, Instruction::Goto(next));\n\n },\n\n ertl::Instruction::AllocFrame(next) => {\n\n self.body.insert(entry, Instruction::Enter(self.spilled*8, next));\n\n },\n\n ertl::Instruction::DeleteFrame(next) => {\n\n self.body.insert(entry, Instruction::Leave(next));\n", "file_path": "src/ltl/builder.rs", "rank": 93, "score": 14.121870207059496 }, { "content": " (vec![], vec![register1, register2])\n\n },\n\n Instruction::BinaryOp(x64BinaryOp::div, register1, register2, _) => {\n\n assert_eq!(register2, Register::Rax, \"division destination should be %rax\");\n\n (vec![Register::Rax, Register::Rdx], vec![Register::Rax, Register::Rdx, register1])\n\n },\n\n Instruction::BinaryOp(_, register1, register2, _) => {\n\n (vec![register2], vec![register1, register2])\n\n },\n\n Instruction::Call(_, n, _) => {\n\n let mut v = Vec::new();\n\n let p = Register::parameters();\n\n v.extend(p.iter().take(n));\n\n (Register::caller_saved(),v)\n\n }\n\n Instruction::Branch(_, _ , _)\n\n | Instruction::Goto(_)\n\n | Instruction::AllocFrame(_)\n\n | Instruction::DeleteFrame(_) => {\n\n (vec![],vec![])\n", "file_path": "src/ertl/tree.rs", "rank": 95, "score": 13.98125799641228 }, { "content": " }\n\n }).unwrap();\n\n let color = match self.result[pref] {\n\n Operand::Reg(c) => c,\n\n _ => 
{panic!(\"Weird thing occured\");}\n\n };\n\n (*reg, Some(color))\n\n // case 4 : possible colors without preference\n\n } else if let Some(reg) = self.todo.iter().find(|& reg_ref| {!self.possible_colors[reg_ref].is_empty()}) {\n\n (*reg, Some(*self.possible_colors[reg].iter().nth(0).unwrap()))\n\n // case 5 :Need to spill at least one pseudo register\n\n } else {\n\n let reg = *self.todo.iter().nth(0).unwrap();\n\n (reg, None)\n\n }\n\n ;\n\n // remove it from the todo list and color it (or spill it)\n\n self.todo.remove(&register);\n\n if let Some(color) = color {\n\n self.color_register(register, color);\n", "file_path": "src/ltl/interference.rs", "rank": 96, "score": 13.944851512075923 }, { "content": " pub fn compute(instructions: &HashMap<Label, Instruction>) -> HashMap<Label, LivenessInfo> {\n\n\n\n let mut liveness = HashMap::new();\n\n for (label, instruction) in instructions {\n\n liveness.insert(*label, LivenessInfo::new(instruction));\n\n }\n\n let keys = instructions.keys().cloned();\n\n for label in keys.clone() {\n\n for succ in liveness[&label].successors.clone() {\n\n liveness.get_mut(&succ).unwrap().predecessors.insert(label);\n\n }\n\n }\n\n\n\n let mut working_set = VecDeque::from_iter(keys);\n\n while let Some(l) = working_set.pop_front() {\n\n let old_ins = liveness[&l].ins.clone();\n\n let outs = liveness[&l].successors.iter().fold(HashSet::new(), |acc, l| {acc.union(&liveness[l].ins).cloned().collect()});\n\n liveness.get_mut(&l).unwrap().outs = outs;\n\n let tmp = &liveness[&l].outs - &liveness[&l].defs;\n\n let ins : HashSet<Register> = liveness[&l].uses.union(&tmp).cloned().collect();\n", "file_path": "src/ertl/liveness.rs", "rank": 97, "score": 13.941843579128545 }, { "content": " (FuncDefinitionBuilder{\n\n label_allocator: ertl_func.label_allocator,\n\n name: ertl_func.name,\n\n entry: ertl_func.entry,\n\n body: HashMap::new(),\n\n spilled: spilled,\n\n register_affectations: register_affectations,\n\n }, ertl_func.body)\n\n }\n\n fn 
build(mut self, old_body: HashMap<Label, ertl::Instruction>) -> FuncDefinition{\n\n for (entry, instr) in old_body {\n\n self.instruction(entry, instr);\n\n }\n\n FuncDefinition{\n\n name: self.name,\n\n entry: self.entry,\n\n body: self.body,\n\n }\n\n }\n\n\n", "file_path": "src/ltl/builder.rs", "rank": 98, "score": 13.908935866670008 }, { "content": "\n\n // Operators\n\n method!(pub op_plus <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"+\") >> ({self.location.column += 1; s})), call_m!(self.space_opt)));\n\n method!(pub op_minus <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"-\") >> ({self.location.column += 1; s})), call_m!(self.space_opt)));\n\n method!(pub op_star <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"*\") >> ({self.location.column += 1; s})), call_m!(self.space_opt)));\n\n method!(pub op_div <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"/\") >> ({self.location.column += 1; s})), call_m!(self.space_opt)));\n\n method!(pub op_not <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"!\") >> ({self.location.column += 1; s})), call_m!(self.space_opt)));\n\n\n\n method!(pub op_deref <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"->\") >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n\n method!(pub op_and <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"&&\") >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n\n method!(pub op_or <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"||\") >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n\n\n\n method!(pub op_simple_eq <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"=\" ) >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n\n method!(pub op_double_eq <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"==\") >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n\n 
method!(pub op_not_eq <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"!=\") >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n\n\n\n method!(pub op_gt <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\">\" ) >> ({self.location.column += 1; s})), call_m!(self.space_opt)));\n\n method!(pub op_lt <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"<\" ) >> ({self.location.column += 1; s})), call_m!(self.space_opt)));\n\n method!(pub op_ge <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\">=\") >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n\n method!(pub op_le <Parser, &str, &str>, mut self, terminated!(do_parse!(s: tag_s!(\"<=\") >> ({self.location.column += 2; s})), call_m!(self.space_opt)));\n", "file_path": "src/parse/lexer.rs", "rank": 99, "score": 13.721662446439527 } ]
Rust
verification/src/header_verifier.rs
liyaspawn/ckb
59a40454fd46bf3640df68591001f9d6b6491049
use crate::{ BlockVersionError, EpochError, NumberError, PowError, TimestampError, UnknownParentError, ALLOWED_FUTURE_BLOCKTIME, }; use ckb_chain_spec::consensus::Consensus; use ckb_error::Error; use ckb_pow::PowEngine; use ckb_traits::HeaderProvider; use ckb_types::core::{HeaderView, Version}; use ckb_verification_traits::Verifier; use faketime::unix_time_as_millis; pub struct HeaderVerifier<'a, DL> { data_loader: &'a DL, consensus: &'a Consensus, } impl<'a, DL: HeaderProvider> HeaderVerifier<'a, DL> { pub fn new(data_loader: &'a DL, consensus: &'a Consensus) -> Self { HeaderVerifier { consensus, data_loader, } } } impl<'a, DL: HeaderProvider> Verifier for HeaderVerifier<'a, DL> { type Target = HeaderView; fn verify(&self, header: &Self::Target) -> Result<(), Error> { VersionVerifier::new(header, self.consensus.block_version()).verify()?; PowVerifier::new(header, self.consensus.pow_engine().as_ref()).verify()?; let parent = self .data_loader .get_header(&header.parent_hash()) .ok_or_else(|| UnknownParentError { parent_hash: header.parent_hash(), })?; NumberVerifier::new(&parent, header).verify()?; EpochVerifier::new(&parent, header).verify()?; TimestampVerifier::new( self.data_loader, header, self.consensus.median_time_block_count(), ) .verify()?; Ok(()) } } pub struct VersionVerifier<'a> { header: &'a HeaderView, block_version: Version, } impl<'a> VersionVerifier<'a> { pub fn new(header: &'a HeaderView, block_version: Version) -> Self { VersionVerifier { header, block_version, } } pub fn verify(&self) -> Result<(), Error> { if self.header.version() != self.block_version { return Err(BlockVersionError { expected: self.block_version, actual: self.header.version(), } .into()); } Ok(()) } } pub struct TimestampVerifier<'a, DL> { header: &'a HeaderView, data_loader: &'a DL, median_block_count: usize, now: u64, } impl<'a, DL: HeaderProvider> TimestampVerifier<'a, DL> { pub fn new(data_loader: &'a DL, header: &'a HeaderView, median_block_count: usize) -> Self { 
TimestampVerifier { data_loader, header, median_block_count, now: unix_time_as_millis(), } } pub fn verify(&self) -> Result<(), Error> { if self.header.is_genesis() { return Ok(()); } let min = self.data_loader.block_median_time( &self.header.data().raw().parent_hash(), self.median_block_count, ); if self.header.timestamp() <= min { return Err(TimestampError::BlockTimeTooOld { min, actual: self.header.timestamp(), } .into()); } let max = self.now + ALLOWED_FUTURE_BLOCKTIME; if self.header.timestamp() > max { return Err(TimestampError::BlockTimeTooNew { max, actual: self.header.timestamp(), } .into()); } Ok(()) } } pub struct NumberVerifier<'a> { parent: &'a HeaderView, header: &'a HeaderView, } impl<'a> NumberVerifier<'a> { pub fn new(parent: &'a HeaderView, header: &'a HeaderView) -> Self { NumberVerifier { parent, header } } pub fn verify(&self) -> Result<(), Error> { if self.header.number() != self.parent.number() + 1 { return Err(NumberError { expected: self.parent.number() + 1, actual: self.header.number(), } .into()); } Ok(()) } } pub struct EpochVerifier<'a> { parent: &'a HeaderView, header: &'a HeaderView, } impl<'a> EpochVerifier<'a> { pub fn new(parent: &'a HeaderView, header: &'a HeaderView) -> Self { EpochVerifier { parent, header } } pub fn verify(&self) -> Result<(), Error> { if !self.header.epoch().is_well_formed() { return Err(EpochError::Malformed { value: self.header.epoch(), } .into()); } if !self.parent.is_genesis() && !self.header.epoch().is_successor_of(self.parent.epoch()) { return Err(EpochError::NonContinuous { current: self.header.epoch(), parent: self.parent.epoch(), } .into()); } Ok(()) } } pub struct PowVerifier<'a> { header: &'a HeaderView, pow: &'a dyn PowEngine, } impl<'a> PowVerifier<'a> { pub fn new(header: &'a HeaderView, pow: &'a dyn PowEngine) -> Self { PowVerifier { header, pow } } pub fn verify(&self) -> Result<(), Error> { if self.pow.verify(&self.header.data()) { Ok(()) } else { Err(PowError::InvalidNonce.into()) } } }
use crate::{ BlockVersionError, EpochError, NumberError, PowError, TimestampError, UnknownParentError, ALLOWED_FUTURE_BLOCKTIME, }; use ckb_chain_spec::consensus::Consensus; use ckb_error::Error; use ckb_pow::PowEngine; use ckb_traits::HeaderProvider; use ckb_types::core::{HeaderView, Version}; use ckb_verification_traits::Verifier; use faketime::unix_time_as_millis; pub struct HeaderVerifier<'a, DL> { data_loader: &'a DL, consensus: &'a Consensus, } impl<'a, DL: HeaderProvider> HeaderVerifier<'a, DL> { pub fn new(data_loader: &'a DL, consensus: &'a Consensus) -> Self { HeaderVerifier { consensus, data_loader, } } } impl<'a, DL: HeaderProvider> Verifier for HeaderVerifier<'a, DL> { type Target = HeaderView; fn verify(&self, header: &Self::Target) -> Result<(), Error> { VersionVerifier::new(header, self.c
} .into()); } let max = self.now + ALLOWED_FUTURE_BLOCKTIME; if self.header.timestamp() > max { return Err(TimestampError::BlockTimeTooNew { max, actual: self.header.timestamp(), } .into()); } Ok(()) } } pub struct NumberVerifier<'a> { parent: &'a HeaderView, header: &'a HeaderView, } impl<'a> NumberVerifier<'a> { pub fn new(parent: &'a HeaderView, header: &'a HeaderView) -> Self { NumberVerifier { parent, header } } pub fn verify(&self) -> Result<(), Error> { if self.header.number() != self.parent.number() + 1 { return Err(NumberError { expected: self.parent.number() + 1, actual: self.header.number(), } .into()); } Ok(()) } } pub struct EpochVerifier<'a> { parent: &'a HeaderView, header: &'a HeaderView, } impl<'a> EpochVerifier<'a> { pub fn new(parent: &'a HeaderView, header: &'a HeaderView) -> Self { EpochVerifier { parent, header } } pub fn verify(&self) -> Result<(), Error> { if !self.header.epoch().is_well_formed() { return Err(EpochError::Malformed { value: self.header.epoch(), } .into()); } if !self.parent.is_genesis() && !self.header.epoch().is_successor_of(self.parent.epoch()) { return Err(EpochError::NonContinuous { current: self.header.epoch(), parent: self.parent.epoch(), } .into()); } Ok(()) } } pub struct PowVerifier<'a> { header: &'a HeaderView, pow: &'a dyn PowEngine, } impl<'a> PowVerifier<'a> { pub fn new(header: &'a HeaderView, pow: &'a dyn PowEngine) -> Self { PowVerifier { header, pow } } pub fn verify(&self) -> Result<(), Error> { if self.pow.verify(&self.header.data()) { Ok(()) } else { Err(PowError::InvalidNonce.into()) } } }
onsensus.block_version()).verify()?; PowVerifier::new(header, self.consensus.pow_engine().as_ref()).verify()?; let parent = self .data_loader .get_header(&header.parent_hash()) .ok_or_else(|| UnknownParentError { parent_hash: header.parent_hash(), })?; NumberVerifier::new(&parent, header).verify()?; EpochVerifier::new(&parent, header).verify()?; TimestampVerifier::new( self.data_loader, header, self.consensus.median_time_block_count(), ) .verify()?; Ok(()) } } pub struct VersionVerifier<'a> { header: &'a HeaderView, block_version: Version, } impl<'a> VersionVerifier<'a> { pub fn new(header: &'a HeaderView, block_version: Version) -> Self { VersionVerifier { header, block_version, } } pub fn verify(&self) -> Result<(), Error> { if self.header.version() != self.block_version { return Err(BlockVersionError { expected: self.block_version, actual: self.header.version(), } .into()); } Ok(()) } } pub struct TimestampVerifier<'a, DL> { header: &'a HeaderView, data_loader: &'a DL, median_block_count: usize, now: u64, } impl<'a, DL: HeaderProvider> TimestampVerifier<'a, DL> { pub fn new(data_loader: &'a DL, header: &'a HeaderView, median_block_count: usize) -> Self { TimestampVerifier { data_loader, header, median_block_count, now: unix_time_as_millis(), } } pub fn verify(&self) -> Result<(), Error> { if self.header.is_genesis() { return Ok(()); } let min = self.data_loader.block_median_time( &self.header.data().raw().parent_hash(), self.median_block_count, ); if self.header.timestamp() <= min { return Err(TimestampError::BlockTimeTooOld { min, actual: self.header.timestamp(),
random
[]
Rust
cli/ops/os.rs
justgeek/deno
34ec3b225425cecdccf754fbc87f4a8f3728890d
use super::dispatch_json::{Deserialize, JsonOp, Value}; use crate::op_error::OpError; use crate::state::State; use deno_core::CoreIsolate; use deno_core::ZeroCopyBuf; use std::collections::HashMap; use std::env; use std::io::{Error, ErrorKind}; use url::Url; pub fn init(i: &mut CoreIsolate, s: &State) { i.register_op("op_exit", s.stateful_json_op(op_exit)); i.register_op("op_env", s.stateful_json_op(op_env)); i.register_op("op_exec_path", s.stateful_json_op(op_exec_path)); i.register_op("op_set_env", s.stateful_json_op(op_set_env)); i.register_op("op_get_env", s.stateful_json_op(op_get_env)); i.register_op("op_get_dir", s.stateful_json_op(op_get_dir)); i.register_op("op_hostname", s.stateful_json_op(op_hostname)); i.register_op("op_loadavg", s.stateful_json_op(op_loadavg)); i.register_op("op_os_release", s.stateful_json_op(op_os_release)); } #[derive(Deserialize)] struct GetDirArgs { kind: std::string::String, } fn op_get_dir( state: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_unstable("Deno.dir"); state.check_env()?; let args: GetDirArgs = serde_json::from_value(args)?; let path = match args.kind.as_str() { "home" => dirs::home_dir(), "config" => dirs::config_dir(), "cache" => dirs::cache_dir(), "executable" => dirs::executable_dir(), "data" => dirs::data_dir(), "data_local" => dirs::data_local_dir(), "audio" => dirs::audio_dir(), "desktop" => dirs::desktop_dir(), "document" => dirs::document_dir(), "download" => dirs::download_dir(), "font" => dirs::font_dir(), "picture" => dirs::picture_dir(), "public" => dirs::public_dir(), "template" => dirs::template_dir(), "tmp" => Some(std::env::temp_dir()), "video" => dirs::video_dir(), _ => { return Err( Error::new( ErrorKind::InvalidInput, format!("Invalid dir type `{}`", args.kind.as_str()), ) .into(), ) } }; if path == None { Err(OpError::not_found(format!( "Could not get user {} directory.", args.kind.as_str() ))) } else { Ok(JsonOp::Sync(json!(path 
.unwrap_or_default() .into_os_string() .into_string() .unwrap_or_default()))) } } fn op_exec_path( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let current_exe = env::current_exe().unwrap(); state.check_read(&current_exe)?; let exe_url = Url::from_file_path(current_exe).unwrap(); let path = exe_url.to_file_path().unwrap(); Ok(JsonOp::Sync(json!(path))) } #[derive(Deserialize)] struct SetEnv { key: String, value: String, } fn op_set_env( state: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let args: SetEnv = serde_json::from_value(args)?; state.check_env()?; env::set_var(args.key, args.value); Ok(JsonOp::Sync(json!({}))) } fn op_env( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_env()?; let v = env::vars().collect::<HashMap<String, String>>(); Ok(JsonOp::Sync(json!(v))) } #[derive(Deserialize)] struct GetEnv { key: String, } fn op_get_env( state: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let args: GetEnv = serde_json::from_value(args)?; state.check_env()?; let r = match env::var(args.key) { Err(env::VarError::NotPresent) => json!([]), v => json!([v?]), }; Ok(JsonOp::Sync(r)) } #[derive(Deserialize)] struct Exit { code: i32, } fn op_exit( _s: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let args: Exit = serde_json::from_value(args)?; std::process::exit(args.code) } fn op_loadavg( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_unstable("Deno.loadavg"); state.check_env()?; match sys_info::loadavg() { Ok(loadavg) => Ok(JsonOp::Sync(json!([ loadavg.one, loadavg.five, loadavg.fifteen ]))), Err(_) => Ok(JsonOp::Sync(json!([0f64, 0f64, 0f64]))), } } fn op_hostname( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_env()?; let hostname = 
sys_info::hostname().unwrap_or_else(|_| "".to_string()); Ok(JsonOp::Sync(json!(hostname))) } fn op_os_release( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_unstable("Deno.osRelease"); state.check_env()?; let release = sys_info::os_release().unwrap_or_else(|_| "".to_string()); Ok(JsonOp::Sync(json!(release))) }
use super::dispatch_json::{Deserialize, JsonOp, Value}; use crate::op_error::OpError; use crate::state::State; use deno_core::CoreIsolate; use deno_core::ZeroCopyBuf; use std::collections::HashMap; use std::env; use std::io::{Error, ErrorKind}; use url::Url;
#[derive(Deserialize)] struct GetDirArgs { kind: std::string::String, } fn op_get_dir( state: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_unstable("Deno.dir"); state.check_env()?; let args: GetDirArgs = serde_json::from_value(args)?; let path = match args.kind.as_str() { "home" => dirs::home_dir(), "config" => dirs::config_dir(), "cache" => dirs::cache_dir(), "executable" => dirs::executable_dir(), "data" => dirs::data_dir(), "data_local" => dirs::data_local_dir(), "audio" => dirs::audio_dir(), "desktop" => dirs::desktop_dir(), "document" => dirs::document_dir(), "download" => dirs::download_dir(), "font" => dirs::font_dir(), "picture" => dirs::picture_dir(), "public" => dirs::public_dir(), "template" => dirs::template_dir(), "tmp" => Some(std::env::temp_dir()), "video" => dirs::video_dir(), _ => { return Err( Error::new( ErrorKind::InvalidInput, format!("Invalid dir type `{}`", args.kind.as_str()), ) .into(), ) } }; if path == None { Err(OpError::not_found(format!( "Could not get user {} directory.", args.kind.as_str() ))) } else { Ok(JsonOp::Sync(json!(path .unwrap_or_default() .into_os_string() .into_string() .unwrap_or_default()))) } } fn op_exec_path( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let current_exe = env::current_exe().unwrap(); state.check_read(&current_exe)?; let exe_url = Url::from_file_path(current_exe).unwrap(); let path = exe_url.to_file_path().unwrap(); Ok(JsonOp::Sync(json!(path))) } #[derive(Deserialize)] struct SetEnv { key: String, value: String, } fn op_set_env( state: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let args: SetEnv = serde_json::from_value(args)?; state.check_env()?; env::set_var(args.key, args.value); Ok(JsonOp::Sync(json!({}))) } fn op_env( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_env()?; let v = 
env::vars().collect::<HashMap<String, String>>(); Ok(JsonOp::Sync(json!(v))) } #[derive(Deserialize)] struct GetEnv { key: String, } fn op_get_env( state: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let args: GetEnv = serde_json::from_value(args)?; state.check_env()?; let r = match env::var(args.key) { Err(env::VarError::NotPresent) => json!([]), v => json!([v?]), }; Ok(JsonOp::Sync(r)) } #[derive(Deserialize)] struct Exit { code: i32, } fn op_exit( _s: &State, args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { let args: Exit = serde_json::from_value(args)?; std::process::exit(args.code) } fn op_loadavg( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_unstable("Deno.loadavg"); state.check_env()?; match sys_info::loadavg() { Ok(loadavg) => Ok(JsonOp::Sync(json!([ loadavg.one, loadavg.five, loadavg.fifteen ]))), Err(_) => Ok(JsonOp::Sync(json!([0f64, 0f64, 0f64]))), } } fn op_hostname( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_env()?; let hostname = sys_info::hostname().unwrap_or_else(|_| "".to_string()); Ok(JsonOp::Sync(json!(hostname))) } fn op_os_release( state: &State, _args: Value, _zero_copy: Option<ZeroCopyBuf>, ) -> Result<JsonOp, OpError> { state.check_unstable("Deno.osRelease"); state.check_env()?; let release = sys_info::os_release().unwrap_or_else(|_| "".to_string()); Ok(JsonOp::Sync(json!(release))) }
pub fn init(i: &mut CoreIsolate, s: &State) { i.register_op("op_exit", s.stateful_json_op(op_exit)); i.register_op("op_env", s.stateful_json_op(op_env)); i.register_op("op_exec_path", s.stateful_json_op(op_exec_path)); i.register_op("op_set_env", s.stateful_json_op(op_set_env)); i.register_op("op_get_env", s.stateful_json_op(op_get_env)); i.register_op("op_get_dir", s.stateful_json_op(op_get_dir)); i.register_op("op_hostname", s.stateful_json_op(op_hostname)); i.register_op("op_loadavg", s.stateful_json_op(op_loadavg)); i.register_op("op_os_release", s.stateful_json_op(op_os_release)); }
function_block-full_function
[ { "content": "export function getRandomValues<\n\n T extends\n\n | Int8Array\n\n | Uint8Array\n\n | Uint8ClampedArray\n\n | Int16Array\n\n | Uint16Array\n\n | Int32Array\n\n | Uint32Array\n\n>(typedArray: T): T {\n\n assert(typedArray !== null, \"Input must not be null\");\n\n assert(typedArray.length <= 65536, \"Input must not be longer than 65536\");\n\n const ui8 = new Uint8Array(\n\n typedArray.buffer,\n\n typedArray.byteOffset,\n\n typedArray.byteLength\n\n );\n\n sendSync(\"op_get_random_values\", {}, ui8);\n\n return typedArray;\n", "file_path": "cli/js/ops/get_random_values.ts", "rank": 0, "score": 62465.534818553264 }, { "content": "pub fn op_exit2(s: &mut TSState, v: Value) -> Result<Value, ErrBox> {\n\n let v: Exit = serde_json::from_value(v)?;\n\n s.exit_code = v.code;\n\n std::process::exit(v.code)\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct EmitResult {\n\n pub emit_skipped: bool,\n\n pub diagnostics: Vec<String>,\n\n pub emitted_files: Vec<String>,\n\n}\n\n\n", "file_path": "deno_typescript/ops.rs", "rank": 1, "score": 60936.153687436476 }, { "content": "pub fn op_write_file(s: &mut TSState, v: Value) -> Result<Value, ErrBox> {\n\n let v: WriteFile = serde_json::from_value(v)?;\n\n let module_specifier = ModuleSpecifier::resolve_url_or_path(&v.file_name)?;\n\n if s.bundle {\n\n std::fs::write(&v.file_name, &v.data)?;\n\n }\n\n s.written_files.push(WrittenFile {\n\n url: module_specifier.as_str().to_string(),\n\n module_name: v.module_name,\n\n source_code: v.data,\n\n });\n\n Ok(json!(true))\n\n}\n\n\n", "file_path": "deno_typescript/ops.rs", "rank": 2, "score": 60199.207052450925 }, { "content": "pub fn op_load_module(s: &mut TSState, v: Value) -> Result<Value, ErrBox> {\n\n let v: LoadModule = serde_json::from_value(v)?;\n\n let (module_name, source_code) = if v.module_url.starts_with(\"$asset$/\") {\n\n let asset = v.module_url.replace(\"$asset$/\", \"\");\n\n\n\n let source_code = match 
crate::get_asset(&asset) {\n\n Some(code) => code.to_string(),\n\n None => {\n\n return Err(\n\n std::io::Error::new(std::io::ErrorKind::NotFound, \"Asset not found\")\n\n .into(),\n\n );\n\n }\n\n };\n\n\n\n (asset, source_code)\n\n } else {\n\n assert!(!v.module_url.starts_with(\"$assets$\"), \"you meant $asset$\");\n\n let module_specifier = ModuleSpecifier::resolve_url_or_path(&v.module_url)?;\n\n let module_url = module_specifier.as_url();\n", "file_path": "deno_typescript/ops.rs", "rank": 3, "score": 60199.207052450925 }, { "content": "type Dispatcher = fn(state: &mut TSState, args: Value) -> Result<Value, ErrBox>;\n\n\n", "file_path": "deno_typescript/ops.rs", "rank": 4, "score": 60199.207052450925 }, { "content": "pub fn op_set_emit_result(s: &mut TSState, v: Value) -> Result<Value, ErrBox> {\n\n let v: EmitResult = serde_json::from_value(v)?;\n\n s.emit_result = Some(v);\n\n Ok(json!(true))\n\n}\n", "file_path": "deno_typescript/ops.rs", "rank": 5, "score": 59479.87232087146 }, { "content": "pub fn use_color() -> bool {\n\n !(*NO_COLOR)\n\n}\n\n\n", "file_path": "cli/colors.rs", "rank": 6, "score": 55176.42277155495 }, { "content": "#[cfg(test)]\n\nfn set_prompt_result(value: bool) {\n\n STUB_PROMPT_VALUE.store(value, Ordering::SeqCst);\n\n}\n\n\n\n// When testing, permission prompt returns the value of STUB_PROMPT_VALUE\n\n// which we set from the test functions.\n", "file_path": "cli/permissions.rs", "rank": 7, "score": 53955.86802574348 }, { "content": "fn op_get_random_values(\n\n state: &State,\n\n _args: Value,\n\n zero_copy: Option<ZeroCopyBuf>,\n\n) -> Result<JsonOp, OpError> {\n\n assert!(zero_copy.is_some());\n\n\n\n if let Some(ref mut seeded_rng) = state.borrow_mut().seeded_rng {\n\n seeded_rng.fill(&mut zero_copy.unwrap()[..]);\n\n } else {\n\n let mut rng = thread_rng();\n\n rng.fill(&mut zero_copy.unwrap()[..]);\n\n }\n\n\n\n Ok(JsonOp::Sync(json!({})))\n\n}\n", "file_path": "cli/ops/random.rs", "rank": 8, "score": 53955.86802574348 }, { 
"content": "class AddrInUse extends Error {\n\n constructor(msg: string) {\n\n super(msg);\n\n this.name = \"AddrInUse\";\n\n }\n", "file_path": "cli/js/errors.ts", "rank": 9, "score": 52834.84823103795 }, { "content": "function printValue(value: unknown, path: string): void {\n\n if (typeof value === \"string\") {\n\n value = colors.green('\"' + value + '\"');\n\n } else if (typeof value === \"number\") {\n\n value = value.toString();\n\n } else if (typeof value === \"boolean\") {\n\n value = colors.yellow(value.toString());\n\n }\n\n\n\n console.log(path + \" = \" + value);\n", "file_path": "std/examples/catj.ts", "rank": 10, "score": 52810.97967910614 }, { "content": " constructor(msg: string) {\n\n super(msg);\n\n this.name = \"AddrInUse\";\n", "file_path": "cli/js/errors.ts", "rank": 11, "score": 51737.04112241562 }, { "content": "function hasHeaderValueOf(s: string, value: string): boolean {\n\n return new RegExp(`^${value}[\\t\\s]*;?`).test(s);\n", "file_path": "cli/js/web/fetch.ts", "rank": 12, "score": 51713.66851334842 }, { "content": "export function cloneValue(value: any): any {\n\n switch (typeof value) {\n\n case \"number\":\n\n case \"string\":\n\n case \"boolean\":\n\n case \"undefined\":\n\n case \"bigint\":\n\n return value;\n\n case \"object\": {\n\n if (objectCloneMemo.has(value)) {\n\n return objectCloneMemo.get(value);\n\n }\n\n if (value === null) {\n\n return value;\n\n }\n\n if (value instanceof Date) {\n\n return new Date(value.valueOf());\n\n }\n\n if (value instanceof RegExp) {\n\n return new RegExp(value);\n\n }\n\n if (value instanceof SharedArrayBuffer) {\n\n return value;\n\n }\n\n if (value instanceof ArrayBuffer) {\n\n const cloned = cloneArrayBuffer(\n\n value,\n\n 0,\n\n value.byteLength,\n\n ArrayBuffer\n\n );\n\n objectCloneMemo.set(value, cloned);\n\n return cloned;\n\n }\n\n if (ArrayBuffer.isView(value)) {\n\n const clonedBuffer = cloneValue(value.buffer) as ArrayBufferLike;\n\n // Use DataViewConstructor type purely for 
type-checking, can be a\n\n // DataView or TypedArray. They use the same constructor signature,\n\n // only DataView has a length in bytes and TypedArrays use a length in\n\n // terms of elements, so we adjust for that.\n\n let length: number;\n\n if (value instanceof DataView) {\n\n length = value.byteLength;\n\n } else {\n\n length = (value as Uint8Array).length;\n\n }\n\n return new (value.constructor as DataViewConstructor)(\n\n clonedBuffer,\n\n value.byteOffset,\n\n length\n\n );\n\n }\n\n if (value instanceof Map) {\n\n const clonedMap = new Map();\n\n objectCloneMemo.set(value, clonedMap);\n\n value.forEach((v, k) => clonedMap.set(k, cloneValue(v)));\n\n return clonedMap;\n\n }\n\n if (value instanceof Set) {\n\n const clonedSet = new Map();\n\n objectCloneMemo.set(value, clonedSet);\n\n value.forEach((v, k) => clonedSet.set(k, cloneValue(v)));\n\n return clonedSet;\n\n }\n\n\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n\n const clonedObj = {} as Record<string, any>;\n\n objectCloneMemo.set(value, clonedObj);\n\n const sourceKeys = Object.getOwnPropertyNames(value);\n\n for (const key of sourceKeys) {\n\n clonedObj[key] = cloneValue(value[key]);\n\n }\n\n return clonedObj;\n\n }\n\n case \"symbol\":\n\n case \"function\":\n\n default:\n\n throw new DOMException(\"Uncloneable value in stream\", \"DataCloneError\");\n\n }\n", "file_path": "cli/js/web/util.ts", "rank": 13, "score": 51713.66851334842 }, { "content": "export const value = `3 imports ${redirect}`;\n", "file_path": "cli/tests/subdir/redirects/redirect3.js", "rank": 14, "score": 51713.66851334842 }, { "content": "export const value = `4 imports ${redirect}`;\n", "file_path": "cli/tests/subdir/redirects/redirect4.ts", "rank": 15, "score": 51713.66851334842 }, { "content": "function hasHeaderValueOf(s: string, value: string): boolean {\n\n return new RegExp(`^${value}[\\t\\s]*;?`).test(s);\n", "file_path": "cli/js/web/body.ts", "rank": 16, "score": 51713.66851334842 }, { 
"content": "interface TaggedYieldedValue<T> {\n\n iterator: AsyncIterableIterator<T>;\n\n value: T;\n", "file_path": "std/util/async.ts", "rank": 17, "score": 51713.66851334842 }, { "content": "function validateValue(value: string): void {\n\n if (invalidHeaderCharRegex.test(value)) {\n\n throw new TypeError(`${value} is not a legal HTTP header value`);\n\n }\n", "file_path": "cli/js/web/headers.ts", "rank": 18, "score": 51713.66851334842 }, { "content": "class KeyValuePair {\n\n constructor(public key: string, public value: unknown) {}\n", "file_path": "std/encoding/toml.ts", "rank": 19, "score": 51713.66851334842 }, { "content": " get bodyUsed(): boolean {\n\n return this.#bodyUsed;\n", "file_path": "cli/js/web/fetch.ts", "rank": 20, "score": 50683.92606317046 }, { "content": " get bodyUsed(): boolean {\n\n if (this.body === null) return false;\n\n return this.body.bodyUsed;\n", "file_path": "cli/js/web/fetch.ts", "rank": 21, "score": 50683.92606317046 }, { "content": " get bodyUsed(): boolean {\n\n if (this.body && this.body.locked) {\n\n return true;\n\n }\n\n return false;\n", "file_path": "cli/js/web/body.ts", "rank": 22, "score": 50683.92606317046 }, { "content": "export const BodyUsedError =\n", "file_path": "cli/js/web/body.ts", "rank": 23, "score": 50683.92606317046 }, { "content": "fn json_err(err: OpError) -> Value {\n\n json!({\n\n \"message\": err.msg,\n\n \"kind\": err.kind as u32,\n\n })\n\n}\n\n\n", "file_path": "cli/ops/dispatch_json.rs", "rank": 24, "score": 50661.02920698841 }, { "content": "export function dequeueValue<R>(container: Container<R>): R {\n\n assert(sym.queue in container && sym.queueTotalSize in container);\n\n assert(container[sym.queue].length);\n\n const pair = container[sym.queue].shift()!;\n\n container[sym.queueTotalSize] -= pair.size;\n\n if (container[sym.queueTotalSize] <= 0) {\n\n container[sym.queueTotalSize] = 0;\n\n }\n\n return pair.value as R;\n", "file_path": "cli/js/web/streams/internals.ts", "rank": 25, "score": 
50661.02920698841 }, { "content": " constructor(public key: string, public value: unknown) {}\n", "file_path": "std/encoding/toml.ts", "rank": 26, "score": 50661.02920698841 }, { "content": "function peekQueueValue<T>(container: Container<T>): T | \"close\" {\n\n assert(sym.queue in container && sym.queueTotalSize in container);\n\n assert(container[sym.queue].length);\n\n const [pair] = container[sym.queue];\n\n return pair.value as T;\n", "file_path": "cli/js/web/streams/internals.ts", "rank": 27, "score": 49650.3882725092 }, { "content": "fn serialize_worker_event(event: WorkerEvent) -> Value {\n\n match event {\n\n WorkerEvent::Message(buf) => json!({ \"type\": \"msg\", \"data\": buf }),\n\n WorkerEvent::TerminalError(error) => {\n\n let mut serialized_error = json!({\n\n \"type\": \"terminalError\",\n\n \"error\": {\n\n \"message\": error.to_string(),\n\n }\n\n });\n\n\n\n if let Ok(js_error) = error.downcast::<JSError>() {\n\n serialized_error = json!({\n\n \"type\": \"terminalError\",\n\n \"error\": {\n\n \"message\": js_error.message,\n\n \"fileName\": js_error.script_resource_name,\n\n \"lineNumber\": js_error.line_number,\n\n \"columnNumber\": js_error.start_column,\n\n }\n", "file_path": "cli/ops/worker_host.rs", "rank": 28, "score": 49650.3882725092 }, { "content": "function getHeaderValueParams(value: string): Map<string, string> {\n\n const params = new Map();\n\n // Forced to do so for some Map constructor param mismatch\n\n value\n\n .split(\";\")\n\n .slice(1)\n\n .map((s): string[] => s.trim().split(\"=\"))\n\n .filter((arr): boolean => arr.length > 1)\n\n .map(([k, v]): [string, string] => [k, v.replace(/^\"([^\"]*)\"$/, \"$1\")])\n\n .forEach(([k, v]): Map<string, string> => params.set(k, v));\n\n return params;\n", "file_path": "cli/js/web/fetch.ts", "rank": 29, "score": 49650.3882725092 }, { "content": "function getHeaderValueParams(value: string): Map<string, string> {\n\n const params = new Map();\n\n // Forced to do so for some Map 
constructor param mismatch\n\n value\n\n .split(\";\")\n\n .slice(1)\n\n .map((s): string[] => s.trim().split(\"=\"))\n\n .filter((arr): boolean => arr.length > 1)\n\n .map(([k, v]): [string, string] => [k, v.replace(/^\"([^\"]*)\"$/, \"$1\")])\n\n .forEach(([k, v]): Map<string, string> => params.set(k, v));\n\n return params;\n", "file_path": "cli/js/web/body.ts", "rank": 30, "score": 49650.3882725092 }, { "content": "function enqueueValueWithSize<R>(\n\n container: Container<R>,\n\n value: R,\n\n size: number\n\n): void {\n\n assert(sym.queue in container && sym.queueTotalSize in container);\n\n size = Number(size);\n\n if (!isFiniteNonNegativeNumber(size)) {\n\n throw new RangeError(\"size must be a finite non-negative number.\");\n\n }\n\n container[sym.queue].push({ value, size });\n\n container[sym.queueTotalSize] += size;\n", "file_path": "cli/js/web/streams/internals.ts", "rank": 31, "score": 49650.3882725092 }, { "content": " useCaseSensitiveFileNames(): boolean {\n\n return true;\n", "file_path": "cli/js/compiler.ts", "rank": 32, "score": 48701.28256589939 }, { "content": " useCaseSensitiveFileNames() {\n\n return false;\n", "file_path": "deno_typescript/compiler_main.js", "rank": 33, "score": 47767.012433904194 }, { "content": " value(idx: number): string | null {\n\n return idx in arr ? 
arr[idx] : null;\n", "file_path": "cli/js/web/dom_util.ts", "rank": 34, "score": 47745.433316047725 }, { "content": "fn to_msec(maybe_time: Result<SystemTime, io::Error>) -> serde_json::Value {\n\n match maybe_time {\n\n Ok(time) => {\n\n let msec = time\n\n .duration_since(UNIX_EPOCH)\n\n .map(|t| t.as_secs_f64() * 1000f64)\n\n .unwrap_or_else(|err| err.duration().as_secs_f64() * -1000f64);\n\n serde_json::Number::from_f64(msec)\n\n .map(serde_json::Value::Number)\n\n .unwrap_or(serde_json::Value::Null)\n\n }\n\n Err(_) => serde_json::Value::Null,\n\n }\n\n}\n\n\n", "file_path": "cli/ops/fs.rs", "rank": 35, "score": 46846.7401442833 }, { "content": " *values(): IterableIterator<string> {\n\n for (const [, value] of this.#params) {\n\n yield value;\n\n }\n", "file_path": "cli/js/web/url_search_params.ts", "rank": 36, "score": 46846.7401442833 }, { "content": " valueOf(): number {\n\n called = true;\n\n return 1;\n", "file_path": "cli/js/tests/timers_test.ts", "rank": 37, "score": 46846.7401442833 }, { "content": " *values(): IterableIterator<V> {\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n\n for (const [, value] of (this as any)[dataSymbol]) {\n\n yield value;\n\n }\n", "file_path": "cli/js/web/dom_iterable.ts", "rank": 38, "score": 46846.7401442833 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nimport { sendSync } from \"./dispatch_json.ts\";\n\nimport { assert } from \"../util.ts\";\n\n\n\nexport function getRandomValues<\n\n T extends\n\n | Int8Array\n\n | Uint8Array\n\n | Uint8ClampedArray\n\n | Int16Array\n\n | Uint16Array\n\n | Int32Array\n\n | Uint32Array\n\n>(typedArray: T): T {\n\n assert(typedArray !== null, \"Input must not be null\");\n\n assert(typedArray.length <= 65536, \"Input must not be longer than 65536\");\n\n const ui8 = new Uint8Array(\n\n typedArray.buffer,\n\n typedArray.byteOffset,\n\n typedArray.byteLength\n\n );\n\n sendSync(\"op_get_random_values\", {}, ui8);\n\n return typedArray;\n\n}\n", "file_path": "cli/js/ops/get_random_values.ts", "rank": 39, "score": 44342.79985043082 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nimport { unitTest, assertNotEquals, assertStrictEq } from \"./test_util.ts\";\n\n\n\nunitTest(function getRandomValuesInt8Array(): void {\n\n const arr = new Int8Array(32);\n\n crypto.getRandomValues(arr);\n\n assertNotEquals(arr, new Int8Array(32));\n\n});\n\n\n\nunitTest(function getRandomValuesUint8Array(): void {\n\n const arr = new Uint8Array(32);\n\n crypto.getRandomValues(arr);\n\n assertNotEquals(arr, new Uint8Array(32));\n\n});\n\n\n\nunitTest(function getRandomValuesUint8ClampedArray(): void {\n\n const arr = new Uint8ClampedArray(32);\n\n crypto.getRandomValues(arr);\n\n assertNotEquals(arr, new Uint8ClampedArray(32));\n\n});\n\n\n\nunitTest(function getRandomValuesInt16Array(): void {\n\n const arr = new Int16Array(4);\n\n crypto.getRandomValues(arr);\n\n assertNotEquals(arr, new Int16Array(4));\n\n});\n\n\n\nunitTest(function getRandomValuesUint16Array(): void {\n\n const arr = new Uint16Array(4);\n\n crypto.getRandomValues(arr);\n\n assertNotEquals(arr, new Uint16Array(4));\n\n});\n\n\n\nunitTest(function getRandomValuesInt32Array(): void {\n\n const arr = new Int32Array(8);\n\n crypto.getRandomValues(arr);\n\n assertNotEquals(arr, 
new Int32Array(8));\n\n});\n\n\n\nunitTest(function getRandomValuesUint32Array(): void {\n\n const arr = new Uint32Array(8);\n\n crypto.getRandomValues(arr);\n\n assertNotEquals(arr, new Uint32Array(8));\n\n});\n\n\n\nunitTest(function getRandomValuesReturnValue(): void {\n\n const arr = new Uint32Array(8);\n\n const rtn = crypto.getRandomValues(arr);\n\n assertNotEquals(arr, new Uint32Array(8));\n\n assertStrictEq(rtn, arr);\n\n});\n", "file_path": "cli/js/tests/get_random_values_test.ts", "rank": 40, "score": 43566.59393804921 }, { "content": "pub fn blocking_json<F>(is_sync: bool, f: F) -> Result<JsonOp, OpError>\n\nwhere\n\n F: 'static + Send + FnOnce() -> JsonResult,\n\n{\n\n if is_sync {\n\n Ok(JsonOp::Sync(f()?))\n\n } else {\n\n let fut = async move { tokio::task::spawn_blocking(f).await.unwrap() };\n\n Ok(JsonOp::Async(fut.boxed_local()))\n\n }\n\n}\n", "file_path": "cli/ops/dispatch_json.rs", "rank": 41, "score": 12212.228698135834 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse super::io::{StreamResource, StreamResourceHolder};\n\nuse crate::http_util::{create_http_client, HttpBody};\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\nuse http::header::HeaderName;\n\nuse http::header::HeaderValue;\n\nuse http::Method;\n\nuse std::convert::From;\n\n\n", "file_path": "cli/ops/fetch.rs", "rank": 42, "score": 10.683426256367984 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse crate::op_error::OpError;\n\nuse deno_core::Buf;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::Op;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\npub use serde_derive::Deserialize;\n\nuse serde_json::json;\n\npub use serde_json::Value;\n\nuse std::future::Future;\n\nuse std::pin::Pin;\n\n\n\npub type JsonResult = Result<Value, OpError>;\n\n\n\npub type AsyncJsonOp = Pin<Box<dyn Future<Output = JsonResult>>>;\n\n\n\npub enum JsonOp {\n\n Sync(Value),\n\n Async(AsyncJsonOp),\n\n /// AsyncUnref is the variation of Async, which doesn't block the program\n\n /// exiting.\n\n AsyncUnref(AsyncJsonOp),\n\n}\n\n\n", "file_path": "cli/ops/dispatch_json.rs", "rank": 43, "score": 10.633055033255925 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{JsonOp, Value};\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse rand::thread_rng;\n\nuse rand::Rng;\n\n\n", "file_path": "cli/ops/random.rs", "rank": 44, "score": 10.135038412300073 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\n\n", "file_path": "cli/ops/resources.rs", "rank": 45, "score": 10.12207219987194 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse super::dispatch_json::{JsonOp, Value};\n\nuse crate::colors;\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse crate::version;\n\nuse crate::DenoSubcommand;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse std::env;\n\n\n", "file_path": "cli/ops/runtime.rs", "rank": 46, "score": 10.075815440833036 }, { "content": "use crate::TSState;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse deno_core::Op;\n\nuse serde::Deserialize;\n\nuse serde_json::json;\n\nuse serde_json::Value;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct WrittenFile {\n\n pub url: String,\n\n pub module_name: String,\n\n pub source_code: String,\n\n}\n\n\n", "file_path": "deno_typescript/ops.rs", "rank": 47, "score": 9.980468457926943 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::fs as deno_fs;\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse std::path::Path;\n\n\n", "file_path": "cli/ops/permissions.rs", "rank": 48, "score": 9.980468457926943 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\nuse std::time::Duration;\n\nuse std::time::Instant;\n\n\n", "file_path": "cli/ops/timers.rs", "rank": 49, "score": 9.920669584993252 }, { "content": " 2 => DiagnosticCategory::Info,\n\n 3 => DiagnosticCategory::Error,\n\n 4 => DiagnosticCategory::Warning,\n\n 5 => DiagnosticCategory::Suggestion,\n\n _ => panic!(\"Unknown value: {}\", value),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::colors::strip_ansi_codes;\n\n\n\n fn diagnostic1() -> Diagnostic {\n\n Diagnostic {\n\n items: vec![\n\n DiagnosticItem {\n\n message: \"Type '(o: T) => { v: any; f: (x: B) => string; }[]' is not assignable to type '(r: B) => Value<B>[]'.\".to_string(),\n\n message_chain: Some(DiagnosticMessageChain {\n", "file_path": "cli/diagnostics.rs", "rank": 50, "score": 9.919849674002759 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{blocking_json, Deserialize, JsonOp, Value};\n\nuse crate::op_error::OpError;\n\nuse crate::repl;\n\nuse crate::repl::Repl;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\n\n", "file_path": "cli/ops/repl.rs", "rank": 51, "score": 9.85595920583834 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::compilers::runtime_compile;\n\nuse crate::compilers::runtime_transpile;\n\nuse crate::futures::FutureExt;\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse std::collections::HashMap;\n\n\n", "file_path": "cli/ops/runtime_compiler.rs", "rank": 53, "score": 9.730973577073204 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{JsonOp, Value};\n\nuse crate::op_error::OpError;\n\nuse crate::ops::json_op;\n\nuse crate::state::State;\n\nuse crate::web_worker::WebWorkerHandle;\n\nuse crate::worker::WorkerEvent;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::channel::mpsc;\n\nuse std::convert::From;\n\n\n", "file_path": "cli/ops/web_worker.rs", "rank": 54, "score": 9.71169842530895 }, { "content": "use super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::fs as deno_fs;\n\nuse crate::op_error::OpError;\n\nuse crate::ops::json_op;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse dlopen::symbor::Library;\n\nuse std::ffi::OsStr;\n\nuse std::path::Path;\n\n\n\npub type PluginInitFn = fn(isolate: &mut CoreIsolate);\n\n\n", "file_path": "cli/ops/plugins.rs", "rank": 55, "score": 9.673602455041642 }, { "content": "use deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse indexmap::IndexMap;\n\nuse serde_json::Map;\n\nuse serde_json::Value;\n\nuse std::cmp::Ordering;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::fs;\n\nuse std::io;\n\nuse url::Url;\n\n\n\n#[derive(Debug)]\n\npub struct ImportMapError {\n\n pub msg: String,\n\n}\n\n\n\nimpl ImportMapError {\n\n pub fn new(msg: &str) -> Self {\n\n ImportMapError {\n", "file_path": "cli/import_map.rs", "rank": 56, "score": 9.650422586229014 }, { "content": "// Copyright 2018-2020 the Deno authors. 
All rights reserved. MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::diagnostics::Diagnostic;\n\nuse crate::op_error::OpError;\n\nuse crate::source_maps::get_orig_position;\n\nuse crate::source_maps::CachedMaps;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse std::collections::HashMap;\n\n\n", "file_path": "cli/ops/errors.rs", "rank": 57, "score": 9.6503089773089 }, { "content": "use deno_core::Buf;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse log::info;\n\nuse regex::Regex;\n\nuse serde::Deserialize;\n\nuse serde_json::json;\n\nuse serde_json::Value;\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::fs;\n\nuse std::hash::BuildHasher;\n\nuse std::io;\n\nuse std::ops::Deref;\n\nuse std::path::PathBuf;\n\nuse std::str;\n\nuse std::sync::atomic::Ordering;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse url::Url;\n", "file_path": "cli/compilers/ts.rs", "rank": 58, "score": 9.599577587051908 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::Deserialize;\n\nuse super::dispatch_json::JsonOp;\n\nuse super::dispatch_json::Value;\n\nuse crate::futures::future::try_join_all;\n\nuse crate::msg;\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ModuleLoader;\n\nuse deno_core::ModuleSpecifier;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\n\n", "file_path": "cli/ops/compiler.rs", "rank": 59, "score": 9.58945881009496 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse crate::colors;\n\nuse crate::flags::Flags;\n\nuse crate::op_error::OpError;\n\nuse std::collections::HashSet;\n\nuse std::fmt;\n\n#[cfg(not(test))]\n\nuse std::io;\n\nuse std::path::{Path, PathBuf};\n\n#[cfg(test)]\n\nuse std::sync::atomic::AtomicBool;\n\n#[cfg(test)]\n\nuse std::sync::atomic::Ordering;\n\n#[cfg(test)]\n\nuse std::sync::Mutex;\n\nuse url::Url;\n\n\n\nconst PERMISSION_EMOJI: &str = \"⚠️\";\n\n\n\n/// Tri-state value for storing permission state\n", "file_path": "cli/permissions.rs", "rank": 60, "score": 9.345942137818618 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{JsonOp, Value};\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\n\n\n#[cfg(unix)]\n\nuse super::dispatch_json::Deserialize;\n\n#[cfg(unix)]\n\nuse futures::future::{poll_fn, FutureExt};\n\n#[cfg(unix)]\n\nuse std::task::Waker;\n\n#[cfg(unix)]\n\nuse tokio::signal::unix::{signal, Signal, SignalKind};\n\n\n", "file_path": "cli/ops/signal.rs", "rank": 61, "score": 9.310955950934545 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::poll_fn;\n\nuse futures::future::FutureExt;\n\nuse notify::event::Event as NotifyEvent;\n\nuse notify::Error as NotifyError;\n\nuse notify::EventKind;\n\nuse notify::RecommendedWatcher;\n\nuse notify::RecursiveMode;\n\nuse notify::Watcher;\n\nuse serde::Serialize;\n\nuse std::convert::From;\n\nuse std::path::PathBuf;\n\nuse tokio::sync::mpsc;\n\n\n", "file_path": "cli/ops/fs_events.rs", "rank": 62, "score": 9.23268485331128 }, { "content": "use std::env::VarError;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::io;\n\n\n\n// Warning! The values in this enum are duplicated in js/errors.ts\n\n// Update carefully!\n\n#[derive(Clone, Copy, PartialEq, Debug)]\n\npub enum ErrorKind {\n\n NotFound = 1,\n\n PermissionDenied = 2,\n\n ConnectionRefused = 3,\n\n ConnectionReset = 4,\n\n ConnectionAborted = 5,\n\n NotConnected = 6,\n\n AddrInUse = 7,\n\n AddrNotAvailable = 8,\n\n BrokenPipe = 9,\n\n AlreadyExists = 10,\n\n InvalidData = 13,\n", "file_path": "cli/op_error.rs", "rank": 63, "score": 9.232338600710644 }, { "content": "use crate::compilers::CompiledModule;\n\nuse serde_json::json;\n\npub use serde_json::Value;\n\nuse std::collections::HashMap;\n\nuse std::io::Result;\n\n\n\npub struct Lockfile {\n\n need_read: bool,\n\n map: HashMap<String, String>,\n\n pub filename: String,\n\n}\n\n\n\nimpl Lockfile {\n\n pub fn new(filename: String) -> Lockfile {\n\n Lockfile {\n\n map: HashMap::new(),\n\n filename,\n\n need_read: true,\n\n }\n\n }\n", "file_path": "cli/lockfile.rs", "rank": 64, "score": 9.232338600710644 }, { "content": "use rand::rngs::StdRng;\n\nuse rand::SeedableRng;\n\nuse serde_json::Value;\n\nuse std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::ops::Deref;\n\nuse std::path::Path;\n\nuse 
std::pin::Pin;\n\nuse std::rc::Rc;\n\nuse std::str;\n\nuse std::thread::JoinHandle;\n\nuse std::time::Instant;\n\n#[derive(Copy, Clone, Eq, PartialEq)]\n\npub enum DebugType {\n\n /// Can be debugged, will wait for debugger when --inspect-brk given.\n\n Main,\n\n /// Can be debugged, never waits for debugger.\n\n Dependent,\n\n /// No inspector instance is created.\n\n Internal,\n", "file_path": "cli/state.rs", "rank": 65, "score": 9.215899144386857 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse super::io::{std_file_resource, StreamResource, StreamResourceHolder};\n\nuse crate::op_error::OpError;\n\nuse crate::signal::kill;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ResourceTable;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::poll_fn;\n\nuse futures::future::FutureExt;\n\nuse futures::TryFutureExt;\n\nuse std::convert::From;\n\nuse tokio::process::Command;\n\n\n\n#[cfg(unix)]\n\nuse std::os::unix::process::ExitStatusExt;\n\n\n", "file_path": "cli/ops/process.rs", "rank": 66, "score": 9.115482064298313 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse super::io::{StreamResource, StreamResourceHolder};\n\nuse crate::op_error::OpError;\n\nuse crate::resolve_addr::resolve_addr;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::poll_fn;\n\nuse futures::future::FutureExt;\n\nuse std::convert::From;\n\nuse std::fs::File;\n\nuse std::io::BufReader;\n\nuse std::net::SocketAddr;\n\nuse std::path::Path;\n\nuse std::sync::Arc;\n\nuse std::task::Context;\n\nuse std::task::Poll;\n\nuse tokio::net::TcpListener;\n\nuse tokio::net::TcpStream;\n", "file_path": "cli/ops/tls.rs", "rank": 67, "score": 9.03619111882157 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse super::io::{StreamResource, StreamResourceHolder};\n\nuse crate::op_error::OpError;\n\nuse crate::resolve_addr::resolve_addr;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ResourceTable;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::poll_fn;\n\nuse futures::future::FutureExt;\n\nuse std::convert::From;\n\nuse std::net::Shutdown;\n\nuse std::net::SocketAddr;\n\nuse std::task::Context;\n\nuse std::task::Poll;\n\nuse tokio::net::TcpListener;\n\nuse tokio::net::TcpStream;\n\nuse tokio::net::UdpSocket;\n\n\n\n#[cfg(unix)]\n\nuse super::net_unix;\n\n\n", "file_path": "cli/ops/net.rs", "rank": 68, "score": 8.97525844606281 }, { "content": "use super::dispatch_json::JsonOp;\n\nuse super::io::std_file_resource;\n\nuse super::io::{StreamResource, StreamResourceHolder};\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\n#[cfg(unix)]\n\nuse nix::sys::termios;\n\nuse serde_derive::Deserialize;\n\nuse serde_json::Value;\n\n\n\n#[cfg(windows)]\n\nuse winapi::shared::minwindef::DWORD;\n\n#[cfg(windows)]\n\nuse 
winapi::um::wincon;\n\n#[cfg(windows)]\n\nconst RAW_MODE_MASK: DWORD = wincon::ENABLE_LINE_INPUT\n\n | wincon::ENABLE_ECHO_INPUT\n\n | wincon::ENABLE_PROCESSED_INPUT;\n\n#[cfg(windows)]\n", "file_path": "cli/ops/tty.rs", "rank": 69, "score": 8.96522167196672 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse crate::version;\n\nuse bytes::Bytes;\n\nuse deno_core::ErrBox;\n\nuse futures::future::FutureExt;\n\nuse reqwest::header::HeaderMap;\n\nuse reqwest::header::HeaderValue;\n\nuse reqwest::header::IF_NONE_MATCH;\n\nuse reqwest::header::LOCATION;\n\nuse reqwest::header::USER_AGENT;\n\nuse reqwest::redirect::Policy;\n\nuse reqwest::Client;\n\nuse reqwest::Response;\n\nuse reqwest::StatusCode;\n\nuse std::cmp::min;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::future::Future;\n\nuse std::io;\n\nuse std::io::Read;\n\nuse std::pin::Pin;\n\nuse std::task::Context;\n\nuse std::task::Poll;\n\nuse tokio::io::AsyncRead;\n\nuse url::Url;\n\n\n\n/// Create new instance of async reqwest::Client. This client supports\n\n/// proxies and doesn't follow redirects.\n", "file_path": "cli/http_util.rs", "rank": 70, "score": 8.954472509451517 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse super::dispatch_json::{Deserialize, JsonOp, Value};\n\nuse crate::fmt_errors::JSError;\n\nuse crate::global_state::GlobalState;\n\nuse crate::op_error::OpError;\n\nuse crate::ops::io::get_stdio;\n\nuse crate::permissions::Permissions;\n\nuse crate::startup_data;\n\nuse crate::state::State;\n\nuse crate::tokio_util::create_basic_runtime;\n\nuse crate::web_worker::WebWorker;\n\nuse crate::web_worker::WebWorkerHandle;\n\nuse crate::worker::WorkerEvent;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\nuse std::convert::From;\n\nuse std::thread::JoinHandle;\n\n\n", "file_path": "cli/ops/worker_host.rs", "rank": 71, "score": 8.896424631522107 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse crate::deno_dir::DenoDir;\n\nuse crate::op_error::OpError;\n\nuse deno_core::ErrBox;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\n#[cfg(not(windows))]\n\nuse rustyline::Editor;\n\n\n\n// Work around the issue that on Windows, `struct Editor` does not implement the\n\n// `Send` trait, because it embeds a windows HANDLE which is a type alias for\n\n// *mut c_void. This value isn't actually a pointer and there's nothing that\n\n// can be mutated through it, so hack around it. TODO: a prettier solution.\n\n#[cfg(windows)]\n\nuse std::ops::{Deref, DerefMut};\n\n\n\n#[cfg(windows)]\n", "file_path": "cli/repl.rs", "rank": 72, "score": 8.763563772844972 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\n// Some deserializer fields are only used on Unix and Windows build fails without it\n\nuse super::dispatch_json::{blocking_json, Deserialize, JsonOp, Value};\n\nuse super::io::std_file_resource;\n\nuse super::io::{FileMetadata, StreamResource, StreamResourceHolder};\n\nuse crate::fs::resolve_from_cwd;\n\nuse crate::op_error::OpError;\n\nuse crate::ops::dispatch_json::JsonResult;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\nuse std::convert::From;\n\nuse std::env::{current_dir, set_current_dir, temp_dir};\n\nuse std::io;\n\nuse std::path::{Path, PathBuf};\n\nuse std::time::SystemTime;\n\nuse std::time::UNIX_EPOCH;\n\n\n\nuse rand::{thread_rng, Rng};\n\n\n", "file_path": "cli/ops/fs.rs", "rank": 73, "score": 8.734366707907151 }, { "content": " .arg(\n\n Arg::with_name(\"allow-net\")\n\n .long(\"allow-net\")\n\n .min_values(0)\n\n .takes_value(true)\n\n .use_delimiter(true)\n\n .require_equals(true)\n\n .help(\"Allow network access\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"allow-env\")\n\n .long(\"allow-env\")\n\n .help(\"Allow environment access\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"allow-run\")\n\n .long(\"allow-run\")\n\n .help(\"Allow running subprocesses\"),\n\n )\n\n .arg(\n", "file_path": "cli/flags.rs", "rank": 74, "score": 7.973747286126569 }, { "content": "export interface ReadOptions {\n\n comma?: string;\n\n comment?: string;\n\n trimLeadingSpace?: boolean;\n\n lazyQuotes?: boolean;\n\n fieldsPerRecord?: number;\n", "file_path": "std/encoding/csv.ts", "rank": 75, "score": 7.67803196357265 }, { "content": ") -> Result<Value, OpError> {\n\n let req_msg = json!({\n\n \"type\": msg::CompilerRequestType::RuntimeTranspile as i32,\n\n \"sources\": sources,\n\n \"options\": options,\n\n })\n\n .to_string()\n\n .into_boxed_str()\n\n .into_boxed_bytes();\n\n\n\n let msg = execute_in_thread(global_state, req_msg).await?;\n\n let json_str = 
std::str::from_utf8(&msg).unwrap();\n\n let v = serde_json::from_str::<serde_json::Value>(json_str)\n\n .expect(\"Error decoding JSON string.\");\n\n Ok(v)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "cli/compilers/ts.rs", "rank": 76, "score": 7.404039252092098 }, { "content": "# Testing\n\n\n\nThis module provides a few basic utilities to make testing easier and consistent\n\nin Deno.\n\n\n\n## Usage\n\n\n\n`testing/asserts.ts` module provides range of assertion helpers. If the\n\nassertion is false an `AssertionError` will be thrown which will result in\n\npretty-printed diff of failing assertion.\n\n\n\n- `equal()` - Deep comparison function, where `actual` and `expected` are\n\n compared deeply, and if they vary, `equal` returns `false`.\n\n- `assert()` - Expects a boolean value, throws if the value is `false`.\n\n- `assertEquals()` - Uses the `equal` comparison and throws if the `actual` and\n\n `expected` are not equal.\n\n- `assertNotEquals()` - Uses the `equal` comparison and throws if the `actual`\n\n and `expected` are equal.\n\n- `assertStrictEq()` - Compares `actual` and `expected` strictly, therefore for\n\n non-primitives the values must reference the same instance.\n\n- `assertStrContains()` - Make an assertion that `actual` contains `expected`.\n\n- `assertMatch()` - Make an assertion that `actual` match RegExp `expected`.\n\n- `assertArrayContains()` - Make an assertion that `actual` array contains the\n\n `expected` values.\n\n- `assertThrows()` - Expects the passed `fn` to throw. If `fn` does not throw,\n\n this function does. Also compares any errors thrown to an optional expected\n\n `Error` class and checks that the error `.message` includes an optional\n\n string.\n\n- `assertThrowsAsync()` - Expects the passed `fn` to be async and throw (or\n\n return a `Promise` that rejects). If the `fn` does not throw or reject, this\n\n function will throw asynchronously. 
Also compares any errors thrown to an\n\n optional expected `Error` class and checks that the error `.message` includes\n\n an optional string.\n\n- `unimplemented()` - Use this to stub out methods that will throw when invoked\n\n- `unreachable()` - Used to assert unreachable code\n\n\n\nBasic usage:\n\n\n\n```ts\n", "file_path": "std/testing/README.md", "rank": 77, "score": 7.362105895775218 }, { "content": "const RESOLVED_SPECIFIER_CACHE: Map<string, Map<string, string>> = new Map();\n", "file_path": "cli/js/compiler.ts", "rank": 78, "score": 7.088985407732249 }, { "content": " public setMaxListeners(n: number): this {\n\n validateIntegerRange(n, \"maxListeners\", 0);\n\n this.maxListeners = n;\n\n return this;\n", "file_path": "std/node/events.ts", "rank": 79, "score": 7.088985407732249 }, { "content": "use std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::ffi::c_void;\n\nuse std::mem::replace;\n\nuse std::mem::take;\n\nuse std::mem::MaybeUninit;\n\nuse std::net::SocketAddr;\n\nuse std::ops::Deref;\n\nuse std::ops::DerefMut;\n\nuse std::pin::Pin;\n\nuse std::process;\n\nuse std::ptr;\n\nuse std::ptr::NonNull;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse std::sync::Once;\n\nuse std::thread;\n\nuse uuid::Uuid;\n\nuse warp::filters::ws;\n\nuse warp::Filter;\n\n\n", "file_path": "cli/inspector.rs", "rank": 80, "score": 7.032475030545001 }, { "content": "use futures::task::AtomicWaker;\n\nuse futures::Future;\n\nuse libc::c_void;\n\nuse std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::convert::From;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::mem::forget;\n\nuse std::ops::{Deref, DerefMut};\n\nuse std::option::Option;\n\nuse std::pin::Pin;\n\nuse std::rc::Rc;\n\nuse std::sync::{Arc, Mutex, Once};\n\nuse std::task::Context;\n\nuse std::task::Poll;\n\n\n", "file_path": "core/isolate.rs", "rank": 81, "score": 6.991250623912651 }, { "content": "#[macro_use]\n\nextern crate derive_deref;\n\n#[macro_use]\n\nextern crate 
log;\n\n\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::Op;\n\nuse deno_core::ResourceTable;\n\nuse deno_core::Script;\n\nuse deno_core::StartupData;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::poll_fn;\n\nuse futures::prelude::*;\n\nuse futures::task::Context;\n\nuse futures::task::Poll;\n\nuse std::cell::RefCell;\n\nuse std::convert::TryInto;\n\nuse std::env;\n\nuse std::fmt::Debug;\n\nuse std::io::Error;\n", "file_path": "core/examples/http_bench.rs", "rank": 82, "score": 6.950453648189909 }, { "content": "use std::convert::TryFrom;\n\nuse std::ops::{Deref, DerefMut};\n\nuse std::option::Option;\n\nuse std::pin::Pin;\n\nuse std::rc::Rc;\n\nuse std::task::Context;\n\nuse std::task::Poll;\n\n\n\nuse crate::isolate::attach_handle_to_error;\n\nuse crate::isolate::exception_to_err_result;\n\nuse crate::isolate::CoreIsolate;\n\nuse crate::isolate::StartupData;\n\nuse crate::module_specifier::ModuleSpecifier;\n\nuse crate::modules::LoadState;\n\nuse crate::modules::ModuleLoader;\n\nuse crate::modules::ModuleSource;\n\nuse crate::modules::Modules;\n\nuse crate::modules::PrepareLoadFuture;\n\nuse crate::modules::RecursiveModuleLoad;\n\n\n", "file_path": "core/es_isolate.rs", "rank": 83, "score": 6.9449024966576385 }, { "content": "use deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse flags::DenoSubcommand;\n\nuse flags::Flags;\n\nuse futures::future::FutureExt;\n\nuse futures::Future;\n\nuse log::Level;\n\nuse log::Metadata;\n\nuse log::Record;\n\nuse std::env;\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\nuse std::pin::Pin;\n\nuse upgrade::upgrade_command;\n\nuse url::Url;\n\n\n\nstatic LOGGER: Logger = Logger;\n\n\n\n// TODO(ry) Switch to env_logger or other standard crate.\n", "file_path": "cli/lib.rs", "rank": 84, "score": 6.927268531493583 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse crate::compilers::CompiledModule;\n\nuse crate::compilers::JsCompiler;\n\nuse crate::compilers::TargetLib;\n\nuse crate::compilers::TsCompiler;\n\nuse crate::compilers::WasmCompiler;\n\nuse crate::deno_dir;\n\nuse crate::file_fetcher::SourceFileFetcher;\n\nuse crate::flags;\n\nuse crate::http_cache;\n\nuse crate::lockfile::Lockfile;\n\nuse crate::msg;\n\nuse crate::permissions::Permissions;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse std::env;\n\nuse std::ops::Deref;\n\nuse std::sync::atomic::AtomicUsize;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n", "file_path": "cli/global_state.rs", "rank": 85, "score": 6.911257879995824 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse crate::ops;\n\nuse crate::state::State;\n\nuse crate::worker::Worker;\n\nuse crate::worker::WorkerEvent;\n\nuse crate::worker::WorkerHandle;\n\nuse deno_core::v8;\n\nuse deno_core::ErrBox;\n\nuse deno_core::StartupData;\n\nuse futures::channel::mpsc;\n\nuse futures::future::FutureExt;\n\nuse futures::stream::StreamExt;\n\nuse std::future::Future;\n\nuse std::ops::Deref;\n\nuse std::ops::DerefMut;\n\nuse std::pin::Pin;\n\nuse std::sync::atomic::AtomicBool;\n\nuse std::sync::atomic::Ordering;\n\nuse std::sync::Arc;\n\nuse std::task::Context;\n", "file_path": "cli/web_worker.rs", "rank": 86, "score": 6.911257879995824 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse crate::colors;\n\nuse crate::http_cache::HttpCache;\n\nuse crate::http_util;\n\nuse crate::http_util::create_http_client;\n\nuse crate::http_util::FetchOnceResult;\n\nuse crate::msg;\n\nuse crate::op_error::OpError;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse futures::future::FutureExt;\n\nuse log::info;\n\nuse regex::Regex;\n\nuse std::collections::HashMap;\n\nuse std::fs;\n\nuse std::future::Future;\n\nuse std::io::Read;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\nuse std::pin::Pin;\n", "file_path": "cli/file_fetcher.rs", "rank": 87, "score": 6.911257879995824 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse crate::fmt_errors::JSError;\n\nuse crate::inspector::DenoInspector;\n\nuse crate::ops;\n\nuse crate::state::DebugType;\n\nuse crate::state::State;\n\nuse deno_core::Buf;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleId;\n\nuse deno_core::ModuleSpecifier;\n\nuse deno_core::StartupData;\n\nuse futures::channel::mpsc;\n\nuse futures::future::FutureExt;\n\nuse futures::stream::StreamExt;\n\nuse futures::task::AtomicWaker;\n\nuse std::env;\n\nuse std::future::Future;\n\nuse std::ops::Deref;\n\nuse std::ops::DerefMut;\n\nuse std::pin::Pin;\n", "file_path": "cli/worker.rs", "rank": 88, "score": 6.904123566952915 }, { "content": "use std::io::ErrorKind;\n\nuse std::mem::size_of;\n\nuse std::net::SocketAddr;\n\nuse std::pin::Pin;\n\nuse std::ptr;\n\nuse std::rc::Rc;\n\nuse tokio::io::AsyncRead;\n\nuse tokio::io::AsyncWrite;\n\nuse tokio::net::TcpListener;\n\nuse tokio::net::TcpStream;\n\n\n", "file_path": "core/examples/http_bench.rs", "rank": 89, "score": 6.87042718013993 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse crate::op_error::OpError;\n\nuse crate::swc_common::comments::CommentKind;\n\nuse crate::swc_common::Span;\n\nuse crate::swc_ecma_ast;\n\nuse crate::swc_ecma_ast::Decl;\n\nuse crate::swc_ecma_ast::DefaultDecl;\n\nuse crate::swc_ecma_ast::ModuleDecl;\n\nuse crate::swc_ecma_ast::Stmt;\n\nuse crate::swc_util::AstParser;\n\nuse crate::swc_util::SwcDiagnosticBuffer;\n\n\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse futures::Future;\n\nuse regex::Regex;\n\nuse std::collections::HashMap;\n\nuse std::pin::Pin;\n\n\n\nuse super::namespace::NamespaceDef;\n\nuse super::node;\n\nuse super::node::ModuleDoc;\n\nuse super::DocNode;\n\nuse super::DocNodeKind;\n\nuse super::Location;\n\n\n", "file_path": "cli/doc/parser.rs", "rank": 90, "score": 6.848160530579887 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.\n\nuse super::compiler_worker::CompilerWorker;\n\nuse crate::colors;\n\nuse crate::compilers::CompiledModule;\n\nuse crate::diagnostics::Diagnostic;\n\nuse crate::diagnostics::DiagnosticItem;\n\nuse crate::disk_cache::DiskCache;\n\nuse crate::file_fetcher::SourceFile;\n\nuse crate::file_fetcher::SourceFileFetcher;\n\nuse crate::fs as deno_fs;\n\nuse crate::global_state::GlobalState;\n\nuse crate::msg;\n\nuse crate::op_error::OpError;\n\nuse crate::source_maps::SourceMapGetter;\n\nuse crate::startup_data;\n\nuse crate::state::*;\n\nuse crate::tokio_util;\n\nuse crate::version;\n\nuse crate::web_worker::WebWorkerHandle;\n\nuse crate::worker::WorkerEvent;\n", "file_path": "cli/compilers/ts.rs", "rank": 91, "score": 6.840571459466188 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\n\n\nuse rusty_v8 as v8;\n\n\n\nuse crate::any_error::ErrBox;\n\nuse crate::es_isolate::ModuleId;\n\nuse crate::es_isolate::ModuleLoadId;\n\nuse crate::module_specifier::ModuleSpecifier;\n\nuse futures::future::FutureExt;\n\nuse futures::stream::FuturesUnordered;\n\nuse futures::stream::Stream;\n\nuse futures::stream::TryStreamExt;\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::fmt;\n\nuse std::future::Future;\n\nuse std::pin::Pin;\n\nuse std::rc::Rc;\n\nuse std::sync::atomic::AtomicI32;\n\nuse std::sync::atomic::Ordering;\n", "file_path": "core/modules.rs", "rank": 92, "score": 6.839244502479925 }, { "content": "mod shared_queue;\n\n\n\npub use rusty_v8 as v8;\n\n\n\npub use crate::any_error::*;\n\npub use crate::es_isolate::*;\n\npub use crate::flags::v8_set_flags;\n\npub use crate::isolate::*;\n\npub use crate::js_errors::*;\n\npub use crate::module_specifier::*;\n\npub use crate::modules::*;\n\npub use crate::ops::*;\n\npub use crate::resources::*;\n\n\n", "file_path": "core/lib.rs", "rank": 93, "score": 6.830433316617029 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse crate::compilers::TargetLib;\n\nuse crate::global_state::GlobalState;\n\nuse crate::global_timer::GlobalTimer;\n\nuse crate::import_map::ImportMap;\n\nuse crate::metrics::Metrics;\n\nuse crate::op_error::OpError;\n\nuse crate::ops::JsonOp;\n\nuse crate::ops::MinimalOp;\n\nuse crate::permissions::Permissions;\n\nuse crate::web_worker::WebWorkerHandle;\n\nuse deno_core::Buf;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleLoadId;\n\nuse deno_core::ModuleLoader;\n\nuse deno_core::ModuleSpecifier;\n\nuse deno_core::Op;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\nuse futures::Future;\n", "file_path": "cli/state.rs", "rank": 94, "score": 6.8266073318059135 }, { "content": "use super::dispatch_minimal::MinimalOp;\n\nuse crate::http_util::HttpBody;\n\nuse crate::op_error::OpError;\n\nuse crate::state::State;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ResourceTable;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::poll_fn;\n\nuse futures::future::FutureExt;\n\nuse futures::ready;\n\nuse std::collections::HashMap;\n\nuse std::pin::Pin;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::task::Context;\n\nuse std::task::Poll;\n\nuse tokio::io::{AsyncRead, AsyncWrite};\n\nuse tokio::net::TcpStream;\n\nuse tokio_rustls::client::TlsStream as ClientTlsStream;\n\nuse tokio_rustls::server::TlsStream as ServerTlsStream;\n\n\n", "file_path": "cli/ops/io.rs", "rank": 95, "score": 6.819646625624969 }, { "content": "use crate::fs as deno_fs;\n\nuse std::ffi::OsStr;\n\nuse std::fs;\n\nuse std::path::Component;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\nuse std::path::Prefix;\n\nuse std::str;\n\nuse url::Url;\n\n\n\n#[derive(Clone)]\n\npub struct DiskCache {\n\n pub location: PathBuf,\n\n}\n\n\n", "file_path": "cli/disk_cache.rs", "rank": 96, "score": 6.784194952858188 }, { "content": "// Copyright 2018-2020 the Deno authors. All rights reserved. 
MIT license.\n\nuse super::compiler_worker::CompilerWorker;\n\nuse crate::compilers::CompiledModule;\n\nuse crate::file_fetcher::SourceFile;\n\nuse crate::global_state::GlobalState;\n\nuse crate::startup_data;\n\nuse crate::state::*;\n\nuse crate::tokio_util;\n\nuse crate::web_worker::WebWorkerHandle;\n\nuse crate::worker::WorkerEvent;\n\nuse deno_core::Buf;\n\nuse deno_core::ErrBox;\n\nuse deno_core::ModuleSpecifier;\n\nuse serde_derive::Deserialize;\n\nuse std::collections::HashMap;\n\nuse std::sync::atomic::Ordering;\n\nuse std::sync::{Arc, Mutex};\n\nuse url::Url;\n\n\n\n// TODO(ry) The entire concept of spawning a thread, sending data to JS,\n", "file_path": "cli/compilers/wasm.rs", "rank": 97, "score": 6.7772797502972155 }, { "content": "use super::dispatch_json::{Deserialize, JsonOp};\n\nuse super::io::{StreamResource, StreamResourceHolder};\n\nuse crate::op_error::OpError;\n\nuse deno_core::CoreIsolate;\n\nuse deno_core::ResourceTable;\n\nuse deno_core::ZeroCopyBuf;\n\nuse futures::future::FutureExt;\n\nuse std::fs::remove_file;\n\nuse std::os::unix;\n\npub use std::path::Path;\n\nuse tokio::net::UnixDatagram;\n\nuse tokio::net::UnixListener;\n\npub use tokio::net::UnixStream;\n\n\n", "file_path": "cli/ops/net_unix.rs", "rank": 98, "score": 6.774653510124535 }, { "content": " 'd' => return false,\n\n _ => {\n\n // If we don't get a recognized option try again.\n\n let msg_again =\n\n format!(\"Unrecognized option '{}' [g/d (g = grant, d = deny)] \", ch);\n\n eprint!(\"{}\", colors::bold(msg_again));\n\n }\n\n };\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nlazy_static! {\n\n /// Lock this when you use `set_prompt_result` in a test case.\n\n static ref PERMISSION_PROMPT_GUARD: Mutex<()> = Mutex::new(());\n\n}\n\n\n\n#[cfg(test)]\n\nstatic STUB_PROMPT_VALUE: AtomicBool = AtomicBool::new(true);\n\n\n", "file_path": "cli/permissions.rs", "rank": 99, "score": 6.751373914463738 } ]
Rust
src/metadata/media_info.rs
fengalin/media-toc-player
8fb6580419ec530329c131116c726bb176348956
use gettextrs::gettext; use gst::Tag; use lazy_static::lazy_static; use std::{ collections::HashMap, fmt, path::{Path, PathBuf}, sync::Arc, }; use super::{Duration, MediaContent}; #[derive(Debug)] pub struct SelectStreamError(Arc<str>); impl SelectStreamError { fn new(id: &Arc<str>) -> Self { SelectStreamError(Arc::clone(&id)) } pub fn id(&self) -> &Arc<str> { &self.0 } } impl fmt::Display for SelectStreamError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "MediaInfo: unknown stream id {}", self.0) } } impl std::error::Error for SelectStreamError {} pub fn get_default_chapter_title() -> String { gettext("untitled") } macro_rules! add_tag_names ( ($($tag_type:path),+) => { { let mut tag_names = Vec::new(); $(tag_names.push(<$tag_type>::tag_name());)+ tag_names } }; ); lazy_static! { static ref TAGS_TO_SKIP_FOR_TRACK: Vec<&'static str> = { add_tag_names!( gst::tags::Album, gst::tags::AlbumSortname, gst::tags::AlbumSortname, gst::tags::AlbumArtist, gst::tags::AlbumArtistSortname, gst::tags::ApplicationName, gst::tags::ApplicationData, gst::tags::Artist, gst::tags::ArtistSortname, gst::tags::AudioCodec, gst::tags::Codec, gst::tags::ContainerFormat, gst::tags::Duration, gst::tags::Encoder, gst::tags::EncoderVersion, gst::tags::Image, gst::tags::ImageOrientation, gst::tags::PreviewImage, gst::tags::SubtitleCodec, gst::tags::Title, gst::tags::TitleSortname, gst::tags::TrackCount, gst::tags::TrackNumber, gst::tags::VideoCodec ) }; } #[derive(Debug, Clone)] pub struct Stream { pub id: Arc<str>, pub codec_printable: String, pub caps: gst::Caps, pub tags: gst::TagList, pub type_: gst::StreamType, } impl Stream { fn new(stream: &gst::Stream) -> Self { let caps = stream.get_caps().unwrap(); let tags = stream.get_tags().unwrap_or_else(gst::TagList::new); let type_ = stream.get_stream_type(); let codec_printable = match type_ { gst::StreamType::AUDIO => tags.get_index::<gst::tags::AudioCodec>(0), gst::StreamType::VIDEO => 
tags.get_index::<gst::tags::VideoCodec>(0), gst::StreamType::TEXT => tags.get_index::<gst::tags::SubtitleCodec>(0), _ => panic!("Stream::new can't handle {:?}", type_), } .or_else(|| tags.get_index::<gst::tags::Codec>(0)) .and_then(|value| value.get()) .map_or_else( || { let codec = caps.get_structure(0).unwrap().get_name(); let id_parts: Vec<&str> = codec.split('/').collect(); if id_parts.len() == 2 { if id_parts[1].starts_with("x-") { id_parts[1][2..].to_string() } else { id_parts[1].to_string() } } else { codec.to_string() } }, ToString::to_string, ); Stream { id: stream.get_stream_id().unwrap().as_str().into(), codec_printable, caps, tags, type_, } } } #[derive(Debug)] pub struct StreamCollection { type_: gst::StreamType, collection: HashMap<Arc<str>, Stream>, } impl StreamCollection { fn new(type_: gst::StreamType) -> Self { StreamCollection { type_, collection: HashMap::new(), } } fn add_stream(&mut self, stream: Stream) { self.collection.insert(Arc::clone(&stream.id), stream); } pub fn get<S: AsRef<str>>(&self, id: S) -> Option<&Stream> { self.collection.get(id.as_ref()) } pub fn contains<S: AsRef<str>>(&self, id: S) -> bool { self.collection.contains_key(id.as_ref()) } pub fn sorted(&self) -> impl Iterator<Item = &'_ Stream> { SortedStreamCollectionIter::new(self) } } struct SortedStreamCollectionIter<'sc> { collection: &'sc StreamCollection, sorted_iter: std::vec::IntoIter<Arc<str>>, } impl<'sc> SortedStreamCollectionIter<'sc> { fn new(collection: &'sc StreamCollection) -> Self { let mut sorted_ids: Vec<Arc<str>> = collection.collection.keys().map(Arc::clone).collect(); sorted_ids.sort(); SortedStreamCollectionIter { collection, sorted_iter: sorted_ids.into_iter(), } } } impl<'sc> Iterator for SortedStreamCollectionIter<'sc> { type Item = &'sc Stream; fn next(&mut self) -> Option<Self::Item> { self.sorted_iter .next() .and_then(|id| self.collection.get(&id)) } } #[derive(Debug)] pub struct Streams { pub audio: StreamCollection, pub video: StreamCollection, 
pub text: StreamCollection, cur_audio_id: Option<Arc<str>>, pub audio_changed: bool, cur_video_id: Option<Arc<str>>, pub video_changed: bool, cur_text_id: Option<Arc<str>>, pub text_changed: bool, } impl Default for Streams { fn default() -> Self { Streams { audio: StreamCollection::new(gst::StreamType::AUDIO), video: StreamCollection::new(gst::StreamType::VIDEO), text: StreamCollection::new(gst::StreamType::TEXT), cur_audio_id: None, audio_changed: false, cur_video_id: None, video_changed: false, cur_text_id: None, text_changed: false, } } } impl Streams { pub fn add_stream(&mut self, gst_stream: &gst::Stream) { let stream = Stream::new(gst_stream); match stream.type_ { gst::StreamType::AUDIO => { self.cur_audio_id.get_or_insert(Arc::clone(&stream.id)); self.audio.add_stream(stream); } gst::StreamType::VIDEO => { self.cur_video_id.get_or_insert(Arc::clone(&stream.id)); self.video.add_stream(stream); } gst::StreamType::TEXT => { self.cur_text_id.get_or_insert(Arc::clone(&stream.id)); self.text.add_stream(stream); } other => unimplemented!("{:?}", other), } } pub fn collection(&self, type_: gst::StreamType) -> &StreamCollection { match type_ { gst::StreamType::AUDIO => &self.audio, gst::StreamType::VIDEO => &self.video, gst::StreamType::TEXT => &self.text, other => unimplemented!("{:?}", other), } } pub fn is_video_selected(&self) -> bool { self.cur_video_id.is_some() } pub fn selected_audio(&self) -> Option<&Stream> { self.cur_audio_id .as_ref() .and_then(|stream_id| self.audio.get(stream_id)) } pub fn selected_video(&self) -> Option<&Stream> { self.cur_video_id .as_ref() .and_then(|stream_id| self.video.get(stream_id)) } pub fn selected_text(&self) -> Option<&Stream> { self.cur_text_id .as_ref() .and_then(|stream_id| self.text.get(stream_id)) } pub fn select_streams(&mut self, ids: &[Arc<str>]) -> Result<(), SelectStreamError> { let mut is_audio_selected = false; let mut is_text_selected = false; let mut is_video_selected = false; for id in ids { if 
self.audio.contains(id) { is_audio_selected = true; self.audio_changed = self .selected_audio() .map_or(true, |prev_stream| *id != prev_stream.id); self.cur_audio_id = Some(Arc::clone(id)); } else if self.text.contains(id) { is_text_selected = true; self.text_changed = self .selected_text() .map_or(true, |prev_stream| *id != prev_stream.id); self.cur_text_id = Some(Arc::clone(id)); } else if self.video.contains(id) { is_video_selected = true; self.video_changed = self .selected_video() .map_or(true, |prev_stream| *id != prev_stream.id); self.cur_video_id = Some(Arc::clone(id)); } else { return Err(SelectStreamError::new(id)); } } if !is_audio_selected { self.audio_changed = self.cur_audio_id.take().map_or(false, |_| true); } if !is_text_selected { self.text_changed = self.cur_text_id.take().map_or(false, |_| true); } if !is_video_selected { self.video_changed = self.cur_video_id.take().map_or(false, |_| true); } Ok(()) } pub fn audio_codec(&self) -> Option<&str> { self.selected_audio() .map(|stream| stream.codec_printable.as_str()) } pub fn video_codec(&self) -> Option<&str> { self.selected_video() .map(|stream| stream.codec_printable.as_str()) } fn tag_list<'a, T: gst::Tag<'a>>(&'a self) -> Option<&gst::TagList> { self.selected_audio() .and_then(|selected_audio| { if selected_audio.tags.get_size::<T>() > 0 { Some(&selected_audio.tags) } else { None } }) .or_else(|| { self.selected_video().and_then(|selected_video| { if selected_video.tags.get_size::<T>() > 0 { Some(&selected_video.tags) } else { None } }) }) } } #[derive(Debug, Default)] pub struct MediaInfo { pub name: String, pub file_name: String, pub path: PathBuf, pub content: MediaContent, pub tags: gst::TagList, pub toc: Option<gst::Toc>, pub chapter_count: Option<usize>, pub description: String, pub duration: Duration, pub streams: Streams, } impl MediaInfo { pub fn new(path: &Path) -> Self { MediaInfo { name: path.file_stem().unwrap().to_str().unwrap().to_owned(), file_name: 
path.file_name().unwrap().to_str().unwrap().to_owned(), path: path.to_owned(), ..MediaInfo::default() } } pub fn add_stream(&mut self, gst_stream: &gst::Stream) { self.streams.add_stream(gst_stream); self.content.add_stream_type(gst_stream.get_stream_type()); } pub fn add_tags(&mut self, tags: &gst::TagList) { self.tags = self.tags.merge(tags, gst::TagMergeMode::Keep); } fn tag_list<'a, T: gst::Tag<'a>>(&'a self) -> Option<&gst::TagList> { if self.tags.get_size::<T>() > 0 { Some(&self.tags) } else { None } } fn tag_for_display<'a, Primary, Secondary>( &'a self, ) -> Option<<Primary as gst::Tag<'a>>::TagType> where Primary: gst::Tag<'a> + 'a, Secondary: gst::Tag<'a, TagType = <Primary as gst::Tag<'a>>::TagType> + 'a, { self.tag_list::<Primary>() .or_else(|| self.tag_list::<Secondary>()) .or_else(|| { self.streams .tag_list::<Primary>() .or_else(|| self.streams.tag_list::<Secondary>()) }) .and_then(|tag_list| { tag_list .get_index::<Primary>(0) .or_else(|| tag_list.get_index::<Secondary>(0)) .and_then(|value| value.get()) }) } pub fn media_artist(&self) -> Option<&str> { self.tag_for_display::<gst::tags::Artist, gst::tags::AlbumArtist>() } pub fn media_title(&self) -> Option<&str> { self.tag_for_display::<gst::tags::Title, gst::tags::Album>() } pub fn media_image(&self) -> Option<gst::Sample> { self.tag_for_display::<gst::tags::Image, gst::tags::PreviewImage>() } pub fn container(&self) -> Option<&str> { if let Some(audio_codec) = self.streams.audio_codec() { if self.streams.video_codec().is_none() && audio_codec.to_lowercase().find("mp3").is_some() { return None; } } self.tags .get_index::<gst::tags::ContainerFormat>(0) .and_then(|value| value.get()) } }
use gettextrs::gettext; use gst::Tag; use lazy_static::lazy_static; use std::{ collections::HashMap, fmt, path::{Path, PathBuf}, sync::Arc, }; use super::{Duration, MediaContent}; #[derive(Debug)] pub struct SelectStreamError(Arc<str>); impl SelectStreamError { fn new(id: &Arc<str>) -> Self { SelectStreamError(Arc::clone(&id)) } pub fn id(&self) -> &Arc<str> { &self.0 } } impl fmt::Display for SelectStreamError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "MediaInfo: unknown stream id {}", self.0) } } impl std::error::Error for SelectStreamError {} pub fn get_default_chapter_title() -> String { gettext("untitled") } macro_rules! add_tag_names ( ($($tag_type:path),+) => { { let mut tag_names = Vec::new(); $(tag_names.push(<$tag_type>::tag_name());)+ tag_names } }; ); lazy_static! { static ref TAGS_TO_SKIP_FOR_TRACK: Vec<&'static str> = { add_tag_names!( gst::tags::Album, gst::tags::AlbumSortname, gst::tags::AlbumSortname, gst::tags::AlbumArtist, gst::tags::AlbumArtistSortname, gst::tags::ApplicationName, gst::tags::ApplicationData, gst::tags::Artist, gst::tags::ArtistSortname, gst::tags::AudioCodec, gst::tags::Codec, gst::tags::ContainerFormat, gst::tags::Duration, gst::tags::Encoder, gst::tags::EncoderVersion, gst::tags::Image, gst::tags::ImageOrientation, gst::tags::PreviewImage, gst::tags::SubtitleCodec, gst::tags::Title, gst::tags::TitleSortname, gst::tags::TrackCount, gst::tags::TrackNumber, gst::tags::VideoCodec ) }; } #[derive(Debug, Clone)] pub struct Stream { pub id: Arc<str>, pub codec_printable: String, pub caps: gst::Caps, pub tags: gst::TagList, pub type_: gst::StreamType, } impl Stream { fn new(stream: &gst::Stream) -> Self { let caps = stream.get_caps().unwrap(); let tags = stream.get_tags().unwrap_or_else(gst::TagList::new); let type_ = stream.get_stream_type(); let codec_printable = match type_ { gst::StreamType::AUDIO => tags.get_index::<gst::tags::AudioCodec>(0), gst::StreamType::VIDEO => 
tags.get_index::<gst::tags::VideoCodec>(0), gst::StreamType::TEXT => tags.get_index::<gst::tags::SubtitleCodec>(0), _ => panic!("Stream::new can't handle {:?}", type_), } .or_else(|| tags.get_index::<gst::tags::Codec>(0)) .and_then(|value| value.get()) .map_or_else( || { let codec = caps.get_structure(0).unwrap().get_name(); let id_parts: Vec<&str> = codec.split('/').collect(); if id_parts.len() == 2 { if id_parts[1].starts_with("x-") { id_parts[1][2..].to_string() } else { id_parts[1].to_string() } } else { codec.to_string() } }, ToString::to_string, ); Stream { id: stream.get_stream_id().unwrap().as_str().into(), codec_printable, caps, tags, type_, } } } #[derive(Debug)] pub struct StreamCollection { type_: gst::StreamType, collection: HashMap<Arc<str>, Stream>, } impl StreamCollection { fn new(type_: gst::StreamType) -> Self { StreamCollection { type_, collection: HashMap::new(), } } fn add_stream(&mut self, stream: Stream) { self.collection.insert(Arc::clone(&stream.id), stream); } pub fn get<S: AsRef<str>>(&self, id: S) -> Option<&Stream> { self.collection.get(id.as_ref()) } pub fn contains<S: AsRef<str>>(&self, id: S) -> bool { self.collection.contains_key(id.as_ref()) } pub fn sorted(&self) -> impl Iterator<Item = &'_ Stream> { SortedStreamCollectionIter::new(self) } } struct SortedStreamCollectionIter<'sc> { collection: &'sc StreamCollection, sorted_iter: std::vec::IntoIter<Arc<str>>, } impl<'sc> SortedStreamCollectionIter<'sc> { fn new(collection: &'sc StreamCollection) -> Self { let mut sorted_ids: Vec<Arc<str>> = collection.collection.keys().map(Arc::clone).collect(); sorted_ids.sort(); SortedStreamCollectionIter { collection, sorted_iter: sorted_ids.into_iter(), } } } impl<'sc> Iterator for SortedStreamCollectionIter<'sc> { type Item = &'sc Stream; fn next(&mut self) -> Option<Self::Item> { self.sorted_iter .next() .and_then(|id| self.collection.get(&id)) } } #[derive(Debug)] pub struct Streams { pub audio: StreamCollection, pub video: StreamCollection, 
pub text: StreamCollection, cur_audio_id: Option<Arc<str>>, pub audio_changed: bool, cur_video_id: Option<Arc<str>>, pub video_changed: bool, cur_text_id: Option<Arc<str>>, pub text_changed: bool, } impl Default for Streams { fn default() -> Self { Streams { audio: StreamCollection::new(gst::StreamType::AUDIO), video: StreamCollection::new(gst::StreamType::VIDEO), text: StreamCollection::new(gst::StreamType::TEXT), cur_audio_id: None, audio_changed: false, cur_video_id: None, video_changed: false, cur_text_id: None, text_changed: false, } } } impl Streams { pub fn add_stream(&mut self, gst_stream: &gst::Stream) { let stream = Stream::new(gst_stream); match stream.type_ { gst::StreamType::AUDIO => { self.cur_audio_id.get_or_insert(Arc::clone(&stream.id)); self.audio.add_stream(stream); } gst::StreamType::VIDEO => { self.cur_video_id.get_or_insert(Arc::clone(&stream.id)); self.video.add_stream(stream); } gst::StreamType::TEXT => { self.cur_text_id.get_or_insert(Arc::clone(&stream.id)); self.text.add_stream(stream); } other => unimplemented!("{:?}", other), } } pub fn collection(&self, type_: gst::StreamType) -> &StreamCollection {
} pub fn is_video_selected(&self) -> bool { self.cur_video_id.is_some() } pub fn selected_audio(&self) -> Option<&Stream> { self.cur_audio_id .as_ref() .and_then(|stream_id| self.audio.get(stream_id)) } pub fn selected_video(&self) -> Option<&Stream> { self.cur_video_id .as_ref() .and_then(|stream_id| self.video.get(stream_id)) } pub fn selected_text(&self) -> Option<&Stream> { self.cur_text_id .as_ref() .and_then(|stream_id| self.text.get(stream_id)) } pub fn select_streams(&mut self, ids: &[Arc<str>]) -> Result<(), SelectStreamError> { let mut is_audio_selected = false; let mut is_text_selected = false; let mut is_video_selected = false; for id in ids { if self.audio.contains(id) { is_audio_selected = true; self.audio_changed = self .selected_audio() .map_or(true, |prev_stream| *id != prev_stream.id); self.cur_audio_id = Some(Arc::clone(id)); } else if self.text.contains(id) { is_text_selected = true; self.text_changed = self .selected_text() .map_or(true, |prev_stream| *id != prev_stream.id); self.cur_text_id = Some(Arc::clone(id)); } else if self.video.contains(id) { is_video_selected = true; self.video_changed = self .selected_video() .map_or(true, |prev_stream| *id != prev_stream.id); self.cur_video_id = Some(Arc::clone(id)); } else { return Err(SelectStreamError::new(id)); } } if !is_audio_selected { self.audio_changed = self.cur_audio_id.take().map_or(false, |_| true); } if !is_text_selected { self.text_changed = self.cur_text_id.take().map_or(false, |_| true); } if !is_video_selected { self.video_changed = self.cur_video_id.take().map_or(false, |_| true); } Ok(()) } pub fn audio_codec(&self) -> Option<&str> { self.selected_audio() .map(|stream| stream.codec_printable.as_str()) } pub fn video_codec(&self) -> Option<&str> { self.selected_video() .map(|stream| stream.codec_printable.as_str()) } fn tag_list<'a, T: gst::Tag<'a>>(&'a self) -> Option<&gst::TagList> { self.selected_audio() .and_then(|selected_audio| { if selected_audio.tags.get_size::<T>() > 0 { 
Some(&selected_audio.tags) } else { None } }) .or_else(|| { self.selected_video().and_then(|selected_video| { if selected_video.tags.get_size::<T>() > 0 { Some(&selected_video.tags) } else { None } }) }) } } #[derive(Debug, Default)] pub struct MediaInfo { pub name: String, pub file_name: String, pub path: PathBuf, pub content: MediaContent, pub tags: gst::TagList, pub toc: Option<gst::Toc>, pub chapter_count: Option<usize>, pub description: String, pub duration: Duration, pub streams: Streams, } impl MediaInfo { pub fn new(path: &Path) -> Self { MediaInfo { name: path.file_stem().unwrap().to_str().unwrap().to_owned(), file_name: path.file_name().unwrap().to_str().unwrap().to_owned(), path: path.to_owned(), ..MediaInfo::default() } } pub fn add_stream(&mut self, gst_stream: &gst::Stream) { self.streams.add_stream(gst_stream); self.content.add_stream_type(gst_stream.get_stream_type()); } pub fn add_tags(&mut self, tags: &gst::TagList) { self.tags = self.tags.merge(tags, gst::TagMergeMode::Keep); } fn tag_list<'a, T: gst::Tag<'a>>(&'a self) -> Option<&gst::TagList> { if self.tags.get_size::<T>() > 0 { Some(&self.tags) } else { None } } fn tag_for_display<'a, Primary, Secondary>( &'a self, ) -> Option<<Primary as gst::Tag<'a>>::TagType> where Primary: gst::Tag<'a> + 'a, Secondary: gst::Tag<'a, TagType = <Primary as gst::Tag<'a>>::TagType> + 'a, { self.tag_list::<Primary>() .or_else(|| self.tag_list::<Secondary>()) .or_else(|| { self.streams .tag_list::<Primary>() .or_else(|| self.streams.tag_list::<Secondary>()) }) .and_then(|tag_list| { tag_list .get_index::<Primary>(0) .or_else(|| tag_list.get_index::<Secondary>(0)) .and_then(|value| value.get()) }) } pub fn media_artist(&self) -> Option<&str> { self.tag_for_display::<gst::tags::Artist, gst::tags::AlbumArtist>() } pub fn media_title(&self) -> Option<&str> { self.tag_for_display::<gst::tags::Title, gst::tags::Album>() } pub fn media_image(&self) -> Option<gst::Sample> { self.tag_for_display::<gst::tags::Image, 
gst::tags::PreviewImage>() } pub fn container(&self) -> Option<&str> { if let Some(audio_codec) = self.streams.audio_codec() { if self.streams.video_codec().is_none() && audio_codec.to_lowercase().find("mp3").is_some() { return None; } } self.tags .get_index::<gst::tags::ContainerFormat>(0) .and_then(|value| value.get()) } }
match type_ { gst::StreamType::AUDIO => &self.audio, gst::StreamType::VIDEO => &self.video, gst::StreamType::TEXT => &self.text, other => unimplemented!("{:?}", other), }
if_condition
[ { "content": "fn parse_to<T: std::str::FromStr>(i: &str) -> IResult<&str, T> {\n\n let (i, res) = digit1(i)?;\n\n\n\n res.parse::<T>()\n\n .map(move |value| (i, value))\n\n .map_err(move |_| Err::Error((i, ErrorKind::ParseTo)))\n\n}\n", "file_path": "src/metadata/mod.rs", "rank": 2, "score": 122923.27450890675 }, { "content": "pub fn parse_timestamp(i: &str) -> IResult<&str, Timestamp4Humans> {\n\n let parse_timestamp_ = tuple((\n\n separated_pair(parse_to::<u8>, tag(\":\"), parse_to::<u8>),\n\n opt(tuple((\n\n // the next tag determines whether the 1st number is h or mn\n\n alt((tag(\":\"), tag(\".\"))),\n\n parse_to::<u16>,\n\n opt(preceded(tag(\".\"), parse_to::<u16>)),\n\n ))),\n\n ));\n\n\n\n let (i, res) = parse_timestamp_(i)?;\n\n\n\n let ts = match res {\n\n ((h, m), Some((\":\", s, ms))) => {\n\n let s = u8::try_from(s).map_err(|_| Err::Error((i, ErrorKind::ParseTo)))?;\n\n\n\n Timestamp4Humans {\n\n h,\n\n m,\n", "file_path": "src/metadata/timestamp_4_humans.rs", "rank": 3, "score": 115369.29635564875 }, { "content": "fn parse_chapter(i: &str) -> IResult<&str, gst::TocEntry> {\n\n let parse_first_line = terminated(\n\n preceded(\n\n tag(CHAPTER_TAG),\n\n separated_pair(parse_to::<usize>, tag(\"=\"), parse_timestamp),\n\n ),\n\n line_ending,\n\n );\n\n\n\n let (i, (nb, start_ts)) = parse_first_line(i)?;\n\n\n\n let parse_second_line = terminated(\n\n preceded(\n\n tag(CHAPTER_TAG),\n\n separated_pair(\n\n verify(parse_to::<usize>, |nb2| nb == *nb2),\n\n pair(tag(NAME_TAG), tag(\"=\")),\n\n not_line_ending,\n\n ),\n\n ),\n\n opt(line_ending),\n\n );\n\n\n\n parse_second_line(i).map(|(i, (_, title))| (i, new_chapter(nb, start_ts, title)))\n\n}\n\n\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 4, "score": 102178.70770197021 }, { "content": "pub fn get_command_line() -> CommandLineArguments {\n\n let about_msg = gettext(\"A media player with a table of contents\");\n\n let help_msg = gettext(\"Display this message\");\n\n let version_msg = 
gettext(\"Print version information\");\n\n\n\n let disable_gl_arg = \"DISABLE_GL\";\n\n let input_arg = gettext(\"MEDIA\");\n\n\n\n let matches = App::new(env!(\"CARGO_PKG_NAME\"))\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(env!(\"CARGO_PKG_AUTHORS\"))\n\n .about(&about_msg[..])\n\n .help_message(&help_msg[..])\n\n .version_message(&version_msg[..])\n\n .arg(\n\n Arg::with_name(&disable_gl_arg[..])\n\n .short(\"d\")\n\n .long(\"disable-gl\")\n\n .help(&gettext(\"Disable video rendering hardware acceleration\")),\n\n )\n", "file_path": "src/application/command_line.rs", "rank": 5, "score": 98318.4983463967 }, { "content": "pub fn init_locale() {\n\n // Search translations under `target` first\n\n // in order to reflect latest changes during development\n\n let text_domain = TextDomain::new(&*APP_NAME)\n\n .codeset(\"UTF-8\")\n\n .prepend(\"target\");\n\n\n\n // Add user's data dir in the search path\n\n let project_dirs = ProjectDirs::from(TLD, SLD, &APP_NAME)\n\n .expect(\"Couldn't find project dirs for this platform\");\n\n let _app_data_dir = project_dirs.data_dir();\n\n\n\n // FIXME: figure out macOS conventions\n\n #[cfg(all(target_family = \"unix\", not(target_os = \"macos\")))]\n\n let text_domain = match _app_data_dir.parent() {\n\n Some(data_dir) => text_domain.prepend(data_dir),\n\n None => text_domain,\n\n };\n\n\n\n #[cfg(target_os = \"windows\")]\n", "file_path": "src/application/locale.rs", "rank": 6, "score": 87389.64691451851 }, { "content": "pub fn run(args: CommandLineArguments) {\n\n register_resource(include_bytes!(\"../../target/resources/ui.gresource\"));\n\n\n\n let gtk_app = gtk::Application::new(Some(&APP_ID), gio::ApplicationFlags::empty())\n\n .expect(\"Failed to initialize GtkApplication\");\n\n\n\n gtk_app.connect_activate(move |gtk_app| MainController::setup(gtk_app, &args));\n\n gtk_app.run(&[]);\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 7, "score": 75568.34133981986 }, { "content": "struct TocEntryIter {\n\n 
entries: Vec<gst::TocEntry>,\n\n index: usize,\n\n}\n\n\n\nimpl TocEntryIter {\n\n fn from(entries: Vec<gst::TocEntry>) -> Self {\n\n Self { entries, index: 0 }\n\n }\n\n\n\n fn next(&mut self) -> Option<(gst::TocEntry, usize)> {\n\n if self.index >= self.entries.len() {\n\n return None;\n\n }\n\n\n\n let result = Some((self.entries[self.index].clone(), self.index));\n\n self.index += 1;\n\n result\n\n }\n\n}\n", "file_path": "src/metadata/toc_visitor.rs", "rank": 8, "score": 75162.91338913052 }, { "content": "fn new_chapter(nb: usize, start_ts: Timestamp4Humans, title: &str) -> gst::TocEntry {\n\n let mut chapter = gst::TocEntry::new(gst::TocEntryType::Chapter, &format!(\"{:02}\", nb));\n\n let start = start_ts.nano_total() as i64;\n\n chapter\n\n .get_mut()\n\n .unwrap()\n\n .set_start_stop_times(start, start);\n\n\n\n let mut tag_list = gst::TagList::new();\n\n tag_list\n\n .get_mut()\n\n .unwrap()\n\n .add::<gst::tags::Title>(&title, gst::TagMergeMode::Replace);\n\n chapter.get_mut().unwrap().set_tags(tag_list);\n\n chapter\n\n}\n\n\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 9, "score": 74802.75167357014 }, { "content": "#[test]\n\nfn parse_string() {\n\n let ts_res = parse_timestamp(\"11:42:20.010\");\n\n assert!(ts_res.is_ok());\n\n let ts = ts_res.unwrap().1;\n\n assert_eq!(ts.h, 11);\n\n assert_eq!(ts.m, 42);\n\n assert_eq!(ts.s, 20);\n\n assert_eq!(ts.ms, 10);\n\n assert_eq!(ts.us, 0);\n\n assert_eq!(ts.nano, 0);\n\n assert_eq!(\n\n ts.nano_total(),\n\n ((((11 * 60 + 42) * 60 + 20) * 1_000) + 10) * 1_000 * 1_000\n\n );\n\n\n\n let ts_res = parse_timestamp(\"42:20.010\");\n\n assert!(ts_res.is_ok());\n\n let ts = ts_res.unwrap().1;\n\n assert_eq!(ts.h, 0);\n\n assert_eq!(ts.m, 42);\n", "file_path": "src/metadata/timestamp_4_humans.rs", "rank": 10, "score": 72323.92776835995 }, { "content": "fn spawn<Fut: Future<Output = ()> + 'static>(future: Fut) {\n\n glib::MainContext::ref_thread_default().spawn_local(future);\n\n}\n\n\n", 
"file_path": "src/ui/mod.rs", "rank": 11, "score": 69648.9464478245 }, { "content": "#[test]\n\nfn parse_chapter_test() {\n\n use nom::{error::ErrorKind, InputLength};\n\n gst::init().unwrap();\n\n\n\n let res = parse_chapter(\"CHAPTER01=00:00:01.000\\nCHAPTER01NAME=test\\n\");\n\n let (i, toc_entry) = res.unwrap();\n\n assert_eq!(0, i.input_len());\n\n assert_eq!(1_000_000_000, toc_entry.get_start_stop_times().unwrap().0);\n\n assert_eq!(\n\n Some(\"test\".to_string()),\n\n toc_entry.get_tags().and_then(|tags| tags\n\n .get::<gst::tags::Title>()\n\n .and_then(|tag| tag.get().map(|value| value.to_string()))),\n\n );\n\n\n\n let res = parse_chapter(\"CHAPTER01=00:00:01.000\\r\\nCHAPTER01NAME=test\\r\\n\");\n\n let (i, toc_entry) = res.unwrap();\n\n assert_eq!(0, i.input_len());\n\n assert_eq!(1_000_000_000, toc_entry.get_start_stop_times().unwrap().0);\n\n assert_eq!(\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 12, "score": 67637.49859326796 }, { "content": "pub fn new_pair() -> (UIEventSender, async_mpsc::UnboundedReceiver<UIEvent>) {\n\n let (sender, receiver) = async_mpsc::unbounded();\n\n let sender = UIEventSender(RefCell::new(sender));\n\n\n\n (sender, receiver)\n\n}\n", "file_path": "src/ui/ui_event.rs", "rank": 13, "score": 61175.74860113417 }, { "content": "fn res_path() -> PathBuf {\n\n PathBuf::from(\"res\")\n\n}\n\n\n", "file_path": "build.rs", "rank": 14, "score": 60836.72108144696 }, { "content": "fn target_path() -> PathBuf {\n\n PathBuf::from(\"target\")\n\n}\n\n\n", "file_path": "build.rs", "rank": 15, "score": 60836.72108144696 }, { "content": "fn po_path() -> PathBuf {\n\n PathBuf::from(\"po\")\n\n}\n\n\n", "file_path": "build.rs", "rank": 16, "score": 60836.72108144696 }, { "content": "// This is from https://github.com/gtk-rs/examples/blob/master/src/bin/cairo_threads.rs\n\n// Helper struct that allows passing the pixels to the Cairo image surface and once the\n\n// image surface is destroyed the pixels will be stored in the 
return_location.\n\n//\n\n// This allows us to give temporary ownership of the pixels to the Cairo surface and later\n\n// retrieve them back in a safe way while ensuring that nothing else still has access to\n\n// it.\n\nstruct ImageHolder {\n\n pixels: Option<Box<[u8]>>,\n\n return_location: Rc<RefCell<Option<Box<[u8]>>>>,\n\n}\n\n\n\n// This stores the pixels back into the return_location as now nothing\n\n// references the pixels anymore\n\nimpl Drop for ImageHolder {\n\n fn drop(&mut self) {\n\n *self.return_location.borrow_mut() = Some(self.pixels.take().expect(\"Holding no image\"));\n\n }\n\n}\n\n\n\nimpl AsMut<[u8]> for ImageHolder {\n\n fn as_mut(&mut self) -> &mut [u8] {\n\n self.pixels.as_mut().expect(\"Holding no image\").as_mut()\n\n }\n\n}\n\n\n\n// This is mostly from https://github.com/gtk-rs/examples/blob/master/src/bin/cairo_threads.rs\n", "file_path": "src/ui/image.rs", "rank": 17, "score": 49752.9409408538 }, { "content": "struct Thumbnail {\n\n drawingarea: gtk::DrawingArea,\n\n signal_handler_id: Option<glib::SignalHandlerId>,\n\n state: ThumbnailState,\n\n}\n\n\n\nimpl Thumbnail {\n\n fn new<D>(drawingarea: &gtk::DrawingArea, draw_cb: D) -> Self\n\n where\n\n D: Fn(&gtk::DrawingArea, &cairo::Context) -> Inhibit + 'static,\n\n {\n\n let signal_handler_id = drawingarea.connect_draw(draw_cb);\n\n glib::signal_handler_block(drawingarea, &signal_handler_id);\n\n\n\n Thumbnail {\n\n drawingarea: drawingarea.clone(),\n\n signal_handler_id: Some(signal_handler_id),\n\n state: ThumbnailState::Blocked,\n\n }\n\n }\n", "file_path": "src/ui/info_controller.rs", "rank": 18, "score": 49750.53079173616 }, { "content": "fn main() {\n\n generate_resources();\n\n generate_translations();\n\n\n\n #[cfg(target_family = \"unix\")]\n\n generate_install_script();\n\n\n\n #[cfg(target_family = \"unix\")]\n\n generate_uninstall_script();\n\n}\n", "file_path": "build.rs", "rank": 19, "score": 49156.35961496464 }, { "content": "#[derive(Debug)]\n\nstruct 
PurgeError;\n\n\n\n#[derive(Debug)]\n\npub struct StateChangeError;\n\n\n\nimpl From<gst::StateChangeError> for StateChangeError {\n\n fn from(_: gst::StateChangeError) -> Self {\n\n StateChangeError\n\n }\n\n}\n\n\n\nimpl From<PurgeError> for StateChangeError {\n\n fn from(_: PurgeError) -> Self {\n\n StateChangeError\n\n }\n\n}\n\n\n\nimpl fmt::Display for StateChangeError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Media: couldn't change state\")\n", "file_path": "src/media/playback_pipeline.rs", "rank": 20, "score": 48457.21351355017 }, { "content": "fn generate_resources() {\n\n let output_path = target_path().join(\"resources\");\n\n create_dir_all(&output_path).unwrap();\n\n\n\n // UI\n\n let input_path = res_path().join(\"ui\");\n\n\n\n let mut compile_res = Command::new(\"glib-compile-resources\");\n\n compile_res\n\n .arg(\"--generate\")\n\n .arg(format!(\"--sourcedir={}\", input_path.to_str().unwrap()))\n\n .arg(format!(\n\n \"--target={}\",\n\n output_path.join(\"ui.gresource\").to_str().unwrap(),\n\n ))\n\n .arg(input_path.join(\"ui.gresource.xml\").to_str().unwrap());\n\n\n\n match compile_res.status() {\n\n Ok(status) => {\n\n if !status.success() {\n", "file_path": "build.rs", "rank": 21, "score": 47460.28635483124 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n init_locale();\n\n\n\n // Character encoding is broken unless gtk (glib) is initialized\n\n let is_gtk_ok = gtk::init().is_ok();\n\n\n\n if is_gtk_ok {\n\n ui::run(get_command_line());\n\n } else {\n\n error!(\"{}\", gettext(\"Failed to initialize GTK\"));\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 22, "score": 47460.28635483124 }, { "content": "fn generate_translations() {\n\n if let Ok(mut linguas_file) = File::open(&po_path().join(\"LINGUAS\")) {\n\n let mut linguas = String::new();\n\n linguas_file\n\n .read_to_string(&mut linguas)\n\n .expect(\"Couldn't read po/LINGUAS as string\");\n\n\n\n for lingua in linguas.lines() {\n\n let 
mo_path = target_path()\n\n .join(\"locale\")\n\n .join(lingua)\n\n .join(\"LC_MESSAGES\");\n\n create_dir_all(&mo_path).unwrap();\n\n\n\n let mut msgfmt = Command::new(\"msgfmt\");\n\n msgfmt\n\n .arg(format!(\n\n \"--output-file={}\",\n\n mo_path.join(\"media-toc-player.mo\").to_str().unwrap()\n\n ))\n", "file_path": "build.rs", "rank": 23, "score": 47460.28635483124 }, { "content": "struct ChapterTree {\n\n store: gtk::TreeStore,\n\n iter: Option<gtk::TreeIter>,\n\n selected: Option<gtk::TreeIter>,\n\n}\n\n\n\nimpl ChapterTree {\n\n fn new(store: gtk::TreeStore) -> Self {\n\n ChapterTree {\n\n store,\n\n iter: None,\n\n selected: None,\n\n }\n\n }\n\n\n\n fn store(&self) -> &gtk::TreeStore {\n\n &self.store\n\n }\n\n\n\n fn clear(&mut self) {\n", "file_path": "src/ui/chapter_tree_manager.rs", "rank": 24, "score": 47269.75320091483 }, { "content": "pub trait Reader {\n\n fn read(&self, info: &MediaInfo, source: &mut dyn Read) -> Result<Option<gst::Toc>, String>;\n\n}\n", "file_path": "src/metadata/format.rs", "rank": 25, "score": 47005.840400494766 }, { "content": "#[cfg(target_family = \"unix\")]\n\nfn generate_uninstall_script() {\n\n let base_dirs = BaseDirs::new().unwrap();\n\n // Note: `base_dirs.executable_dir()` is `None` on macOS\n\n if let Some(exe_dir) = base_dirs.executable_dir() {\n\n let project_dirs = ProjectDirs::from(\"org\", \"fengalin\", &APP_NAME).unwrap();\n\n let app_data_dir = project_dirs.data_dir();\n\n let data_dir = app_data_dir.parent().unwrap();\n\n\n\n match File::create(&target_path().join(\"uninstall\")) {\n\n Ok(mut install_file) => {\n\n install_file\n\n .write_all(format!(\"# User uninstall script for {}\\n\", *APP_NAME).as_bytes())\n\n .unwrap();\n\n\n\n install_file\n\n .write_all(b\"\\n# Uninstall executable\\n\")\n\n .unwrap();\n\n install_file\n\n .write_all(format!(\"rm {:?}\\n\", exe_dir.join(&*APP_NAME)).as_bytes())\n\n .unwrap();\n", "file_path": "build.rs", "rank": 26, "score": 45922.46442264116 }, { "content": 
"#[cfg(target_family = \"unix\")]\n\nfn generate_install_script() {\n\n let base_dirs = BaseDirs::new().unwrap();\n\n // Note: `base_dirs.executable_dir()` is `None` on macOS\n\n if let Some(exe_dir) = base_dirs.executable_dir() {\n\n let project_dirs = ProjectDirs::from(\"org\", \"fengalin\", &APP_NAME).unwrap();\n\n let app_data_dir = project_dirs.data_dir();\n\n let data_dir = app_data_dir.parent().unwrap();\n\n\n\n match File::create(&target_path().join(\"install\")) {\n\n Ok(mut install_file) => {\n\n install_file\n\n .write_all(format!(\"# User install script for {}\\n\", *APP_NAME).as_bytes())\n\n .unwrap();\n\n\n\n install_file.write_all(b\"\\n# Install executable\\n\").unwrap();\n\n install_file\n\n .write_all(format!(\"mkdir -p {:?}\\n\", exe_dir).as_bytes())\n\n .unwrap();\n\n install_file\n\n .write_all(\n", "file_path": "build.rs", "rank": 27, "score": 45922.46442264116 }, { "content": "pub trait UIDispatcher {\n\n type Controller: UIController;\n\n\n\n fn setup(\n\n ctrl: &mut Self::Controller,\n\n main_ctrl_rc: &Rc<RefCell<MainController>>,\n\n app: &gtk::Application,\n\n ui_event: &UIEventSender,\n\n );\n\n\n\n // bind context specific accels\n\n fn bind_accels_for(_ctx: UIFocusContext, _app: &gtk::Application) {}\n\n}\n", "file_path": "src/ui/mod.rs", "rank": 28, "score": 45714.75820886421 }, { "content": "pub trait UIController {\n\n fn new_media(&mut self, _pipeline: &PlaybackPipeline) {}\n\n fn cleanup(&mut self);\n\n fn streams_changed(&mut self, _info: &metadata::MediaInfo) {}\n\n fn grab_focus(&self) {}\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 29, "score": 45714.75820886421 }, { "content": "fn register_resource(resource: &[u8]) {\n\n let gbytes = glib::Bytes::from(resource);\n\n gio::Resource::from_data(&gbytes)\n\n .map(|resource| {\n\n gio::resources_register(&resource);\n\n })\n\n .unwrap_or_else(|err| {\n\n warn!(\"unable to load resources: {:?}\", err);\n\n });\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 30, "score": 
39238.1267353935 }, { "content": "use gettextrs::gettext;\n\nuse glib::clone;\n\nuse gtk::prelude::*;\n\nuse log::error;\n\n\n\nuse std::{cell::RefCell, rc::Rc};\n\n\n\nuse super::{spawn, MainController, UIDispatcher, UIEventSender, VideoController};\n\n\n\npub struct VideoDispatcher;\n\nimpl UIDispatcher for VideoDispatcher {\n\n type Controller = VideoController;\n\n\n\n fn setup(\n\n video_ctrl: &mut VideoController,\n\n _main_ctrl_rc: &Rc<RefCell<MainController>>,\n\n _app: &gtk::Application,\n\n ui_event: &UIEventSender,\n\n ) {\n\n match video_ctrl.video_output {\n", "file_path": "src/ui/video_dispatcher.rs", "rank": 31, "score": 31691.08706576628 }, { "content": " }\n\n }\n\n }\n\n\n\n fn streams_changed(&mut self, info: &MediaInfo) {\n\n if let Some(video_output) = self.video_output.as_ref() {\n\n if let Some(cleaner_id) = self.cleaner_id.take() {\n\n self.container.get_children()[0].disconnect(cleaner_id);\n\n }\n\n\n\n if info.streams.is_video_selected() {\n\n debug!(\"streams_changed video selected\");\n\n video_output.widget.show();\n\n } else {\n\n debug!(\"streams_changed video not selected\");\n\n video_output.widget.hide();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/ui/video_controller.rs", "rank": 32, "score": 31687.797130857933 }, { "content": "}\n\n\n\nimpl UIController for VideoController {\n\n fn cleanup(&mut self) {\n\n if let Some(video_widget) = self.video_widget() {\n\n if self.cleaner_id.is_none() {\n\n self.cleaner_id = Some(video_widget.connect_draw(|widget, cr| {\n\n let allocation = widget.get_allocation();\n\n cr.set_source_rgb(0f64, 0f64, 0f64);\n\n cr.rectangle(\n\n 0f64,\n\n 0f64,\n\n f64::from(allocation.width),\n\n f64::from(allocation.height),\n\n );\n\n cr.fill();\n\n\n\n Inhibit(true)\n\n }));\n\n video_widget.queue_draw();\n", "file_path": "src/ui/video_controller.rs", "rank": 33, "score": 31684.44961573742 }, { "content": "use glib::{prelude::*, signal::SignalHandlerId};\n\nuse gtk::prelude::*;\n\nuse 
log::debug;\n\n\n\nuse crate::{\n\n application::{CommandLineArguments, CONFIG},\n\n metadata::MediaInfo,\n\n};\n\n\n\nuse super::UIController;\n\n\n\npub struct VideoOutput {\n\n sink: gst::Element,\n\n pub(super) widget: gtk::Widget,\n\n}\n\n\n\npub struct VideoController {\n\n pub(super) video_output: Option<VideoOutput>,\n\n pub(super) container: gtk::Box,\n\n cleaner_id: Option<SignalHandlerId>,\n", "file_path": "src/ui/video_controller.rs", "rank": 34, "score": 31683.737697882512 }, { "content": " .get::<gtk::Widget>()\n\n .expect(\"VideoController: unexpected type for `widget` in `gtkglsink`\")\n\n .expect(\"VideoController: `widget` not found in `gtkglsink`\"),\n\n }\n\n })\n\n .ok()\n\n } else {\n\n None\n\n }\n\n .or_else(|| {\n\n gst::ElementFactory::make(\"gtksink\", Some(\"video_sink\"))\n\n .map(|sink| {\n\n debug!(\"Using gtksink\");\n\n VideoOutput {\n\n sink: sink.clone(),\n\n widget: sink\n\n .get_property(\"widget\")\n\n .expect(\"VideoController: couldn't get `widget` from `gtksink`\")\n\n .get::<gtk::Widget>()\n\n .expect(\"VideoController: unexpected type for `widget` in `gtksink`\")\n", "file_path": "src/ui/video_controller.rs", "rank": 35, "score": 31682.909077041484 }, { "content": " video_ctrl\n\n }\n\n\n\n pub fn video_sink(&self) -> Option<gst::Element> {\n\n self.video_output\n\n .as_ref()\n\n .map(|video_output| video_output.sink.clone())\n\n }\n\n\n\n fn video_widget(&self) -> Option<gtk::Widget> {\n\n self.video_output\n\n .as_ref()\n\n .map(|video_output| video_output.widget.clone())\n\n }\n\n}\n", "file_path": "src/ui/video_controller.rs", "rank": 36, "score": 31682.892675316125 }, { "content": "\n\nimpl VideoController {\n\n pub fn new(builder: &gtk::Builder, args: &CommandLineArguments) -> Self {\n\n let container: gtk::Box = builder.get_object(\"video-container\").unwrap();\n\n\n\n let video_output = if !args.disable_gl && !CONFIG.read().unwrap().media.is_gl_disabled {\n\n gst::ElementFactory::make(\"gtkglsink\", 
Some(\"gtkglsink\"))\n\n .map(|gtkglsink| {\n\n let glsinkbin = gst::ElementFactory::make(\"glsinkbin\", Some(\"video_sink\"))\n\n .expect(\"PlaybackPipeline: couldn't get `glsinkbin` from `gtkglsink`\");\n\n glsinkbin\n\n .set_property(\"sink\", &gtkglsink)\n\n .expect(\"VideoController: couldn't set `sink` for `glsinkbin`\");\n\n\n\n debug!(\"Using gtkglsink\");\n\n VideoOutput {\n\n sink: glsinkbin,\n\n widget: gtkglsink\n\n .get_property(\"widget\")\n\n .expect(\"VideoController: couldn't get `widget` from `gtkglsink`\")\n", "file_path": "src/ui/video_controller.rs", "rank": 37, "score": 31682.58931000847 }, { "content": " .expect(\"VideoController: `widget` not found in `gtksink`\"),\n\n }\n\n })\n\n .ok()\n\n });\n\n\n\n if let Some(video_output) = video_output.as_ref() {\n\n container.pack_start(&video_output.widget, true, true, 0);\n\n container.reorder_child(&video_output.widget, 0);\n\n video_output.widget.show();\n\n };\n\n\n\n let mut video_ctrl = VideoController {\n\n video_output,\n\n container,\n\n cleaner_id: None,\n\n };\n\n\n\n video_ctrl.cleanup();\n\n\n", "file_path": "src/ui/video_controller.rs", "rank": 38, "score": 31682.581125646106 }, { "content": " Some(ref video_output) => {\n\n // discard GStreamer defined navigation events on widget\n\n video_output\n\n .widget\n\n .set_events(gdk::EventMask::BUTTON_PRESS_MASK);\n\n\n\n video_ctrl.container.connect_button_press_event(\n\n clone!(@strong ui_event => move |_, _| {\n\n ui_event.play_pause();\n\n Inhibit(true)\n\n }),\n\n );\n\n }\n\n None => {\n\n error!(\"{}\", gettext(\"Couldn't find GStreamer GTK video sink.\"));\n\n let container = video_ctrl.container.clone();\n\n spawn(async move {\n\n container.hide();\n\n });\n\n }\n\n };\n\n }\n\n}\n", "file_path": "src/ui/video_dispatcher.rs", "rank": 39, "score": 31679.58138876265 }, { "content": " &gettext(\"Channels\"),\n\n ALIGN_RIGHT,\n\n Self::AUDIO_CHANNELS_COL,\n\n None,\n\n );\n\n Self::add_text_column(treeview, &gettext(\"Comment\"), 
ALIGN_LEFT, COMMENT_COL, None);\n\n }\n\n}\n\n\n\npub(super) struct UIStreamTextImpl;\n\nimpl UIStreamTextImpl {\n\n const TEXT_FORMAT_COL: u32 = 5;\n\n}\n\n\n\nimpl UIStreamImpl for UIStreamTextImpl {\n\n const TYPE: gst::StreamType = gst::StreamType::TEXT;\n\n\n\n fn new_media(store: &gtk::ListStore, iter: &gtk::TreeIter, caps_struct: &gst::StructureRef) {\n\n if let Ok(Some(format)) = caps_struct.get::<&str>(\"format\") {\n\n store.set_value(&iter, Self::TEXT_FORMAT_COL, &glib::Value::from(&format));\n", "file_path": "src/ui/streams_controller.rs", "rank": 40, "score": 31425.48105130614 }, { "content": " self.store.clear();\n\n }\n\n\n\n fn new_media(&mut self, streams: &metadata::Streams) {\n\n let sorted_collection = streams.collection(Impl::TYPE).sorted();\n\n for stream in sorted_collection {\n\n let iter = self.add_stream(stream);\n\n let caps_structure = stream.caps.get_structure(0).unwrap();\n\n Impl::new_media(&self.store, &iter, &caps_structure);\n\n }\n\n\n\n self.selected = self.store.get_iter_first().map(|ref iter| {\n\n self.treeview.get_selection().select_iter(iter);\n\n self.store\n\n .get_value(iter, STREAM_ID_COL as i32)\n\n .get::<String>()\n\n .unwrap()\n\n .unwrap()\n\n .into()\n\n });\n", "file_path": "src/ui/streams_controller.rs", "rank": 41, "score": 31420.578954157827 }, { "content": "}\n\n\n\nimpl UIStreamImpl for UIStreamVideoImpl {\n\n const TYPE: gst::StreamType = gst::StreamType::VIDEO;\n\n\n\n fn new_media(store: &gtk::ListStore, iter: &gtk::TreeIter, caps_struct: &gst::StructureRef) {\n\n if let Ok(Some(width)) = caps_struct.get::<i32>(\"width\") {\n\n store.set_value(iter, Self::VIDEO_WIDTH_COL, &glib::Value::from(&width));\n\n }\n\n if let Ok(Some(height)) = caps_struct.get::<i32>(\"height\") {\n\n store.set_value(iter, Self::VIDEO_HEIGHT_COL, &glib::Value::from(&height));\n\n }\n\n }\n\n\n\n fn init_treeview(treeview: &gtk::TreeView, store: &gtk::ListStore) {\n\n treeview.set_model(Some(store));\n\n\n\n // Video\n\n 
Self::add_text_column(\n\n treeview,\n", "file_path": "src/ui/streams_controller.rs", "rank": 42, "score": 31419.94877506098 }, { "content": " Self::add_text_column(treeview, &gettext(\"Codec\"), ALIGN_LEFT, CODEC_COL, None);\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Format\"),\n\n ALIGN_LEFT,\n\n Self::TEXT_FORMAT_COL,\n\n None,\n\n );\n\n Self::add_text_column(treeview, &gettext(\"Comment\"), ALIGN_LEFT, COMMENT_COL, None);\n\n }\n\n}\n\n\n\npub struct StreamsController {\n\n pub(super) page: gtk::Grid,\n\n\n\n pub(super) video: UIStream<UIStreamVideoImpl>,\n\n pub(super) audio: UIStream<UIStreamAudioImpl>,\n\n pub(super) text: UIStream<UIStreamTextImpl>,\n\n}\n\n\n", "file_path": "src/ui/streams_controller.rs", "rank": 43, "score": 31418.271572543745 }, { "content": " builder.get_object(\"text_streams-liststore\").unwrap(),\n\n ),\n\n };\n\n\n\n ctrl.cleanup();\n\n\n\n ctrl.video.init_treeview();\n\n ctrl.audio.init_treeview();\n\n ctrl.text.init_treeview();\n\n\n\n ctrl\n\n }\n\n\n\n pub(super) fn stream_clicked(&mut self, type_: gst::StreamType) -> StreamClickedStatus {\n\n match type_ {\n\n gst::StreamType::VIDEO => self.video.stream_clicked(),\n\n gst::StreamType::AUDIO => self.audio.stream_clicked(),\n\n gst::StreamType::TEXT => self.text.stream_clicked(),\n\n other => unimplemented!(\"{:?}\", other),\n\n }\n", "file_path": "src/ui/streams_controller.rs", "rank": 44, "score": 31417.795935647482 }, { "content": " Self::add_text_column(\n\n treeview,\n\n &gettext(\"Height\"),\n\n ALIGN_RIGHT,\n\n Self::VIDEO_HEIGHT_COL,\n\n None,\n\n );\n\n Self::add_text_column(treeview, &gettext(\"Comment\"), ALIGN_LEFT, COMMENT_COL, None);\n\n }\n\n}\n\n\n\npub(super) struct UIStreamAudioImpl;\n\nimpl UIStreamAudioImpl {\n\n const AUDIO_RATE_COL: u32 = 5;\n\n const AUDIO_CHANNELS_COL: u32 = 6;\n\n}\n\n\n\nimpl UIStreamImpl for UIStreamAudioImpl {\n\n const TYPE: gst::StreamType = gst::StreamType::AUDIO;\n\n\n", "file_path": "src/ui/streams_controller.rs", 
"rank": 45, "score": 31417.23433157571 }, { "content": " }\n\n\n\n fn add_stream(&self, stream: &metadata::Stream) -> gtk::TreeIter {\n\n let id_parts: Vec<&str> = stream.id.split('/').collect();\n\n let stream_id_display = if id_parts.len() == 2 {\n\n id_parts[1].to_owned()\n\n } else {\n\n gettext(\"unknown\")\n\n };\n\n\n\n let iter = self.store.insert_with_values(\n\n None,\n\n &[STREAM_ID_COL, STREAM_ID_DISPLAY_COL],\n\n &[&stream.id.as_ref(), &stream_id_display],\n\n );\n\n\n\n let lang = stream\n\n .tags\n\n .get_index::<gst::tags::LanguageName>(0)\n\n .or_else(|| stream.tags.get_index::<gst::tags::LanguageCode>(0))\n", "file_path": "src/ui/streams_controller.rs", "rank": 46, "score": 31416.684922323006 }, { "content": " }\n\n\n\n pub fn selected_streams(&self) -> Vec<Arc<str>> {\n\n let mut streams: Vec<Arc<str>> = Vec::new();\n\n if let Some(stream) = self.video.selected.as_ref() {\n\n streams.push(Arc::clone(stream));\n\n }\n\n if let Some(stream) = self.audio.selected.as_ref() {\n\n streams.push(Arc::clone(stream));\n\n }\n\n if let Some(stream) = self.text.selected.as_ref() {\n\n streams.push(Arc::clone(stream));\n\n }\n\n\n\n streams\n\n }\n\n}\n", "file_path": "src/ui/streams_controller.rs", "rank": 47, "score": 31415.611846382653 }, { "content": " fn new_media(store: &gtk::ListStore, iter: &gtk::TreeIter, caps_struct: &gst::StructureRef) {\n\n if let Ok(Some(rate)) = caps_struct.get::<i32>(\"rate\") {\n\n store.set_value(&iter, Self::AUDIO_RATE_COL, &glib::Value::from(&rate));\n\n }\n\n if let Ok(Some(channels)) = caps_struct.get::<i32>(\"channels\") {\n\n store.set_value(\n\n &iter,\n\n Self::AUDIO_CHANNELS_COL,\n\n &glib::Value::from(&channels),\n\n );\n\n }\n\n }\n\n\n\n fn init_treeview(treeview: &gtk::TreeView, store: &gtk::ListStore) {\n\n treeview.set_model(Some(store));\n\n\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Stream id\"),\n\n ALIGN_LEFT,\n", "file_path": "src/ui/streams_controller.rs", "rank": 48, "score": 
31414.114541698254 }, { "content": "\n\npub enum StreamClickedStatus {\n\n Changed,\n\n Unchanged,\n\n}\n\n\n\npub(super) trait UIStreamImpl {\n\n const TYPE: gst::StreamType;\n\n\n\n fn new_media(store: &gtk::ListStore, iter: &gtk::TreeIter, caps_struct: &gst::StructureRef);\n\n fn init_treeview(treeview: &gtk::TreeView, store: &gtk::ListStore);\n\n\n\n fn add_text_column(\n\n treeview: &gtk::TreeView,\n\n title: &str,\n\n alignment: f32,\n\n col_id: u32,\n\n width: Option<i32>,\n\n ) {\n\n let col = gtk::TreeViewColumn::new();\n", "file_path": "src/ui/streams_controller.rs", "rank": 49, "score": 31413.733851414698 }, { "content": " iter\n\n }\n\n\n\n fn stream_clicked(&mut self) -> StreamClickedStatus {\n\n if let (Some(cursor_path), _) = self.treeview.get_cursor() {\n\n if let Some(iter) = self.store.get_iter(&cursor_path) {\n\n let stream = self\n\n .store\n\n .get_value(&iter, STREAM_ID_COL as i32)\n\n .get::<String>()\n\n .unwrap()\n\n .unwrap()\n\n .into();\n\n let stream_to_select = match &self.selected {\n\n Some(stream_id) => {\n\n if stream_id != &stream {\n\n // Stream has changed\n\n Some(stream)\n\n } else {\n\n None\n", "file_path": "src/ui/streams_controller.rs", "rank": 50, "score": 31413.69121546209 }, { "content": " }\n\n}\n\n\n\nimpl StreamsController {\n\n pub fn new(builder: &gtk::Builder) -> Self {\n\n let mut ctrl = StreamsController {\n\n page: builder.get_object(\"streams-grid\").unwrap(),\n\n\n\n video: UIStream::new(\n\n builder.get_object(\"video_streams-treeview\").unwrap(),\n\n builder.get_object(\"video_streams-liststore\").unwrap(),\n\n ),\n\n\n\n audio: UIStream::new(\n\n builder.get_object(\"audio_streams-treeview\").unwrap(),\n\n builder.get_object(\"audio_streams-liststore\").unwrap(),\n\n ),\n\n\n\n text: UIStream::new(\n\n builder.get_object(\"text_streams-treeview\").unwrap(),\n", "file_path": "src/ui/streams_controller.rs", "rank": 51, "score": 31413.516530346402 }, { "content": "use glib::clone;\n\nuse 
gtk::prelude::*;\n\n\n\nuse std::{cell::RefCell, rc::Rc};\n\n\n\nuse super::{MainController, StreamsController, UIDispatcher, UIEventSender, UIFocusContext};\n\n\n\npub struct StreamsDispatcher;\n\nimpl UIDispatcher for StreamsDispatcher {\n\n type Controller = StreamsController;\n\n\n\n fn setup(\n\n streams_ctrl: &mut StreamsController,\n\n _main_ctrl_rc: &Rc<RefCell<MainController>>,\n\n _app: &gtk::Application,\n\n ui_event: &UIEventSender,\n\n ) {\n\n streams_ctrl.video.treeview.connect_cursor_changed(\n\n clone!(@strong ui_event => move |_| ui_event.stream_clicked(gst::StreamType::VIDEO)),\n\n );\n", "file_path": "src/ui/streams_dispatcher.rs", "rank": 52, "score": 31412.072863251567 }, { "content": " &gettext(\"Stream id\"),\n\n ALIGN_LEFT,\n\n STREAM_ID_DISPLAY_COL,\n\n Some(200),\n\n );\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Language\"),\n\n ALIGN_CENTER,\n\n LANGUAGE_COL,\n\n None,\n\n );\n\n Self::add_text_column(treeview, &gettext(\"Codec\"), ALIGN_LEFT, CODEC_COL, None);\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Width\"),\n\n ALIGN_RIGHT,\n\n Self::VIDEO_WIDTH_COL,\n\n None,\n\n );\n", "file_path": "src/ui/streams_controller.rs", "rank": 53, "score": 31411.244272240063 }, { "content": " STREAM_ID_DISPLAY_COL,\n\n Some(200),\n\n );\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Language\"),\n\n ALIGN_CENTER,\n\n LANGUAGE_COL,\n\n None,\n\n );\n\n Self::add_text_column(treeview, &gettext(\"Codec\"), ALIGN_LEFT, CODEC_COL, None);\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Rate\"),\n\n ALIGN_RIGHT,\n\n Self::AUDIO_RATE_COL,\n\n None,\n\n );\n\n Self::add_text_column(\n\n treeview,\n", "file_path": "src/ui/streams_controller.rs", "rank": 54, "score": 31409.9692074327 }, { "content": "impl UIController for StreamsController {\n\n fn new_media(&mut self, pipeline: &PlaybackPipeline) {\n\n self.video.new_media(&pipeline.info.streams);\n\n self.audio.new_media(&pipeline.info.streams);\n\n 
self.text.new_media(&pipeline.info.streams);\n\n }\n\n\n\n fn cleanup(&mut self) {\n\n self.video.cleanup();\n\n self.audio.cleanup();\n\n self.text.cleanup();\n\n }\n\n\n\n fn grab_focus(&self) {\n\n // grab focus asynchronoulsy because it triggers the `cursor_changed` signal\n\n // which needs to check if the stream has changed\n\n let audio_treeview = self.audio.treeview.clone();\n\n spawn(async move {\n\n audio_treeview.grab_focus();\n\n });\n", "file_path": "src/ui/streams_controller.rs", "rank": 55, "score": 31408.413308745556 }, { "content": " }\n\n }\n\n None => Some(stream),\n\n };\n\n\n\n if let Some(new_stream) = stream_to_select {\n\n self.selected = Some(new_stream);\n\n return StreamClickedStatus::Changed;\n\n }\n\n }\n\n }\n\n\n\n StreamClickedStatus::Unchanged\n\n }\n\n}\n\n\n\npub(super) struct UIStreamVideoImpl;\n\nimpl UIStreamVideoImpl {\n\n const VIDEO_WIDTH_COL: u32 = 5;\n\n const VIDEO_HEIGHT_COL: u32 = 6;\n", "file_path": "src/ui/streams_controller.rs", "rank": 56, "score": 31405.352399579704 }, { "content": "}\n\n\n\nimpl<Impl: UIStreamImpl> UIStream<Impl> {\n\n fn new(treeview: gtk::TreeView, store: gtk::ListStore) -> Self {\n\n UIStream {\n\n treeview,\n\n store,\n\n selected: None,\n\n phantom: std::marker::PhantomData,\n\n }\n\n }\n\n\n\n fn init_treeview(&self) {\n\n Impl::init_treeview(&self.treeview, &self.store);\n\n }\n\n\n\n fn cleanup(&mut self) {\n\n self.selected = None;\n\n self.treeview\n\n .set_cursor(&gtk::TreePath::new(), None::<&gtk::TreeViewColumn>, false);\n", "file_path": "src/ui/streams_controller.rs", "rank": 57, "score": 31405.322419900782 }, { "content": " .and_then(|value| value.get())\n\n .unwrap_or(\"-\");\n\n self.store\n\n .set_value(&iter, LANGUAGE_COL, &glib::Value::from(lang));\n\n\n\n if let Some(comment) = stream\n\n .tags\n\n .get_index::<gst::tags::Comment>(0)\n\n .and_then(|value| value.get())\n\n {\n\n self.store\n\n .set_value(&iter, COMMENT_COL, &glib::Value::from(comment));\n\n }\n\n\n\n 
self.store.set_value(\n\n &iter,\n\n CODEC_COL,\n\n &glib::Value::from(&stream.codec_printable),\n\n );\n\n\n", "file_path": "src/ui/streams_controller.rs", "rank": 58, "score": 31405.124869409665 }, { "content": " col.set_title(title);\n\n\n\n let renderer = gtk::CellRendererText::new();\n\n renderer.set_alignment(alignment, ALIGN_CENTER);\n\n col.pack_start(&renderer, true);\n\n col.add_attribute(&renderer, \"text\", col_id as i32);\n\n\n\n if let Some(width) = width {\n\n renderer.set_fixed_size(width, -1);\n\n }\n\n\n\n treeview.append_column(&col);\n\n }\n\n}\n\n\n\npub(super) struct UIStream<Impl: UIStreamImpl> {\n\n pub(super) treeview: gtk::TreeView,\n\n store: gtk::ListStore,\n\n selected: Option<Arc<str>>,\n\n phantom: std::marker::PhantomData<Impl>,\n", "file_path": "src/ui/streams_controller.rs", "rank": 59, "score": 31405.07785690473 }, { "content": "\n\n streams_ctrl.audio.treeview.connect_cursor_changed(\n\n clone!(@strong ui_event => move |_| ui_event.stream_clicked(gst::StreamType::AUDIO)),\n\n );\n\n\n\n streams_ctrl.text.treeview.connect_cursor_changed(\n\n clone!(@strong ui_event => move |_| ui_event.stream_clicked(gst::StreamType::TEXT)),\n\n );\n\n\n\n streams_ctrl\n\n .page\n\n .connect_map(clone!(@strong ui_event => move |_| {\n\n ui_event.switch_to(UIFocusContext::StreamsPage);\n\n }));\n\n }\n\n}\n", "file_path": "src/ui/streams_dispatcher.rs", "rank": 60, "score": 31404.682511100407 }, { "content": " }\n\n }\n\n\n\n fn init_treeview(treeview: &gtk::TreeView, store: &gtk::ListStore) {\n\n treeview.set_model(Some(store));\n\n\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Stream id\"),\n\n ALIGN_LEFT,\n\n STREAM_ID_DISPLAY_COL,\n\n Some(200),\n\n );\n\n Self::add_text_column(\n\n treeview,\n\n &gettext(\"Language\"),\n\n ALIGN_CENTER,\n\n LANGUAGE_COL,\n\n None,\n\n );\n", "file_path": "src/ui/streams_controller.rs", "rank": 61, "score": 31403.606478912207 }, { "content": "use gettextrs::gettext;\n\n\n\nuse 
gtk::prelude::*;\n\n\n\nuse std::sync::Arc;\n\n\n\nuse crate::{media::PlaybackPipeline, metadata};\n\n\n\nuse super::{spawn, UIController};\n\n\n\nconst ALIGN_LEFT: f32 = 0f32;\n\nconst ALIGN_CENTER: f32 = 0.5f32;\n\nconst ALIGN_RIGHT: f32 = 1f32;\n\n\n\nconst STREAM_ID_COL: u32 = 0;\n\nconst STREAM_ID_DISPLAY_COL: u32 = 1;\n\n\n\nconst LANGUAGE_COL: u32 = 2;\n\nconst CODEC_COL: u32 = 3;\n\nconst COMMENT_COL: u32 = 4;\n", "file_path": "src/ui/streams_controller.rs", "rank": 62, "score": 31402.55616148776 }, { "content": "static NAME_TAG: &str = \"NAME\";\n\n\n\npub struct MKVMergeTextFormat {}\n\n\n\nimpl MKVMergeTextFormat {\n\n pub fn get_extension() -> &'static str {\n\n EXTENSION\n\n }\n\n\n\n pub fn new_as_boxed() -> Box<Self> {\n\n Box::new(MKVMergeTextFormat {})\n\n }\n\n}\n\n\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 63, "score": 30247.819514999992 }, { "content": " } else {\n\n panic!(\"unexpected error type returned\");\n\n }\n\n}\n\n\n\nimpl Reader for MKVMergeTextFormat {\n\n fn read(&self, info: &MediaInfo, source: &mut dyn Read) -> Result<Option<gst::Toc>, String> {\n\n let error_msg = gettext(\"unexpected error reading mkvmerge text file.\");\n\n let mut content = String::new();\n\n source.read_to_string(&mut content).map_err(|_| {\n\n error!(\"{}\", error_msg);\n\n error_msg.clone()\n\n })?;\n\n\n\n if !content.is_empty() {\n\n let mut toc_edition = gst::TocEntry::new(gst::TocEntryType::Edition, \"\");\n\n let mut last_chapter: Option<gst::TocEntry> = None;\n\n let mut input = content.as_str();\n\n\n\n while !input.is_empty() {\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 64, "score": 30240.65274232002 }, { "content": "use gettextrs::gettext;\n\n\n\nuse log::error;\n\n\n\nuse nom::{\n\n bytes::complete::tag,\n\n character::complete::{line_ending, not_line_ending},\n\n combinator::{opt, verify},\n\n error::ErrorKind,\n\n sequence::{pair, preceded, separated_pair, terminated},\n\n IResult,\n\n};\n\n\n\nuse 
std::io::Read;\n\n\n\nuse super::{parse_timestamp, parse_to, MediaInfo, Reader, Timestamp4Humans};\n\n\n\nstatic EXTENSION: &str = \"txt\";\n\n\n\nstatic CHAPTER_TAG: &str = \"CHAPTER\";\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 65, "score": 30237.688047260417 }, { "content": " Some(\"test\".to_owned()),\n\n toc_entry.get_tags().and_then(|tags| tags\n\n .get::<gst::tags::Title>()\n\n .and_then(|tag| tag.get().map(|value| value.to_string()))),\n\n );\n\n\n\n let res = parse_chapter(\"CHAPTER0x=00:00:01.000\");\n\n let err = res.unwrap_err();\n\n if let nom::Err::Error((i, error_kind)) = err {\n\n assert_eq!(\"x=00:00:01.000\", i);\n\n assert_eq!(ErrorKind::Tag, error_kind);\n\n } else {\n\n panic!(\"unexpected error type returned\");\n\n }\n\n\n\n let res = parse_chapter(\"CHAPTER01=00:00:01.000\\nCHAPTER02NAME=test\\n\");\n\n let err = res.unwrap_err();\n\n if let nom::Err::Error((i, error_kind)) = err {\n\n assert_eq!(\"02NAME=test\\n\", i);\n\n assert_eq!(ErrorKind::Verify, error_kind);\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 66, "score": 30231.178630711343 }, { "content": " ErrorKind::Verify => gettext(\"chapter numbers don't match for: {}\")\n\n .replacen(\"{}\", &i[..i.len().min(2)], 1),\n\n _ => gettext(\"unexpected sequence starting with: {}\").replacen(\n\n \"{}\",\n\n &i[..i.len().min(10)],\n\n 1,\n\n ),\n\n }\n\n } else {\n\n error!(\"unknown error {:?}\", err);\n\n error_msg\n\n };\n\n error!(\"{}\", msg);\n\n return Err(msg);\n\n }\n\n };\n\n\n\n if let Some(mut prev_chapter) = last_chapter.take() {\n\n // Update previous chapter's end\n\n let prev_start = prev_chapter.get_start_stop_times().unwrap().0;\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 67, "score": 30231.042076557926 }, { "content": " error!(\"{}\", gettext(\"couldn't update last start position\"));\n\n Err(error_msg)\n\n },\n\n |mut last_chapter| {\n\n let last_start = last_chapter.get_start_stop_times().unwrap().0;\n\n 
last_chapter\n\n .get_mut()\n\n .unwrap()\n\n .set_start_stop_times(last_start, info.duration.as_i64());\n\n toc_edition\n\n .get_mut()\n\n .unwrap()\n\n .append_sub_entry(last_chapter);\n\n\n\n let mut toc = gst::Toc::new(gst::TocScope::Global);\n\n toc.get_mut().unwrap().append_entry(toc_edition);\n\n Ok(Some(toc))\n\n },\n\n )\n\n } else {\n\n // file is empty\n\n Ok(None)\n\n }\n\n }\n\n}\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 68, "score": 30230.979364614115 }, { "content": " let cur_start = cur_chapter.get_start_stop_times().unwrap().0;\n\n prev_chapter\n\n .get_mut()\n\n .unwrap()\n\n .set_start_stop_times(prev_start, cur_start);\n\n // Add previous chapter to the Edition entry\n\n toc_edition\n\n .get_mut()\n\n .unwrap()\n\n .append_sub_entry(prev_chapter);\n\n }\n\n\n\n // Queue current chapter (will be added when next chapter start is known\n\n // or with the media's duration when the parsing is done)\n\n last_chapter = Some(cur_chapter);\n\n }\n\n\n\n // Update last_chapter\n\n last_chapter.take().map_or_else(\n\n || {\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 69, "score": 30229.938793642585 }, { "content": " let cur_chapter = match parse_chapter(input) {\n\n Ok((i, cur_chapter)) => {\n\n if i.len() == input.len() {\n\n // No progress\n\n if !i.is_empty() {\n\n let msg = gettext(\"unexpected sequence starting with: {}\")\n\n .replacen(\"{}\", &i[..i.len().min(10)], 1);\n\n error!(\"{}\", msg);\n\n return Err(msg);\n\n }\n\n break;\n\n }\n\n input = i;\n\n cur_chapter\n\n }\n\n Err(err) => {\n\n let msg = if let nom::Err::Error((i, error_kind)) = err {\n\n match error_kind {\n\n ErrorKind::ParseTo => gettext(\"expecting a number, found: {}\")\n\n .replacen(\"{}\", &i[..i.len().min(2)], 1),\n", "file_path": "src/metadata/mkvmerge_text_format.rs", "rank": 70, "score": 30224.822010620206 }, { "content": "\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Format {\n\n 
MKVMergeText,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum MediaContent {\n\n Audio,\n\n AudioVideo,\n\n AudioText,\n\n AudioVideoText,\n\n Text,\n\n Video,\n\n VideoText,\n\n Undefined,\n\n}\n\n\n\nimpl MediaContent {\n\n pub fn add_stream_type(&mut self, type_: gst::StreamType) {\n", "file_path": "src/metadata/mod.rs", "rank": 81, "score": 30.89512629376756 }, { "content": "impl From<PurgeError> for SelectStreamsError {\n\n fn from(_: PurgeError) -> Self {\n\n SelectStreamsError::Unrecoverable\n\n }\n\n}\n\n\n\nimpl fmt::Display for SelectStreamsError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n SelectStreamsError::UnknownId(id) => {\n\n write!(f, \"Media: select stream: unknown id {}\", id.as_ref())\n\n }\n\n SelectStreamsError::Unrecoverable => write!(f, \"Media: couldn't select stream\"),\n\n }\n\n }\n\n}\n\nimpl std::error::Error for SelectStreamsError {}\n\n\n\npub struct PlaybackPipeline {\n\n pipeline: gst::Pipeline,\n", "file_path": "src/media/playback_pipeline.rs", "rank": 82, "score": 30.808530325296168 }, { "content": " match self {\n\n Eos => write!(f, \"Media: seeking past the end\"),\n\n Unrecoverable => write!(f, \"Media: couldn't seek\"),\n\n }\n\n }\n\n}\n\nimpl std::error::Error for SeekError {}\n\n\n\n#[derive(Debug)]\n\npub enum SelectStreamsError {\n\n UnknownId(Arc<str>),\n\n Unrecoverable,\n\n}\n\n\n\nimpl From<media_info::SelectStreamError> for SelectStreamsError {\n\n fn from(err: media_info::SelectStreamError) -> Self {\n\n SelectStreamsError::UnknownId(Arc::clone(err.id()))\n\n }\n\n}\n\n\n", "file_path": "src/media/playback_pipeline.rs", "rank": 85, "score": 27.260460940858827 }, { "content": "use std::boxed::Box;\n\n\n\nuse super::{Format, MKVMergeTextFormat, Reader};\n\n\n\npub struct Factory {}\n\n\n\nimpl Factory {\n\n pub fn get_extensions() -> Vec<(&'static str, Format)> {\n\n let mut result = Vec::<(&'static str, Format)>::new();\n\n\n\n // Only MKVMergeTextFormat 
implemented for Read ATM\n\n result.push((MKVMergeTextFormat::get_extension(), Format::MKVMergeText));\n\n\n\n result\n\n }\n\n\n\n pub fn get_reader(format: Format) -> Box<dyn Reader> {\n\n match format {\n\n Format::MKVMergeText => MKVMergeTextFormat::new_as_boxed(),\n\n }\n\n }\n\n}\n", "file_path": "src/metadata/factory.rs", "rank": 86, "score": 26.79316315267097 }, { "content": " iter: Option<gtk::TreeIter>,\n\n is_first: bool,\n\n}\n\n\n\nimpl<'store> Iter<'store> {\n\n fn new(store: &'store gtk::TreeStore) -> Self {\n\n Iter {\n\n store,\n\n iter: None,\n\n is_first: true,\n\n }\n\n }\n\n}\n\n\n\nimpl<'store> Iterator for Iter<'store> {\n\n type Item = ChapterEntry<'store>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if !self.is_first {\n\n if let Some(iter) = self.iter.as_mut() {\n", "file_path": "src/ui/chapter_tree_manager.rs", "rank": 87, "score": 25.676502963694986 }, { "content": "use lazy_static::lazy_static;\n\n\n\npub const TLD: &str = \"org\";\n\npub const SLD: &str = \"fengalin\";\n\n\n\nlazy_static! {\n\n pub static ref APP_NAME: String = env!(\"CARGO_PKG_NAME\").to_string();\n\n}\n\n\n\nlazy_static! {\n\n pub static ref APP_ID: String = format!(\"{}.{}.{}\", TLD, SLD, *APP_NAME);\n\n}\n\n\n\nlazy_static! 
{\n\n pub static ref APP_PATH: String = format!(\"/{}/{}/{}\", TLD, SLD, *APP_NAME);\n\n}\n\n\n\nmod command_line;\n\npub use self::command_line::{get_command_line, CommandLineArguments};\n\n\n\nmod configuration;\n\npub use self::configuration::CONFIG;\n\n\n\nmod locale;\n\npub use self::locale::init_locale;\n", "file_path": "src/application/mod.rs", "rank": 89, "score": 25.390732258614417 }, { "content": " this.as_mut().unwrap().missing_plugins.insert(plugin);\n\n }\n\n }\n\n StreamCollection(stream_collection) => {\n\n let this = this.as_mut().unwrap();\n\n stream_collection\n\n .get_stream_collection()\n\n .iter()\n\n .for_each(|stream| this.info.add_stream(&stream));\n\n }\n\n // FIXME really still necessary can't we just use StateChanged?\n\n StreamsSelected(_) => {\n\n streams_selected = true;\n\n }\n\n Tag(msg_tag) => {\n\n let tags = msg_tag.get_tags();\n\n if tags.get_scope() == gst::TagScope::Global {\n\n this.as_mut().unwrap().info.add_tags(&tags);\n\n }\n\n }\n", "file_path": "src/media/playback_pipeline.rs", "rank": 90, "score": 24.38855945409228 }, { "content": " match type_ {\n\n gst::StreamType::AUDIO => match self {\n\n MediaContent::Text => *self = MediaContent::AudioText,\n\n MediaContent::Video => *self = MediaContent::AudioVideo,\n\n MediaContent::VideoText => *self = MediaContent::AudioVideoText,\n\n MediaContent::Undefined => *self = MediaContent::Audio,\n\n _ => (),\n\n },\n\n gst::StreamType::VIDEO => match self {\n\n MediaContent::Audio => *self = MediaContent::AudioVideo,\n\n MediaContent::Text => *self = MediaContent::VideoText,\n\n MediaContent::AudioText => *self = MediaContent::AudioVideoText,\n\n MediaContent::Undefined => *self = MediaContent::Video,\n\n _ => (),\n\n },\n\n gst::StreamType::TEXT => match self {\n\n MediaContent::Audio => *self = MediaContent::AudioText,\n\n MediaContent::Video => *self = MediaContent::VideoText,\n\n MediaContent::AudioVideo => *self = MediaContent::AudioVideoText,\n\n MediaContent::Undefined => 
*self = MediaContent::Text,\n", "file_path": "src/metadata/mod.rs", "rank": 92, "score": 22.916380907599862 }, { "content": " match self.iter.take() {\n\n Some(iter) => {\n\n if self.store.iter_next(&iter) {\n\n self.iter = Some(iter);\n\n let store = &self.store;\n\n self.iter\n\n .as_ref()\n\n .map(|iter| ChapterEntry::new(store, iter))\n\n } else {\n\n None\n\n }\n\n }\n\n None => None,\n\n }\n\n }\n\n\n\n fn pick_next(&self) -> Option<ChapterEntry<'_>> {\n\n match self.selected.as_ref() {\n\n Some(selected) => {\n\n let iter = selected.clone();\n", "file_path": "src/ui/chapter_tree_manager.rs", "rank": 94, "score": 22.12512337401906 }, { "content": "\n\n pub fn enter_chapters(&mut self) -> bool {\n\n // Skip edition entry and enter chapters\n\n assert_eq!(Some(TocVisit::EnteringChildren), self.next());\n\n let found_edition = match self.next() {\n\n Some(TocVisit::Node(entry)) => gst::TocEntryType::Edition == entry.get_entry_type(),\n\n _ => false,\n\n };\n\n\n\n if found_edition {\n\n self.next()\n\n .map_or(false, |visit| TocVisit::EnteringChildren == visit)\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n fn next(&mut self) -> Option<TocVisit> {\n\n match self.next_to_push.take() {\n\n None => {\n", "file_path": "src/metadata/toc_visitor.rs", "rank": 95, "score": 21.620593992935056 }, { "content": "use std::{\n\n cmp::Ordering,\n\n fmt,\n\n ops::{Add, Sub},\n\n};\n\n\n\nuse crate::metadata::{Duration, Timestamp4Humans};\n\n\n\n#[derive(Clone, Copy, Default, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct Timestamp(u64);\n\n\n\nimpl Timestamp {\n\n pub fn new(value: u64) -> Self {\n\n Timestamp(value)\n\n }\n\n\n\n pub fn for_humans(self) -> Timestamp4Humans {\n\n Timestamp4Humans::from_nano(self.0)\n\n }\n\n\n", "file_path": "src/media/timestamp.rs", "rank": 96, "score": 21.432336310535803 }, { "content": "use std::{\n\n fmt,\n\n ops::{Div, DivAssign, Mul, MulAssign},\n\n};\n\n\n\n// FIXME: consider moving to std::time::Duration when `div_duration` is 
stabilized.\n\n\n\n#[derive(Clone, Copy, Default, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct Duration(u64);\n\n\n\nimpl Duration {\n\n pub const fn from_nanos(nanos: u64) -> Self {\n\n Duration(nanos)\n\n }\n\n\n\n pub const fn from_secs(secs: u64) -> Self {\n\n Duration(secs * 1_000_000_000u64)\n\n }\n\n\n\n pub fn as_f64(self) -> f64 {\n", "file_path": "src/metadata/duration.rs", "rank": 98, "score": 21.181034883680894 }, { "content": " // we should check whether the parent node contains something\n\n None\n\n }\n\n }\n\n None => self.store.get_iter_first().map(|iter| {\n\n let mut last_iter = iter.clone();\n\n while self.store.iter_next(&iter) {\n\n last_iter = iter.clone();\n\n }\n\n ChapterEntry::new_owned(&self.store, last_iter)\n\n }),\n\n }\n\n }\n\n\n\n fn add_unchecked(&self, ts: ChapterTimestamps, title: &str) -> gtk::TreeIter {\n\n self.store.insert_with_values(\n\n None,\n\n None,\n\n &[START_COL, END_COL, TITLE_COL, START_STR_COL, END_STR_COL],\n\n &[\n", "file_path": "src/ui/chapter_tree_manager.rs", "rank": 99, "score": 21.07080328157393 } ]
Rust
src/dat/civilization.rs
vtabbott/djin
9b63973941a9f6efc327f18085c00838e02e80b7
use crate::dat::common::DeString; use crate::dat::unit::Task; use crate::dat::ResourceUsage; const RESOURCE_STORAGE_SIZE: usize = 3; const GRAPHIC_DISPLACEMENT_SIZE: usize = 3; const BUILDING_ANNEXES_SIZE: usize = 4; #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Civilizations { pub size: u16, #[protocol(length_prefix(elements(size)))] pub civilizations: Vec<Civilization>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Civilization { pub player_type: u8, pub name: DeString, pub resource_size: u16, pub tech_tree_id: i16, pub team_bonus_id: u16, #[protocol(length_prefix(elements(resource_size)))] pub resources: Vec<f32>, pub icon_set: u8, pub units_pointers_size: u16, #[protocol(length_prefix(elements(units_pointers_size)))] pub unit_pointers: Vec<u32>, #[protocol(length_prefix(pointers(unit_pointers)))] pub units: Vec<Unit>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Unit { pub unit_type: UnitType, pub id: i16, pub language_dll_name: i32, pub language_dll_creation: i32, pub class: i16, pub standing_graphics: (i16, i16), pub dying_graphic: i16, pub undead_graphic: i16, pub undead_mode: u8, pub hit_points: i16, pub line_of_sight: f32, pub garrison_capacity: u8, pub collision_box: (f32, f32, f32), pub train_sound: i16, pub damage_sound: i16, pub dead_unit_id: i16, pub blood_unit_id: i16, pub sort_number: u8, pub can_be_built_on: u8, pub icon_id: i16, pub hide_in_editor: u8, pub old_portrait_pict: i16, pub enabled: u8, pub disabled: u8, pub placement_side_terrain: (i16, i16), pub placement_terrain: (i16, i16), pub clearance_size: (f32, f32), pub hill_mode: u8, pub fog_visibility: u8, pub terrain_restriction: i16, pub fly_mode: u8, pub resource_capacity: i16, pub resource_decay: f32, pub blast_defense_level: u8, pub combat_level: u8, pub interaction_mode: u8, pub minimap_mode: u8, pub interface_kind: u8, pub multiple_attribute_mode: f32, pub minimap_color: u8, pub language_dll_help: i32, pub language_dll_hot_key_text: i32, pub hot_key: 
i32, pub recyclable: u8, pub enable_auto_gather: u8, pub create_doppelganger_on_death: u8, pub resource_gather_group: u8, pub occlusion_mode: u8, pub obstruction_type: u8, pub obstruction_class: u8, pub r#trait: u8, pub civilization: u8, pub nothing: i16, pub selection_effect: u8, pub editor_selection_colour: u8, pub outline_box: (f32, f32, f32), pub data: u32, pub data_2: u32, #[protocol(fixed_length(RESOURCE_STORAGE_SIZE))] pub resources_storage: Vec<ResourceStorage>, pub damage_graphic_size: u8, #[protocol(length_prefix(elements(damage_graphic_size)))] pub damage_graphics: Vec<DamageGraphic>, pub selection_sound: i16, pub dying_sound: i16, pub wwise_train_sound_id: u32, pub wwise_damage_sound_id: u32, pub wwise_selection_sound_id: u32, pub wwise_dying_sound_id: u32, pub old_attack_reaction: u8, pub convert_terrain: u8, pub name: DeString, pub copy_id: i16, pub base_id: i16, #[protocol(skip_if("unit_type < UnitType::Flag"))] pub speed: Option<f32>, #[protocol(skip_if("unit_type < UnitType::DeadFish"))] pub dead_fish: Option<DeadFish>, #[protocol(skip_if("unit_type < UnitType::Bird"))] pub bird: Option<Bird>, #[protocol(skip_if("unit_type < UnitType::Combatant"))] pub type_50: Option<Combatant>, #[protocol(skip_if("unit_type != UnitType::Projectile"))] pub projectile: Option<Projectile>, #[protocol(skip_if("unit_type < UnitType::Creatable"))] pub creatable: Option<Creatable>, #[protocol(skip_if("unit_type != UnitType::Building"))] pub building: Option<Building>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct ResourceStorage(i16, f32, u8); #[derive(Protocol, Debug, Clone, PartialEq)] pub struct DamageGraphic { pub graphic_id: i16, pub damage_percent: i16, pub apply_mode: u8, } #[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)] #[protocol(discriminant = "integer")] #[repr(u8)] pub enum UnitType { EyeCandy = 10, Trees = 15, Flag = 20, Dopl = 25, DeadFish = 30, Bird = 40, Combatant = 50, Projectile = 60, Creatable = 70, Building = 80, AoeTrees = 90, } 
#[derive(Protocol, Debug, Clone, PartialEq)] pub struct DeadFish { pub walking_graphic: i16, pub running_graphic: i16, pub rotation_speed: f32, pub old_size_class: u8, pub tracking_unit: i16, pub tracking_unit_mode: u8, pub tracking_unit_density: f32, pub old_move_algorithm: u8, pub turn_radius: f32, pub turn_radius_speed: f32, pub max_yaw_per_second_moving: f32, pub stationary_yaw_revolution_time: f32, pub max_yaw_per_second_stationary: f32, pub min_collision_size_multiplier: f32, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Bird { pub default_task_id: i16, pub search_radius: f32, pub work_rate: f32, pub drop_sites_size: (i16, i16, i16), pub task_swap_group: u8, pub attack_sound: i16, pub move_sound: i16, pub wwise_attack_sound_id: u32, pub wwise_move_sound_id: u32, pub run_pattern: u8, pub task_list_size_count: i16, #[protocol(length_prefix(elements(task_list_size_count)))] pub task_list: Vec<Task>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Combatant { pub base_armor: i16, pub attack_count_size: i16, #[protocol(length_prefix(elements(attack_count_size)))] pub attacks: Vec<AttackOrArmor>, pub armor_count_size: i16, #[protocol(length_prefix(elements(armor_count_size)))] pub armor: Vec<AttackOrArmor>, pub defense_terrain_bonus: i16, pub bonus_damage_resistance: f32, pub max_range: f32, pub blast_width: f32, pub reload_time: f32, pub projectile_unit_id: i16, pub accuracy_percent: i16, pub break_off_combat: u8, pub frame_delay: i16, #[protocol(fixed_length(GRAPHIC_DISPLACEMENT_SIZE))] pub graphic_displacement: Vec<f32>, pub blast_attack_level: u8, pub min_range: f32, pub accuracy_dispersion: f32, pub attack_graphic: i16, pub displayed_melee_armour: i16, pub displayed_attack: i16, pub displayed_range: f32, pub displayed_reload_time: f32, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Projectile { pub projectile_type: u8, pub smart_mode: u8, pub hit_mode: u8, pub vanish_mode: u8, pub area_effect_specials: u8, pub projectile_arc: 
f32, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Creatable { #[protocol(fixed_length(RESOURCE_STORAGE_SIZE))] pub resources_costs: Vec<ResourceUsage>, pub train_time: i16, pub train_location_id: i16, pub button_id: u8, pub rear_attack_modifier: f32, pub flank_attack_modifier: f32, pub creatable_type: u8, pub hero_mode: u8, pub garrison_graphic: i32, pub spawning_graphic: i16, pub upgrade_graphic: i16, pub hero_glow_graphic: i16, pub max_charge: f32, pub recharge_rate: f32, pub charge_event: i16, pub charge_type: i16, pub total_projectiles: f32, pub max_total_projectiles: u8, pub projectile_spawning_area: (f32, f32, f32), pub secondary_projectile_unit: i32, pub special_graphic: i32, pub special_ability: u8, pub displayed_pierce_armour: i16, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Building { pub construction_graphic_id: i16, pub snow_graphic_id: i16, pub destruction_graphic_id: i16, pub destruction_rubble_graphic_id: i16, pub researching_graphic: i16, pub research_completed_graphic: i16, pub adjacent_mode: u8, pub graphics_angle: i16, pub disappears_when_built: u8, pub stack_unit_id: i16, pub foundation_terrain_id: i16, pub old_overlay_id: i16, pub tech_id: i16, pub can_burn: u8, #[protocol(fixed_length(BUILDING_ANNEXES_SIZE))] pub building_annexes: Vec<Annexe>, pub head_unit: i16, pub transform_unit: i16, pub transform_sound: i16, pub construction_sound: i16, pub wwise_transform_sound_id: u32, pub wwise_construction_sound_id: u32, pub garrison_type: u8, pub garrison_heal_rate: f32, pub garrison_repair_rate: f32, pub pile_unit: i16, #[protocol(fixed_length(6))] pub looting_table: Vec<u8>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Annexe { pub unit_id: i16, pub misplacement: (f32, f32), } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct AttackOrArmor { pub class: i16, pub amount: i16, }
use crate::dat::common::DeString; use crate::dat::unit::Task; use crate::dat::ResourceUsage; const RESOURCE_STORAGE_SIZE: usize = 3; const GRAPHIC_DISPLACEMENT_SIZE: usize = 3; const BUILDING_ANNEXES_SIZE: usize = 4; #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Civilizations { pub size: u16, #[protocol(length_prefix(elements(size)))] pub civilizations: Vec<Civilization>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Civilization { pub player_type: u8, pub name: DeString, pub resource_size: u16, pub tech_tree_id: i16, pub team_bonus_id: u16, #[protocol(length_prefix(elements(resource_size)))] pub resources: Vec<f32>, pub icon_set: u8, pub units_pointers_size: u16, #[protocol(length_prefix(elements(units_pointers_size)))] pub unit_pointers: Vec<u32>, #[protocol(length_prefix(pointers(unit_pointers)))] pub units: Vec<Unit>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Unit { pub unit_type: UnitType, pub id: i16, pub language_dll_name: i32, pub language_dll_creation: i32, pub class: i16, pub standing_graphics: (i16, i16), pub dying_graphic: i16, pub undead_graphic: i16, pub undead_mode: u8, pub hit_points: i16, pub line_of_sight: f32, pub garrison_capacity: u8, pub collision_box: (f32, f32, f32), pub train_sound: i16, pub damage_sound: i16, pub dead_unit_id: i16, pub blood_unit_id: i16, pub sort_number: u8, pub can_be_built_on: u8, pub icon_id: i16, pub hide_in_editor: u8, pub old_portrait_pict: i16, pub enabled: u8, pub disabled: u8, pub placement_side_terrain: (i16, i16), pub placement_terrain: (i16, i16), pub clearance_size: (f32, f32), pub hill_mode: u8, pub fog_visibility: u8, pub terrain_restriction: i16, pub fly_mode: u8, pub resource_ca
Creatable = 70, Building = 80, AoeTrees = 90, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct DeadFish { pub walking_graphic: i16, pub running_graphic: i16, pub rotation_speed: f32, pub old_size_class: u8, pub tracking_unit: i16, pub tracking_unit_mode: u8, pub tracking_unit_density: f32, pub old_move_algorithm: u8, pub turn_radius: f32, pub turn_radius_speed: f32, pub max_yaw_per_second_moving: f32, pub stationary_yaw_revolution_time: f32, pub max_yaw_per_second_stationary: f32, pub min_collision_size_multiplier: f32, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Bird { pub default_task_id: i16, pub search_radius: f32, pub work_rate: f32, pub drop_sites_size: (i16, i16, i16), pub task_swap_group: u8, pub attack_sound: i16, pub move_sound: i16, pub wwise_attack_sound_id: u32, pub wwise_move_sound_id: u32, pub run_pattern: u8, pub task_list_size_count: i16, #[protocol(length_prefix(elements(task_list_size_count)))] pub task_list: Vec<Task>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Combatant { pub base_armor: i16, pub attack_count_size: i16, #[protocol(length_prefix(elements(attack_count_size)))] pub attacks: Vec<AttackOrArmor>, pub armor_count_size: i16, #[protocol(length_prefix(elements(armor_count_size)))] pub armor: Vec<AttackOrArmor>, pub defense_terrain_bonus: i16, pub bonus_damage_resistance: f32, pub max_range: f32, pub blast_width: f32, pub reload_time: f32, pub projectile_unit_id: i16, pub accuracy_percent: i16, pub break_off_combat: u8, pub frame_delay: i16, #[protocol(fixed_length(GRAPHIC_DISPLACEMENT_SIZE))] pub graphic_displacement: Vec<f32>, pub blast_attack_level: u8, pub min_range: f32, pub accuracy_dispersion: f32, pub attack_graphic: i16, pub displayed_melee_armour: i16, pub displayed_attack: i16, pub displayed_range: f32, pub displayed_reload_time: f32, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Projectile { pub projectile_type: u8, pub smart_mode: u8, pub hit_mode: u8, pub vanish_mode: u8, pub 
area_effect_specials: u8, pub projectile_arc: f32, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Creatable { #[protocol(fixed_length(RESOURCE_STORAGE_SIZE))] pub resources_costs: Vec<ResourceUsage>, pub train_time: i16, pub train_location_id: i16, pub button_id: u8, pub rear_attack_modifier: f32, pub flank_attack_modifier: f32, pub creatable_type: u8, pub hero_mode: u8, pub garrison_graphic: i32, pub spawning_graphic: i16, pub upgrade_graphic: i16, pub hero_glow_graphic: i16, pub max_charge: f32, pub recharge_rate: f32, pub charge_event: i16, pub charge_type: i16, pub total_projectiles: f32, pub max_total_projectiles: u8, pub projectile_spawning_area: (f32, f32, f32), pub secondary_projectile_unit: i32, pub special_graphic: i32, pub special_ability: u8, pub displayed_pierce_armour: i16, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Building { pub construction_graphic_id: i16, pub snow_graphic_id: i16, pub destruction_graphic_id: i16, pub destruction_rubble_graphic_id: i16, pub researching_graphic: i16, pub research_completed_graphic: i16, pub adjacent_mode: u8, pub graphics_angle: i16, pub disappears_when_built: u8, pub stack_unit_id: i16, pub foundation_terrain_id: i16, pub old_overlay_id: i16, pub tech_id: i16, pub can_burn: u8, #[protocol(fixed_length(BUILDING_ANNEXES_SIZE))] pub building_annexes: Vec<Annexe>, pub head_unit: i16, pub transform_unit: i16, pub transform_sound: i16, pub construction_sound: i16, pub wwise_transform_sound_id: u32, pub wwise_construction_sound_id: u32, pub garrison_type: u8, pub garrison_heal_rate: f32, pub garrison_repair_rate: f32, pub pile_unit: i16, #[protocol(fixed_length(6))] pub looting_table: Vec<u8>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct Annexe { pub unit_id: i16, pub misplacement: (f32, f32), } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct AttackOrArmor { pub class: i16, pub amount: i16, }
pacity: i16, pub resource_decay: f32, pub blast_defense_level: u8, pub combat_level: u8, pub interaction_mode: u8, pub minimap_mode: u8, pub interface_kind: u8, pub multiple_attribute_mode: f32, pub minimap_color: u8, pub language_dll_help: i32, pub language_dll_hot_key_text: i32, pub hot_key: i32, pub recyclable: u8, pub enable_auto_gather: u8, pub create_doppelganger_on_death: u8, pub resource_gather_group: u8, pub occlusion_mode: u8, pub obstruction_type: u8, pub obstruction_class: u8, pub r#trait: u8, pub civilization: u8, pub nothing: i16, pub selection_effect: u8, pub editor_selection_colour: u8, pub outline_box: (f32, f32, f32), pub data: u32, pub data_2: u32, #[protocol(fixed_length(RESOURCE_STORAGE_SIZE))] pub resources_storage: Vec<ResourceStorage>, pub damage_graphic_size: u8, #[protocol(length_prefix(elements(damage_graphic_size)))] pub damage_graphics: Vec<DamageGraphic>, pub selection_sound: i16, pub dying_sound: i16, pub wwise_train_sound_id: u32, pub wwise_damage_sound_id: u32, pub wwise_selection_sound_id: u32, pub wwise_dying_sound_id: u32, pub old_attack_reaction: u8, pub convert_terrain: u8, pub name: DeString, pub copy_id: i16, pub base_id: i16, #[protocol(skip_if("unit_type < UnitType::Flag"))] pub speed: Option<f32>, #[protocol(skip_if("unit_type < UnitType::DeadFish"))] pub dead_fish: Option<DeadFish>, #[protocol(skip_if("unit_type < UnitType::Bird"))] pub bird: Option<Bird>, #[protocol(skip_if("unit_type < UnitType::Combatant"))] pub type_50: Option<Combatant>, #[protocol(skip_if("unit_type != UnitType::Projectile"))] pub projectile: Option<Projectile>, #[protocol(skip_if("unit_type < UnitType::Creatable"))] pub creatable: Option<Creatable>, #[protocol(skip_if("unit_type != UnitType::Building"))] pub building: Option<Building>, } #[derive(Protocol, Debug, Clone, PartialEq)] pub struct ResourceStorage(i16, f32, u8); #[derive(Protocol, Debug, Clone, PartialEq)] pub struct DamageGraphic { pub graphic_id: i16, pub damage_percent: i16, pub 
apply_mode: u8, } #[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)] #[protocol(discriminant = "integer")] #[repr(u8)] pub enum UnitType { EyeCandy = 10, Trees = 15, Flag = 20, Dopl = 25, DeadFish = 30, Bird = 40, Combatant = 50, Projectile = 60,
random
[ { "content": "\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Task {\n\n pub task_type: i16,\n\n pub id: i16,\n\n pub is_default: u8,\n\n pub action_type: i16,\n\n pub class_id: i16,\n\n pub unit_id: i16,\n\n pub terrain_id: i16,\n\n pub resource_in: i16,\n\n pub resource_multiplier: i16,\n\n pub resource_out: i16,\n\n pub unused_resource: i16,\n\n pub work_value1: f32,\n\n pub work_value2: f32,\n\n pub work_range: f32,\n\n pub auto_search_targets: u8,\n\n pub search_wait_time: f32,\n\n pub enable_targeting: u8,\n", "file_path": "src/dat/unit.rs", "rank": 0, "score": 22383.956922200723 }, { "content": " pub combat_level_flag: u8,\n\n pub gather_type: i16,\n\n pub work_flag2: i16,\n\n pub target_diplomacy: u8,\n\n pub carry_check: u8,\n\n pub pick_for_construction: u8,\n\n pub moving_graphic_id: i16,\n\n pub proceeding_graphic_id: i16,\n\n pub working_graphic_id: i16,\n\n pub carrying_graphic_id: i16,\n\n pub resource_gathering_sound_id: i16,\n\n pub resource_deposit_sound_id: i16,\n\n pub wwise_resource_gathering_sound_id: u32,\n\n pub wwise_resource_deposit_sound_id: u32,\n\n}\n", "file_path": "src/dat/unit.rs", "rank": 1, "score": 22376.901880221965 }, { "content": "#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Units {\n\n pub unit_size: u32,\n\n #[protocol(length_prefix(elements(unit_size)))]\n\n pub unit_headers: Vec<UnitHeaders>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct UnitHeaders {\n\n pub exists: bool,\n\n #[protocol(skip_if(\"!exists\"))]\n\n pub tasks_size: Option<TaskList>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TaskList {\n\n pub size: i16,\n\n #[protocol(length_prefix(elements(size)))]\n\n pub tasks: Vec<Task>,\n\n}\n", "file_path": "src/dat/unit.rs", "rank": 2, "score": 22376.741677912152 }, { "content": "\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TileSize {\n\n pub width: i16,\n\n pub height: i16,\n\n pub delta_y: 
i16,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Terrain {\n\n pub enabled: u8,\n\n pub random: u8,\n\n pub is_water: u8,\n\n pub hide_in_editor: u8,\n\n pub string_id: i16,\n\n pub name: DeString,\n\n pub sp_name: DeString,\n\n pub slp: i32,\n\n pub shape_pointer: i32,\n\n pub sound_id: i32,\n", "file_path": "src/dat/terrain_block.rs", "rank": 19, "score": 21.089370673134596 }, { "content": "use crate::dat::common::DeString;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct SpriteTable {\n\n pub size: u16,\n\n #[protocol(length_prefix(elements(size)))]\n\n pub sprite_enabled: Vec<u32>,\n\n #[protocol(length_prefix(pointers(sprite_enabled)))]\n\n pub sprites: Vec<Sprite>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Sprite {\n\n pub name: DeString,\n\n pub filename: DeString,\n\n pub particle_effect_name: DeString,\n\n pub slp_id: i32,\n\n pub is_loaded: bool,\n\n pub force_player_color: u8,\n\n pub layer: u8,\n", "file_path": "src/dat/sprite.rs", "rank": 20, "score": 18.94497692372592 }, { "content": "use crate::dat::common::DeString;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct SoundTable {\n\n pub sound_table_size: u16,\n\n #[protocol(length_prefix(elements(sound_table_size)))]\n\n pub sounds: Vec<Sound>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Sound {\n\n pub id: u16,\n\n pub play_delay: i16,\n\n pub file_count: u16,\n\n pub cache_time: u32,\n\n pub total_probability: u16,\n\n #[protocol(length_prefix(elements(file_count)))]\n\n pub items: Vec<SoundItem>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct SoundItem {\n\n pub filename: DeString,\n\n pub resource_id: u32,\n\n pub probability: i16,\n\n pub civilization: i16,\n\n pub icon_set: i16,\n\n}\n", "file_path": "src/dat/sound.rs", "rank": 21, "score": 17.834588296797396 }, { "content": "use crate::dat::common::DeString;\n\n\n\nconst REQUIRED_TECH_SIZE: usize = 
6;\n\nconst RESOURCE_COSTS_SIZE: usize = 3;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)]\n\npub struct TechResourcesCost {\n\n pub resource_type: ResourceCostType,\n\n pub amount: i16,\n\n pub flag: ResourceCostTrigger,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)]\n\n#[protocol(discriminant = \"integer\")]\n\n#[repr(u8)]\n\npub enum ResourceCostTrigger {\n\n OnCreate = 0,\n\n OnQueue = 1,\n\n}\n\n\n", "file_path": "src/dat/tech.rs", "rank": 22, "score": 17.680349756606205 }, { "content": " #[protocol(fixed_length(AGES))]\n\n pub units_techs_first: Vec<u8>,\n\n pub line_mode: i32,\n\n pub enabling_research: i32,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct UnitConnection {\n\n pub id: i32,\n\n pub status: TechTreeStatus,\n\n pub upper_building: i32,\n\n pub tech_tree_common: TechTreeCommon,\n\n pub vertical_line: u32,\n\n pub units_size: u8,\n\n #[protocol(length_prefix(elements(units_size)))]\n\n pub units: Vec<i32>,\n\n pub location_in_age: i32,\n\n pub required_research: i32,\n\n pub line_mode: i32,\n\n pub enabling_research: i32,\n", "file_path": "src/dat/tech_tree.rs", "rank": 23, "score": 17.37005950767788 }, { "content": "use crate::dat::common::DeString;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Effects {\n\n size: u32,\n\n #[protocol(length_prefix(elements(size)))]\n\n pub effects: Vec<Effect>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Effect {\n\n pub name: DeString,\n\n pub command_size: i16,\n\n #[protocol(length_prefix(elements(command_size)))]\n\n pub commands: Vec<EffectCommand>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct EffectCommand {\n\n pub command_type: u8,\n\n pub a: i16,\n\n pub b: i16,\n\n pub c: i16,\n\n pub d: f32,\n\n}\n", "file_path": "src/dat/effect.rs", "rank": 24, "score": 16.910597288603324 }, { "content": " pub color_table: u16,\n\n pub transparent_selection: bool,\n\n pub bounding_box: 
(i16, i16, i16, i16),\n\n pub num_deltas: u16,\n\n pub sound_id: i16,\n\n pub w_wise_sound_id: i32,\n\n pub angle_sound_used: u8,\n\n pub num_frames: u16,\n\n pub num_angles: u16,\n\n pub base_speed: f32,\n\n pub frame_rate: f32,\n\n pub replay_delay: f32,\n\n pub sequence_type: u8,\n\n pub id: i16,\n\n pub mirror_flag: i8,\n\n pub editor_flag: i8,\n\n #[protocol(length_prefix(elements(num_deltas)))]\n\n pub deltas: Vec<SpriteDelta>,\n\n #[protocol(skip_if(\"angle_sound_used == 0\"))]\n\n #[protocol(length_prefix(elements(num_angles)))]\n", "file_path": "src/dat/sprite.rs", "rank": 25, "score": 16.776701982040322 }, { "content": "#[derive(Protocol, Debug, Clone, PartialEq, Eq)]\n\npub struct ColorTable {\n\n pub size: u16,\n\n #[protocol(length_prefix(elements(size)))]\n\n pub colors: Vec<Color>,\n\n}\n\n\n\n/// Player colour data.\n\n#[derive(Protocol, Debug, Clone, PartialEq, Eq)]\n\npub struct Color {\n\n pub id: i32,\n\n /// Base palette index for this player colour.\n\n pub base: i32,\n\n /// The palette index to use for unit outlines when they are obscured by buildings or trees.\n\n pub unit_outline_color: i32,\n\n pub unit_selection_colors: (i32, i32),\n\n /// Palette indices for this colour on the minimap.\n\n pub minimap_colors: (i32, i32, i32),\n\n /// Color table to use for this player colour in the in-game statistics in the bottom right.\n\n pub statistics_text_color: i32,\n\n}\n", "file_path": "src/dat/color.rs", "rank": 26, "score": 16.696779273732542 }, { "content": " pub line_mode: i32,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct BuildingConnection {\n\n pub id: i32,\n\n pub status: TechTreeStatus,\n\n pub buildings_size: u8,\n\n #[protocol(length_prefix(elements(buildings_size)))]\n\n pub buildings: Vec<i32>,\n\n pub units_size: u8,\n\n #[protocol(length_prefix(elements(units_size)))]\n\n pub units: Vec<i32>,\n\n pub techs_size: u8,\n\n #[protocol(length_prefix(elements(techs_size)))]\n\n pub techs: Vec<i32>,\n\n pub 
common: TechTreeCommon,\n\n pub location_in_age: u8,\n\n #[protocol(fixed_length(AGES))]\n\n pub units_techs_total: Vec<u8>,\n", "file_path": "src/dat/tech_tree.rs", "rank": 27, "score": 16.286606243018205 }, { "content": "use crate::dat::common::DeString;\n\n\n\nconst TILESIZE_SIZE: usize = 19;\n\nconst TERRAIN_SIZE: usize = 200;\n\nconst COLORS_SIZE: usize = 3;\n\nconst FRAME_DATA_SIZE: usize = 19;\n\nconst TERRAIN_UNIT_SIZE: usize = 30;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TerrainBlock {\n\n pub virtual_function_pointer: u32,\n\n pub map_pointer: u32,\n\n pub map_width: i32,\n\n pub map_height: i32,\n\n pub world_width: i32,\n\n pub world_height: i32,\n\n\n\n #[protocol(fixed_length(TILESIZE_SIZE))]\n\n pub tile_sizes: Vec<TileSize>,\n\n\n", "file_path": "src/dat/terrain_block.rs", "rank": 28, "score": 16.162443385100016 }, { "content": " pub techs: Vec<Tech>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Tech {\n\n #[protocol(fixed_length(REQUIRED_TECH_SIZE))]\n\n pub required_techs: Vec<i16>,\n\n #[protocol(fixed_length(RESOURCE_COSTS_SIZE))]\n\n pub research_resource_cost: Vec<TechResourcesCost>,\n\n pub required_tech_count: i16,\n\n pub civ: i16,\n\n pub full_tech_mode: i16,\n\n pub research_location: i16,\n\n pub language_dll_name: i16,\n\n pub language_dll_description: i16,\n\n pub research_time: i16,\n\n pub effect_id: i16,\n\n pub r#type: i16,\n\n pub icon_id: i16,\n\n pub button_id: u8,\n\n pub language_dll_help: u32,\n\n pub language_dll_tech_tree: u32,\n\n pub hot_key: u32,\n\n pub name: DeString,\n\n pub repeatable: bool,\n\n}\n", "file_path": "src/dat/tech.rs", "rank": 29, "score": 15.838906055554753 }, { "content": "#[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)]\n\n#[protocol(discriminant = \"integer\")]\n\n#[repr(u16)]\n\npub enum ResourceCostType {\n\n Food = 0,\n\n Wood = 1,\n\n Stone = 2,\n\n Gold = 3,\n\n /// Used only for Lithuanians unique bonus\n\n Relic = 7,\n\n // We cannot 
use i16 as enum discriminant but this is actually -1\n\n None = 65535,\n\n /// Used only for Cumans free elite Kipchaks team bonus\n\n Free = 215,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Techs {\n\n pub size: u16,\n\n #[protocol(length_prefix(elements(size)))]\n", "file_path": "src/dat/tech.rs", "rank": 30, "score": 15.757551156133228 }, { "content": " #[protocol(fixed_length(FRAME_DATA_SIZE))]\n\n pub elevation_graphic: Vec<FrameData>,\n\n pub terrain_to_draw: i16,\n\n pub terrain_dimensions: (i16, i16),\n\n\n\n #[protocol(fixed_length(TERRAIN_UNIT_SIZE))]\n\n pub terrain_unit_masked_density: Vec<i16>,\n\n #[protocol(fixed_length(TERRAIN_UNIT_SIZE))]\n\n pub terrain_unit_tid: Vec<i16>,\n\n #[protocol(fixed_length(TERRAIN_UNIT_SIZE))]\n\n pub terrain_unit_density: Vec<i16>,\n\n #[protocol(fixed_length(TERRAIN_UNIT_SIZE))]\n\n pub terrain_unit_centering: Vec<u8>,\n\n pub number_of_terrain_units_used: i16,\n\n pub phantom: i16,\n\n pub phantom2: i16,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct FrameData {\n\n pub frame_count: i16,\n\n pub angle_count: i16,\n\n pub shape_id: i16,\n\n}\n", "file_path": "src/dat/terrain_block.rs", "rank": 31, "score": 15.697033870366457 }, { "content": " pub sound_id_: u16,\n\n pub wwise_sound_id_: u32,\n\n pub sound_delay__: i16,\n\n pub sound_id__: u16,\n\n pub wwise_sound_id__: u32,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct SoundProp {\n\n pub sound_delay: i16,\n\n pub sound_id: u16,\n\n pub wwise_sound_id: i32,\n\n}\n", "file_path": "src/dat/sprite.rs", "rank": 32, "score": 15.480491122167038 }, { "content": "#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TechTreeAge {\n\n pub id: u32,\n\n pub status: TechTreeStatus,\n\n pub buildings_size: u8,\n\n #[protocol(length_prefix(elements(buildings_size)))]\n\n pub buildings: Vec<i32>,\n\n pub units_size: u8,\n\n #[protocol(length_prefix(elements(units_size)))]\n\n pub units: Vec<i32>,\n\n 
pub techs_size: u8,\n\n #[protocol(length_prefix(elements(techs_size)))]\n\n pub techs: Vec<i32>,\n\n pub common: TechTreeCommon,\n\n pub num_building_levels: u8,\n\n #[protocol(fixed_length(SLOT_SIZE))]\n\n pub buildings_per_zone: Vec<u8>,\n\n #[protocol(fixed_length(SLOT_SIZE))]\n\n pub group_length_per_zone: Vec<u8>,\n\n pub max_age_length: u8,\n", "file_path": "src/dat/tech_tree.rs", "rank": 33, "score": 15.115825796006888 }, { "content": " pub wwise_sound_id: u32,\n\n pub wwise_sound_stop_id: u32,\n\n pub blend_priority: i32,\n\n pub blend_type: i32,\n\n pub overlay: DeString,\n\n #[protocol(fixed_length(COLORS_SIZE))]\n\n pub colors: Vec<u8>,\n\n pub cliff_colors: (u8, u8),\n\n pub passable_terrain: u8,\n\n pub im_passable_terrain: u8,\n\n pub is_animated: u8,\n\n pub animation_frames: i16,\n\n pub pause_frames: i16,\n\n pub interval: f32,\n\n pub pause_between_loops: f32,\n\n pub frame: i16,\n\n pub draw_frame: i16,\n\n pub animate_last: f32,\n\n pub frame_changed: u8,\n\n pub drawn: u8,\n", "file_path": "src/dat/terrain_block.rs", "rank": 34, "score": 15.083783746969047 }, { "content": "const SLOT_SIZE: usize = 10;\n\nconst AGES: usize = 5;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TechTree {\n\n pub tech_tree_ages_size: u8,\n\n pub building_connections_size: u8,\n\n pub unit_connections_size: u8,\n\n pub research_connections_size: u8,\n\n pub total_unit_tech_groups: i32,\n\n #[protocol(length_prefix(elements(tech_tree_ages_size)))]\n\n pub tech_tree_ages: Vec<TechTreeAge>,\n\n #[protocol(length_prefix(elements(building_connections_size)))]\n\n pub building_connections: Vec<BuildingConnection>,\n\n #[protocol(length_prefix(elements(unit_connections_size)))]\n\n pub unit_connections: Vec<UnitConnection>,\n\n #[protocol(length_prefix(elements(research_connections_size)))]\n\n pub research_connections: Vec<ResearchConnection>,\n\n}\n\n\n", "file_path": "src/dat/tech_tree.rs", "rank": 35, "score": 14.887951031894355 }, { "content": " pub 
attack_sounds: Option<Vec<AngleSound>>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct SpriteDelta {\n\n pub graphic_id: i16,\n\n pub padding_1: i16,\n\n pub sprite_ptr: i32,\n\n pub offset_x: i16,\n\n pub offset_y: i16,\n\n pub display_angle: i16,\n\n pub padding_2: i16,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct AngleSound {\n\n pub sound_delay: i16,\n\n pub sound_id: u16,\n\n pub wwise_sound_id: u32,\n\n pub sound_delay_: i16,\n", "file_path": "src/dat/sprite.rs", "rank": 36, "score": 14.484970764792903 }, { "content": "use bytes::Buf;\n\nuse djin_protocol::{Parcel, Settings};\n\nuse std::io::Read;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TerrainHeader {\n\n pub terrain_restriction_size: u16,\n\n pub restriction_size: u16,\n\n\n\n #[protocol(length_prefix(elements(terrain_restriction_size)))]\n\n pub terrain_tables_pointer: Vec<u32>,\n\n\n\n #[protocol(length_prefix(elements(terrain_restriction_size)))]\n\n pub terrains_pointer: Vec<u32>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct TerrainRestrictions {\n\n pub inner: Vec<TerrainRestriction>,\n\n}\n", "file_path": "src/dat/terrain.rs", "rank": 37, "score": 12.923705873252032 }, { "content": "}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct ResearchConnection {\n\n pub id: u32,\n\n pub status: TechTreeStatus,\n\n pub upper_building: u32,\n\n pub buildings_size: u8,\n\n #[protocol(length_prefix(elements(buildings_size)))]\n\n pub buildings: Vec<u32>,\n\n pub units_size: u8,\n\n #[protocol(length_prefix(elements(units_size)))]\n\n pub units: Vec<u32>,\n\n pub techs_size: u8,\n\n #[protocol(length_prefix(elements(techs_size)))]\n\n pub techs: Vec<u32>,\n\n pub tech_tree_common: TechTreeCommon,\n\n pub vertical_line: u32,\n\n pub location_in_age: u32,\n\n pub line_mode: u32,\n", "file_path": "src/dat/tech_tree.rs", "rank": 38, "score": 12.826693223703387 }, { "content": "}\n\n\n\n#[derive(Protocol, Debug, 
Clone, PartialEq)]\n\npub struct TechTreeCommon {\n\n pub slots_used: u32,\n\n /// Connection lines when selected\n\n #[protocol(fixed_length(SLOT_SIZE))]\n\n pub unit_research: Vec<u32>,\n\n /// 0 Age/Tech-level, 1 Building, 2 Unit, 3 Tech.\n\n #[protocol(fixed_length(SLOT_SIZE))]\n\n pub mode: Vec<TechTreeMode>,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)]\n\n#[protocol(discriminant = \"integer\")]\n\n#[repr(u32)]\n\npub enum TechTreeMode {\n\n AgeOrTechlevel = 0,\n\n Building = 1,\n\n Unit = 2,\n", "file_path": "src/dat/tech_tree.rs", "rank": 39, "score": 11.434756446989066 }, { "content": " pub padding_ts: i16,\n\n pub padding_ts_2: i16,\n\n\n\n #[protocol(fixed_length(TERRAIN_SIZE))]\n\n pub terrains: Vec<Terrain>,\n\n pub map_min_x: f32,\n\n pub map_min_y: f32,\n\n pub map_max_x: f32,\n\n pub map_max_y: f32,\n\n pub map_max_xplus_1: f32,\n\n pub map_max_y_plus_1: f32,\n\n pub map_max_y_plus_1_: i16,\n\n pub removed_block_sused: i16,\n\n pub borders_used: i16,\n\n pub max_terrain: i16,\n\n pub tile_width: i16,\n\n pub tile_height: i16,\n\n pub tile_half_height: i16,\n\n pub tile_half_width: i16,\n\n pub elevation_height: i16,\n", "file_path": "src/dat/terrain_block.rs", "rank": 40, "score": 10.953161800142677 }, { "content": "pub mod effect;\n\npub mod random_map;\n\npub mod sound;\n\npub mod sprite;\n\npub mod tech;\n\npub mod tech_tree;\n\npub mod terrain;\n\npub mod terrain_block;\n\npub mod unit;\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct ResourceUsage {\n\n /// The kind of resource to give or take\n\n pub attribute: ResourceUsageType,\n\n /// The amount give or take\n\n pub amount: i16,\n\n /// How and when this is counted\n\n pub flag: ResourceUsageTrigger,\n\n}\n\n\n", "file_path": "src/dat/mod.rs", "rank": 41, "score": 10.77095466860818 }, { "content": " pub sound_table: SoundTable,\n\n pub sprite_table: SpriteTable,\n\n pub terrain_block: TerrainBlock,\n\n pub random_map: RandomMap,\n\n pub effect_table: 
Effects,\n\n pub unit_table: Units,\n\n pub civilization_table: Civilizations,\n\n pub tech_table: Techs,\n\n pub misc: Misc,\n\n pub tech_tree: TechTree,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct GameVersion {\n\n #[protocol(fixed_length(8))]\n\n pub game_version: String,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct Misc {\n", "file_path": "src/dat/mod.rs", "rank": 42, "score": 10.186560601870813 }, { "content": " pub current_row: i16,\n\n pub current_col: i16,\n\n pub block_begin_row: i16,\n\n pub block_end_row: i16,\n\n pub block_begin_col: i16,\n\n pub block_end_col: i16,\n\n pub search_map_ptr: u32,\n\n pub search_map_rows_ptr: u32,\n\n pub any_frame_change: u8,\n\n pub map_visible_flag: u8,\n\n pub fog_flag: u8,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TerrainBorder {\n\n pub draw_terrain: i16,\n\n // always 0\n\n pub underlay_terrain: i16,\n\n pub border_style: i16,\n\n}\n", "file_path": "src/dat/terrain_block.rs", "rank": 43, "score": 10.163045508944107 }, { "content": "#[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)]\n\n#[protocol(discriminant = \"integer\")]\n\n#[repr(u16)]\n\npub enum ResourceUsageTrigger {\n\n OnCreate = 0,\n\n OnQueue = 1,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)]\n\n#[protocol(discriminant = \"integer\")]\n\n#[repr(u16)]\n\npub enum ResourceUsageType {\n\n /// Take or give an amount of food to the player\n\n Food = 0,\n\n /// Take or give an amount of wood to the player\n\n Wood = 1,\n\n /// Take or give an amount of stone to the player\n\n Stone = 2,\n\n /// Take or give an amount of gold to the player\n\n Gold = 3,\n", "file_path": "src/dat/mod.rs", "rank": 44, "score": 9.803418716245762 }, { "content": "\n\n TerrainRestrictions {\n\n inner: restrictions,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct TerrainRestriction {\n\n pub passability: Vec<f32>,\n\n pub pass_graphics: 
Vec<TerrainPassGraphic>,\n\n}\n\n\n\nimpl TerrainRestriction {\n\n pub fn read(buf: &mut (impl Read + Buf), len: usize, settings: &Settings) -> Self {\n\n debug_assert_eq!(len, 110);\n\n let mut passability = Vec::with_capacity(len);\n\n for _ in 0..len {\n\n passability.push(buf.get_f32_le());\n\n }\n", "file_path": "src/dat/terrain.rs", "rank": 45, "score": 9.33270794168026 }, { "content": " Tech = 3,\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq, PartialOrd)]\n\n#[protocol(discriminant = \"integer\")]\n\n#[repr(u8)]\n\npub enum TechTreeStatus {\n\n /// This does not seem to be used anymore in DE\n\n AvailablePlayer = 2,\n\n}\n", "file_path": "src/dat/tech_tree.rs", "rank": 46, "score": 9.320742757301938 }, { "content": "#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct RandomMap {\n\n pub random_maps_ptr: i32,\n\n}\n", "file_path": "src/dat/random_map.rs", "rank": 47, "score": 9.010167733728993 }, { "content": "mod zlib;\n\n\n\nuse crate::dat::civilization::Civilizations;\n\nuse crate::dat::color::ColorTable;\n\nuse crate::dat::effect::Effects;\n\nuse crate::dat::random_map::RandomMap;\n\nuse crate::dat::sound::SoundTable;\n\nuse crate::dat::sprite::SpriteTable;\n\nuse crate::dat::tech::Techs;\n\nuse crate::dat::tech_tree::TechTree;\n\nuse crate::dat::terrain::{TerrainHeader, TerrainRestrictions};\n\nuse crate::dat::terrain_block::TerrainBlock;\n\nuse crate::dat::unit::Units;\n\nuse djin_protocol::Parcel;\n\nuse eyre::Result;\n\nuse std::path::Path;\n\n\n\npub mod civilization;\n\npub mod color;\n\npub mod common;\n", "file_path": "src/dat/mod.rs", "rank": 48, "score": 8.929909654183893 }, { "content": " .effects\n\n .iter()\n\n .for_each(\n\n |effect| println!(\"{}: {:?}\",effect.name, effect.commands)\n\n );*/\n\n /*\n\n datfile\n\n .civilization_table\n\n .civilizations\n\n .iter()\n\n .take(1)\n\n .for_each(\n\n |civ| civ.units.iter().for_each(\n\n |unit| {\n\n if unit.unit_type == UnitType::Creatable {\n\n match (&unit.type_50, 
&unit.creatable) {\n\n (Some(x), Some(y)) => {\n\n println!(\"{}, misc: {:?}, {:?}\", unit.name, x.reload_time, unit.hit_points);\n\n println!(\"{}, armor : {:?}\", unit.name, x.armor);\n\n println!(\"{}, attack : {:?}\", unit.name, x.attacks);\n", "file_path": "examples/datfile.rs", "rank": 49, "score": 8.723161273983008 }, { "content": "use std::fmt;\n\n\n\n#[derive(Protocol, Debug, Clone)]\n\npub struct DeString {\n\n _delimiter: u16,\n\n len: u16,\n\n #[protocol(length_prefix(elements(\"len\")))]\n\n pub content: String,\n\n}\n\n\n\nimpl fmt::Display for DeString {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.content)\n\n }\n\n}\n\n\n\nimpl PartialEq for DeString {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.content == other.content\n\n && self.len == other.len\n", "file_path": "src/dat/common.rs", "rank": 50, "score": 8.712737897408593 }, { "content": " println!(\"{}, costs: {:?}\", unit.name, y.resources_costs);\n\n }\n\n _ => {\n\n println!(\"{:?}\", unit.unit_type)\n\n }\n\n }\n\n }\n\n //if unit.unit_type == UnitType::Creatable {\n\n // println!(\"{:?}\", unit.name)\n\n //} \n\n }\n\n )\n\n );*/\n\n}\n", "file_path": "examples/datfile.rs", "rank": 51, "score": 8.654376633558837 }, { "content": "use eyre::Result;\n\nuse flate2::read::DeflateDecoder;\n\nuse std::io::{Cursor, Read};\n\nuse std::path::Path;\n\n\n\npub(crate) fn decompress<S: AsRef<Path> + ?Sized>(path: &S) -> Result<Cursor<Vec<u8>>> {\n\n let file = std::fs::read(path)?;\n\n let mut decoded = DeflateDecoder::new(file.as_slice());\n\n let mut data = Vec::with_capacity(decoded.total_out() as usize);\n\n decoded.read_to_end(&mut data)?;\n\n Ok(Cursor::new(data))\n\n}\n", "file_path": "src/dat/zlib.rs", "rank": 52, "score": 8.211418717283681 }, { "content": "\n\n// Because TerrainRestrictions contains two nested vectors of size `terrain_restriction_size` and\n\n// `restriction_size` we need to implement read manually to pass these length value. 
This\n\n// could probably be addressed by implementing a #[protocol(context({value})] annotation to store\n\n// these value.\n\nimpl TerrainRestrictions {\n\n pub fn read(\n\n buf: &mut (impl Read + Buf),\n\n terrain_restriction_size: usize,\n\n restriction_size: usize,\n\n settings: &Settings,\n\n ) -> Self {\n\n debug_assert_eq!(terrain_restriction_size, 31);\n\n debug_assert_eq!(restriction_size, 110);\n\n let mut restrictions = Vec::with_capacity(terrain_restriction_size);\n\n for _ in 0..terrain_restriction_size {\n\n restrictions.push(TerrainRestriction::read(buf, restriction_size, settings));\n\n }\n\n\n\n debug_assert_eq!(restrictions.len(), 31);\n", "file_path": "src/dat/terrain.rs", "rank": 53, "score": 7.880741198071497 }, { "content": " /// Take or give an amount of population to the player\n\n Pop = 4,\n\n /// A free unit (Elite Kipchak)\n\n Free = 214,\n\n /// Two units in the game use this attribute : Elite Kipchak and Urus Khan (migth be creatable on some campaingn scenario)\n\n DecreaseSharedUnitCount = 215,\n\n /// A town center slot either in dark age (UNKOWN RTWC1X) or in feudal age for Cumans (UNKOWN RTWC2X)\n\n TownCenter = 218,\n\n /// Also for Elite Kipchak and Urus Khan, decrease the number of available unit (10 For Kipchak)\n\n TeamBonusCounter,\n\n // We cannot use i16 as enum discriminant but this is actually -1\n\n /// This can be ignored\n\n None = 65535,\n\n}\n\n\n\npub struct DatFile {\n\n pub game_version: GameVersion,\n\n pub terrain_header: TerrainHeader,\n\n pub terrain_restrictions: TerrainRestrictions,\n\n pub color_table: ColorTable,\n", "file_path": "src/dat/mod.rs", "rank": 54, "score": 7.274626353291559 }, { "content": " debug_assert_eq!(passability.len(), 110);\n\n\n\n let mut pass_graphics = Vec::with_capacity(len);\n\n for _ in 0..len {\n\n let pass_graphic =\n\n TerrainPassGraphic::read(buf, settings).expect(\"TerrainPassGraphic parsing error\");\n\n pass_graphics.push(pass_graphic);\n\n }\n\n\n\n 
debug_assert_eq!(passability.len(), 110);\n\n\n\n TerrainRestriction {\n\n passability,\n\n pass_graphics,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Protocol, Debug, Clone, PartialEq)]\n\npub struct TerrainPassGraphic {\n\n pub exit_tile_sprite: u32,\n\n pub enter_tile_sprite: u32,\n\n pub walk_tile_sprite: u32,\n\n pub replication_amount: i32,\n\n}\n", "file_path": "src/dat/terrain.rs", "rank": 55, "score": 6.81222891237409 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse serde_json::Result;\n\nuse aoe_djin::dat::DatFile;\n\nuse aoe_djin::dat::civilization::UnitType;\n\nuse aoe_djin::dat::tech::Tech;\n\nuse aoe_djin::dat::effect::EffectCommand;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\n\n", "file_path": "examples/datfile.rs", "rank": 56, "score": 6.77714865546667 }, { "content": " match datfile\n\n .effect_table\n\n .effects\n\n .get(tech.effect_id as usize) {\n\n Some(x) => {\n\n buffer.write(format!(\"{:?}\",tech.name));\n\n //println!(\"t: {}; e: {}\", tech.name, x.name);\n\n //println!(\"{:?}\", x.commands);\n\n },\n\n _ => {},\n\n }\n\n }*/\n\n fn commandsJson(efs: &Vec<EffectCommand>) -> String{\n\n let mut ret = String::from(\"[\");\n\n efs\n\n .iter()\n\n .for_each(\n\n |efc| {\n\n ret += &format!(\n\n\"\n", "file_path": "examples/datfile.rs", "rank": 57, "score": 6.690318866805253 }, { "content": "mod test {\n\n use crate::dat::tech::ResourceCostType;\n\n use crate::dat::tech::{ResourceCostTrigger, TechResourcesCost};\n\n use crate::dat::DatFile;\n\n use eyre::Result;\n\n use spectral::prelude::*;\n\n\n\n type TestResult = Result<()>;\n\n\n\n #[test]\n\n fn should_read_dat_file() -> TestResult {\n\n let dat_file = DatFile::from_file(\"tests/game_assets/empires2_x2_p1.dat\").unwrap();\n\n // Version\n\n assert_that(&dat_file.game_version.game_version).is_equal_to(\"VER 7.4\\0\".to_string());\n\n\n\n // Terrain Header\n\n assert_that(&dat_file.terrain_header.terrain_restriction_size).is_equal_to(31);\n\n 
assert_that(&dat_file.terrain_header.restriction_size).is_equal_to(110);\n\n assert_that(&dat_file.terrain_header.terrain_restriction_size).is_equal_to(31);\n\n assert_that(&dat_file.terrain_header.terrain_tables_pointer).has_length(31);\n", "file_path": "src/dat/mod.rs", "rank": 58, "score": 6.655788662915406 }, { "content": " &mut buf,\n\n terrain_header.terrain_restriction_size as usize,\n\n terrain_header.restriction_size as usize,\n\n &settings,\n\n );\n\n let color_table = ColorTable::read(&mut buf, &settings).expect(\"Error reading color_table\");\n\n let sound_table = SoundTable::read(&mut buf, &settings).expect(\"Error reading sound_table\");\n\n let sprite_table =\n\n SpriteTable::read(&mut buf, &settings).expect(\"Error reading sprite_table\");\n\n let terrain_block =\n\n TerrainBlock::read(&mut buf, &settings).expect(\"Error reading terrain_block\");\n\n let random_map = RandomMap::read(&mut buf, &settings).expect(\"Error reading random_map\");\n\n let effect_table = Effects::read(&mut buf, &settings).expect(\"Error reading effect_table\");\n\n let unit_table = Units::read(&mut buf, &settings).expect(\"Error reading unit_table\");\n\n let civilization_table =\n\n Civilizations::read(&mut buf, &settings).expect(\"Error reading civilization_table\");\n\n let tech_table = Techs::read(&mut buf, &settings).expect(\"Error reading tech_table\");\n\n let misc = Misc::read(&mut buf, &settings).expect(\"Error reading misc\");\n\n let tech_tree = TechTree::read(&mut buf, &settings).expect(\"Error reading tech_tree\");\n\n\n", "file_path": "src/dat/mod.rs", "rank": 59, "score": 6.173268220620761 }, { "content": " buffer.write(format!(\n\n\"<\n\n'techName': '{}',\n\n'effectName': '{}',\n\n'effects': {},>,\n\n\"\n\n ,tech.name, x.name, commandsJson(&x.commands)\n\n ).as_bytes());\n\n },\n\n _ => {},\n\n }\n\n }\n\n );\n\n buffer.flush()?;\n\n Ok(())\n\n //let data = datfile.civilization_table.civilizations[0].\n\n //println!(\"{:?}\", )\n\n /*\n\n datfile\n\n 
.effect_table\n", "file_path": "examples/datfile.rs", "rank": 60, "score": 6.152618347367294 }, { "content": " .find(|tech| tech.name == \"Fletching\")\n\n .expect(\"Could not find fletching\");\n\n\n\n // Fletching cost 100 Food and 50 gold\n\n assert_that(&fletching.research_resource_cost).contains_all_of(\n\n &vec![\n\n TechResourcesCost {\n\n amount: 100,\n\n flag: ResourceCostTrigger::OnQueue,\n\n resource_type: ResourceCostType::Food,\n\n },\n\n TechResourcesCost {\n\n amount: 50,\n\n flag: ResourceCostTrigger::OnQueue,\n\n resource_type: ResourceCostType::Gold,\n\n },\n\n TechResourcesCost {\n\n amount: 0,\n\n flag: ResourceCostTrigger::OnCreate,\n\n resource_type: ResourceCostType::None,\n\n },\n\n ]\n\n .iter(),\n\n );\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/dat/mod.rs", "rank": 61, "score": 4.985872327250146 }, { "content": "#[macro_use]\n\nextern crate djin_protocol_derive;\n\n\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate spectral;\n\n\n\npub mod dat;\n", "file_path": "src/lib.rs", "rank": 62, "score": 4.583419041043084 }, { "content": " time_slice: u32,\n\n unit_kill_rate: u32,\n\n unit_kill_total: u32,\n\n unit_hit_point_rate: u32,\n\n unit_hit_point_total: u32,\n\n razing_kill_rate: u32,\n\n razing_kill_total: u32,\n\n}\n\n\n\nimpl DatFile {\n\n pub fn from_file<S: AsRef<Path> + ?Sized>(path: &S) -> Result<DatFile> {\n\n let mut buf = zlib::decompress(path)?;\n\n\n\n let settings = djin_protocol::Settings {\n\n byte_order: djin_protocol::ByteOrder::LittleEndian,\n\n };\n\n\n\n let game_version = GameVersion::read(&mut buf, &settings).expect(\"Read error\");\n\n let terrain_header = TerrainHeader::read(&mut buf, &settings).expect(\"Read error\");\n\n let terrain_restrictions = TerrainRestrictions::read(\n", "file_path": "src/dat/mod.rs", "rank": 63, "score": 4.319145386485679 }, { "content": " <'command_type': {}, 'a': {:?}, 'b': {:?}, 'c': {:?}, 'd': {:?},>,\",\n\n efc.command_type, efc.a, efc.b, efc.c, efc.d);\n\n }\n\n );\n\n ret += 
&String::from(\"]\");\n\n return ret\n\n }\n\n\n\n buffer.write(b\"'TechEffects': [\");\n\n datfile\n\n .tech_table\n\n .techs\n\n .iter()\n\n .for_each(\n\n |tech| {\n\n match datfile\n\n .effect_table\n\n .effects\n\n .get(tech.effect_id as usize) {\n\n Some(x) => {\n", "file_path": "examples/datfile.rs", "rank": 64, "score": 3.8728451506229433 }, { "content": " Ok(DatFile {\n\n game_version,\n\n terrain_header,\n\n terrain_restrictions,\n\n color_table,\n\n sound_table,\n\n sprite_table,\n\n terrain_block,\n\n random_map,\n\n effect_table,\n\n unit_table,\n\n civilization_table,\n\n tech_table,\n\n misc,\n\n tech_tree,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/dat/mod.rs", "rank": 65, "score": 3.825307769038117 }, { "content": "# Djin\n\n\n\n[![CI](https://github.com/scout-gg/djin/actions/workflows/Check.yml/badge.svg)](https://github.com/scout-gg/djin/actions/workflows/Check.yml)\n\n![GitHub tag (latest by date)](https://img.shields.io/github/v/tag/scout-gg/djin)\n\n[![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org)\n\n![License](https://img.shields.io/github/license/scout-gg/djin)\n\n\n\n⚠️ **Work in progress**\n\n\n\nDjin is a *work in progress* replacement for [genie-rs](https://github.com/SiegeEngineers/genie-rs).\n\n\n\nIt currently support Age of Empire II Definitive Edition only but we plan to support other version of the game\n\nwhen the main features will be stabilised.\n\n\n\nThe main difference with genie-rs is the fact that we use a [fork of the protocol crate](https://github.com/oknozor/protocol)\n\nhandle game files serialization and deserialization. This allow us to write almost zero parsing logic.\n\n\n\n\n\nAge of Empires II © Microsoft Corporation. 
djin was created under Microsoft's \"Game Content Usage Rules\" using \n\nassets from Age of Empires II, and it is not endorsed by or affiliated with Microsoft.\n\n\n\n## Example \n\n\n\nYou can run this example with `cargo run --example datfile` :\n\n\n\n```rust\n\nfn main() {\n\n let datfile = DatFile::from_file(\"tests/game_assets/empires2_x2_p1.dat\").expect(\"Error reading dat file\");\n\n \n\n datfile.civilizations.civilizations.iter()\n\n .for_each(|civ| println!(\"{}\", civ.name))\n\n}\n\n```\n\n\n\n\n", "file_path": "README.md", "rank": 66, "score": 3.299331232547127 }, { "content": "### Documentation\n\n\n\n[8c48a7](https://github.com/scout-gg/djin/commit/8c48a74f21cb38b5f6e770cd4ec2b2e392aec0e7) - add readme badges - [oknozor](https://github.com/oknozor)\n\n\n\n[761085](https://github.com/scout-gg/djin/commit/761085110014e895e7290fa46242cc2dc94e4fa2) - add datfile example - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n- - -\n\n## 0.2.0 - 2021-07-24\n\n\n\n\n\n### Features\n\n\n\n[ded58b](https://github.com/scout-gg/djin/commit/ded58b5845e86e93067422903b7497525cdd16b1) - implement tech tree, datfile is now complete - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n- - -\n\n## 0.1.0 - 2021-07-24\n\n\n\n\n\n### Documentation\n\n\n\n[7988d7](https://github.com/scout-gg/djin/commit/7988d7ae7a78041130556d8277a03f6253e2f04e) - add microsoft Game Content Usage Rules - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n### Features\n\n\n\n[c615e3](https://github.com/scout-gg/djin/commit/c615e3f41a09c753e8ba7d06b750ce1cc7c2fea3) - add tech and use enum for unit id - [oknozor](https://github.com/oknozor)\n\n\n\n[05b606](https://github.com/scout-gg/djin/commit/05b6066985cf95ee9b0a510d2035d4ccf8fe9f45) - parse civilizations - [oknozor](https://github.com/oknozor)\n\n\n\n[26bd21](https://github.com/scout-gg/djin/commit/26bd21aabc89650ddf7e14e8dc184606b8deda5c) - update protocol to use length pointer attr - 
[oknozor](https://github.com/oknozor)\n\n\n\n[200e0e](https://github.com/scout-gg/djin/commit/200e0e77dc50f0036f7dfa9ae02aecf065294f88) - parse terrains - [oknozor](https://github.com/oknozor)\n\n\n\n[edb0b6](https://github.com/scout-gg/djin/commit/edb0b68b35054df1dc33132580edc1819d6643db) - parse sprites - [oknozor](https://github.com/oknozor)\n\n\n\n[9c3da3](https://github.com/scout-gg/djin/commit/9c3da37c8e76f32465bd6d1caddbd55dccab0349) - implement sound, color, terrain and game version - [oknozor](https://github.com/oknozor)\n\n\n\n\n", "file_path": "CHANGELOG.md", "rank": 67, "score": 3.2481004485744824 }, { "content": "# Changelog\n\nAll notable changes to this project will be documented in this file. See [conventional commits](https://www.conventionalcommits.org/) for commit guidelines.\n\n\n\n- - -\n\n## 0.3.2 - 2021-07-25\n\n\n\n\n\n### Bug Fixes\n\n\n\n[39d916](https://github.com/scout-gg/djin/commit/39d916773d0c38fdf6c70a12601636876f783b01) - make all dat modules public - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n- - -\n\n## 0.3.1 - 2021-07-25\n\n\n\n\n\n### Bug Fixes\n\n\n\n[5ae65f](https://github.com/scout-gg/djin/commit/5ae65fcf497031d70c08db91a16cf5a1f792afd8) - remove pub (crate) visibility for techs - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n- - -\n\n## 0.3.0 - 2021-07-25\n\n\n\n\n\n### Refactoring\n\n\n\n[bc790a](https://github.com/scout-gg/djin/commit/bc790ae47694930b1420d27967be6b062f490920) - rename top level Datfile fields to avoid repetition - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n### Features\n\n\n\n[4a8298](https://github.com/scout-gg/djin/commit/4a8298fece89f9b4010d31ea456c732690fbf9bf) - add tech tree status - [oknozor](https://github.com/oknozor)\n\n\n\n[57edd1](https://github.com/scout-gg/djin/commit/57edd1d1e6d3369da7601f929be337d3ac24f55e) - use enum for resource usage type and resource usage trigger - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n- - -\n\n## 0.2.1 - 2021-07-25\n\n\n\n\n\n### Bug 
Fixes\n\n\n\n[178c00](https://github.com/scout-gg/djin/commit/178c00e8b3db136344fce8dbe672a47853376038) - fix packaging release stage in cog.toml - [oknozor](https://github.com/oknozor)\n\n\n\n[bcf643](https://github.com/scout-gg/djin/commit/bcf6436d416ca5177a8d341a9d5d83e096552b1f) - make all fields public - [oknozor](https://github.com/oknozor)\n\n\n\n[b7d6e5](https://github.com/scout-gg/djin/commit/b7d6e526739c427927cda36651e341ebab396f25) - remove useless print and add example - [oknozor](https://github.com/oknozor)\n\n\n\n\n", "file_path": "CHANGELOG.md", "rank": 68, "score": 2.5052380845366753 }, { "content": " assert_that(&dat_file.terrain_header.terrains_pointer).has_length(31);\n\n\n\n // Terrain restrictions\n\n assert_that(&dat_file.terrain_restrictions.inner).has_length(31);\n\n\n\n dat_file.terrain_restrictions.inner.iter().for_each(|el| {\n\n assert_that(&el.pass_graphics).has_length(110);\n\n assert_that(&el.passability).has_length(110);\n\n });\n\n\n\n // Colors\n\n assert_that(&dat_file.color_table.colors).has_length(16);\n\n assert_that(&dat_file.sound_table.sounds).has_length(685);\n\n assert_that(&dat_file.civilization_table.civilizations).has_length(38);\n\n\n\n // Tech\n\n let fletching = dat_file\n\n .tech_table\n\n .techs\n\n .iter()\n", "file_path": "src/dat/mod.rs", "rank": 69, "score": 2.1896485158017494 }, { "content": "### Miscellaneous Chores\n\n\n\n[44c300](https://github.com/scout-gg/djin/commit/44c300618498cae99b198c926c4a9502f7e909de) - fmt all - [oknozor](https://github.com/oknozor)\n\n\n\n[7ddd1f](https://github.com/scout-gg/djin/commit/7ddd1fc1d13d3ce0a72e89934fcdbf5c9af613b6) - use protocol fork - [oknozor](https://github.com/oknozor)\n\n\n\n[943d6c](https://github.com/scout-gg/djin/commit/943d6ca6c15f3ec0d05443d033d0e6560a0f53f8) - fix auto release - [oknozor](https://github.com/oknozor)\n\n\n\n[045b22](https://github.com/scout-gg/djin/commit/045b22a7871ba90d50e0f1e5503689ee75811dcb) - add license - 
[oknozor](https://github.com/oknozor)\n\n\n\n[ab3d96](https://github.com/scout-gg/djin/commit/ab3d96c46bb9fa83ebd63046068f727a6ecfcea6) - expose dat module - [oknozor](https://github.com/oknozor)\n\n\n\n[9d52f1](https://github.com/scout-gg/djin/commit/9d52f11a921690d3af1fdeba7e38074801b347cd) - add CI configuration - [oknozor](https://github.com/oknozor)\n\n\n\n[2c8a9f](https://github.com/scout-gg/djin/commit/2c8a9f93a9b4afc9b730b9cddde9faa0b291715e) - clippy lints and fmt * - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n### Bug Fixes\n\n\n\n[5ac555](https://github.com/scout-gg/djin/commit/5ac555e87e1c11181481caf45e861af21517048f) - fix sound table - [oknozor](https://github.com/oknozor)\n\n\n\n\n\n- - -\n\n\n", "file_path": "CHANGELOG.md", "rank": 70, "score": 0.8326575300324444 } ]
Rust
src/memfd.rs
lucab/memfd-rs
40e00f1b7a6fad816ab1394ce4cc1910c478d32a
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd}; use std::{ffi, fs, os::raw}; use crate::{nr, sealing}; #[cfg(any(target_os = "android", target_os="linux"))] unsafe fn memfd_create(name: *const raw::c_char, flags: raw::c_uint) -> raw::c_int { libc::syscall(libc::SYS_memfd_create, name, flags) as raw::c_int } #[derive(Clone, Debug)] pub struct MemfdOptions { allow_sealing: bool, cloexec: bool, hugetlb: Option<HugetlbSize>, } impl MemfdOptions { pub fn new() -> Self { Self::default() } pub fn allow_sealing(mut self, value: bool) -> Self { self.allow_sealing = value; self } pub fn close_on_exec(mut self, value: bool) -> Self { self.cloexec = value; self } pub fn hugetlb(mut self, size: Option<HugetlbSize>) -> Self { self.hugetlb = size; self } fn bitflags(&self) -> u32 { let mut bits = 0; if self.allow_sealing { bits |= nr::MFD_ALLOW_SEALING; } if self.cloexec { bits |= nr::MFD_CLOEXEC; } if let Some(ref hugetlb) = self.hugetlb { bits |= hugetlb.bitflags(); bits |= nr::MFD_HUGETLB; } bits } pub fn create<T: AsRef<str>>(&self, name: T) -> Result<Memfd, crate::Error> { let flags = self.bitflags(); unsafe { let cname = ffi::CString::new(name.as_ref()).map_err(crate::Error::NameCStringConversion)?; let name_ptr = cname.as_ptr(); let fd = memfd_create(name_ptr, flags); if fd < 0 { return Err(crate::Error::Create(std::io::Error::last_os_error())); } Ok(Memfd::from_raw_fd(fd)) } } } impl Default for MemfdOptions { fn default() -> Self { Self { allow_sealing: false, cloexec: true, hugetlb: None, } } } #[allow(clippy::all)] #[derive(Copy, Clone, Debug)] pub enum HugetlbSize { Huge64KB, Huge512KB, Huge1MB, Huge2MB, Huge8MB, Huge16MB, Huge256MB, Huge1GB, Huge2GB, Huge16GB, } impl HugetlbSize { fn bitflags(self) -> u32 { match self { HugetlbSize::Huge64KB => nr::MFD_HUGE_64KB, HugetlbSize::Huge512KB => nr::MFD_HUGE_512KB, HugetlbSize::Huge1MB => nr::MFD_HUGE_1MB, HugetlbSize::Huge2MB => nr::MFD_HUGE_2MB, HugetlbSize::Huge8MB => nr::MFD_HUGE_8MB, 
HugetlbSize::Huge16MB => nr::MFD_HUGE_16MB, HugetlbSize::Huge256MB => nr::MFD_HUGE_256MB, HugetlbSize::Huge1GB => nr::MFD_HUGE_1GB, HugetlbSize::Huge2GB => nr::MFD_HUGE_2GB, HugetlbSize::Huge16GB => nr::MFD_HUGE_16GB, } } } #[derive(Debug)] pub struct Memfd { file: fs::File, } impl Memfd { pub fn try_from_fd<F>(fd: F) -> Result<Self, F> where F: AsRawFd + IntoRawFd, { if !is_memfd(&fd) { Err(fd) } else { let file = unsafe { fs::File::from_raw_fd(fd.into_raw_fd()) }; Ok(Self { file }) } } pub fn try_from_file(file: fs::File) -> Result<Self, fs::File> { Self::try_from_fd(file) } pub fn as_file(&self) -> &fs::File { &self.file } pub fn into_file(self) -> fs::File { self.file } pub fn seals(&self) -> Result<sealing::SealsHashSet, crate::Error> { let flags = Self::file_get_seals(&self.file)?; Ok(sealing::bitflags_to_seals(flags)) } pub fn add_seal(&self, seal: sealing::FileSeal) -> Result<(), crate::Error> { use std::iter::FromIterator; let set = sealing::SealsHashSet::from_iter(vec![seal]); self.add_seals(&set) } pub fn add_seals(&self, seals: &sealing::SealsHashSet) -> Result<(), crate::Error> { let fd = self.file.as_raw_fd(); let flags = sealing::seals_to_bitflags(seals); let r = unsafe { libc::syscall(libc::SYS_fcntl, fd, libc::F_ADD_SEALS, flags) }; if r < 0 { return Err(crate::Error::AddSeals(std::io::Error::last_os_error())); }; Ok(()) } fn file_get_seals(fp: &fs::File) -> Result<u64, crate::Error> { let fd = fp.as_raw_fd(); let r = unsafe { libc::syscall(libc::SYS_fcntl, fd, libc::F_GET_SEALS) }; if r < 0 { return Err(crate::Error::GetSeals(std::io::Error::last_os_error())); }; Ok(r as u64) } } impl FromRawFd for Memfd { unsafe fn from_raw_fd(fd: RawFd) -> Memfd { let file = fs::File::from_raw_fd(fd); Memfd { file } } } impl AsRawFd for Memfd { fn as_raw_fd(&self) -> RawFd { self.file.as_raw_fd() } } impl IntoRawFd for Memfd { fn into_raw_fd(self) -> RawFd { self.into_file().into_raw_fd() } } fn is_memfd<F: AsRawFd>(fd: &F) -> bool { let ret = unsafe { 
libc::syscall(libc::SYS_fcntl, fd.as_raw_fd(), libc::F_GET_SEALS) }; ret >= 0 }
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd}; use std::{ffi, fs, os::raw}; use crate::{nr, sealing}; #[cfg(any(target_os = "android", target_os="linux"))] unsafe fn memfd_create(name: *const raw::c_char, flags: raw::c_uint) -> raw::c_int { libc::syscall(libc::SYS_memfd_create, name, flags) as raw::c_int } #[derive(Clone, Debug)] pub struct MemfdOptions { allow_sealing: bool, cloexec: bool, hugetlb: Option<HugetlbSize>, } impl MemfdOptions { pub fn new() -> Self { Self::default() } pub fn allow_sealing(mut self, value: bool) -> Self { self.allow_sealing = value; self } pub fn close_on_exec(mut self, value: bool) -> Self { self.cloexec = value; self } pub fn hugetlb(mut self, size: Option<HugetlbSize>) -> Self { self.hugetlb = size; self } fn bitflags(&self) -> u32 { let mut bits = 0; if self.allow_sealing { bits |= nr::MFD_ALLOW_SEALING; } if self.cloexec { bits |= nr::MFD_CLOEXEC; }
pub fn create<T: AsRef<str>>(&self, name: T) -> Result<Memfd, crate::Error> { let flags = self.bitflags(); unsafe { let cname = ffi::CString::new(name.as_ref()).map_err(crate::Error::NameCStringConversion)?; let name_ptr = cname.as_ptr(); let fd = memfd_create(name_ptr, flags); if fd < 0 { return Err(crate::Error::Create(std::io::Error::last_os_error())); } Ok(Memfd::from_raw_fd(fd)) } } } impl Default for MemfdOptions { fn default() -> Self { Self { allow_sealing: false, cloexec: true, hugetlb: None, } } } #[allow(clippy::all)] #[derive(Copy, Clone, Debug)] pub enum HugetlbSize { Huge64KB, Huge512KB, Huge1MB, Huge2MB, Huge8MB, Huge16MB, Huge256MB, Huge1GB, Huge2GB, Huge16GB, } impl HugetlbSize { fn bitflags(self) -> u32 { match self { HugetlbSize::Huge64KB => nr::MFD_HUGE_64KB, HugetlbSize::Huge512KB => nr::MFD_HUGE_512KB, HugetlbSize::Huge1MB => nr::MFD_HUGE_1MB, HugetlbSize::Huge2MB => nr::MFD_HUGE_2MB, HugetlbSize::Huge8MB => nr::MFD_HUGE_8MB, HugetlbSize::Huge16MB => nr::MFD_HUGE_16MB, HugetlbSize::Huge256MB => nr::MFD_HUGE_256MB, HugetlbSize::Huge1GB => nr::MFD_HUGE_1GB, HugetlbSize::Huge2GB => nr::MFD_HUGE_2GB, HugetlbSize::Huge16GB => nr::MFD_HUGE_16GB, } } } #[derive(Debug)] pub struct Memfd { file: fs::File, } impl Memfd { pub fn try_from_fd<F>(fd: F) -> Result<Self, F> where F: AsRawFd + IntoRawFd, { if !is_memfd(&fd) { Err(fd) } else { let file = unsafe { fs::File::from_raw_fd(fd.into_raw_fd()) }; Ok(Self { file }) } } pub fn try_from_file(file: fs::File) -> Result<Self, fs::File> { Self::try_from_fd(file) } pub fn as_file(&self) -> &fs::File { &self.file } pub fn into_file(self) -> fs::File { self.file } pub fn seals(&self) -> Result<sealing::SealsHashSet, crate::Error> { let flags = Self::file_get_seals(&self.file)?; Ok(sealing::bitflags_to_seals(flags)) } pub fn add_seal(&self, seal: sealing::FileSeal) -> Result<(), crate::Error> { use std::iter::FromIterator; let set = sealing::SealsHashSet::from_iter(vec![seal]); self.add_seals(&set) } pub fn 
add_seals(&self, seals: &sealing::SealsHashSet) -> Result<(), crate::Error> { let fd = self.file.as_raw_fd(); let flags = sealing::seals_to_bitflags(seals); let r = unsafe { libc::syscall(libc::SYS_fcntl, fd, libc::F_ADD_SEALS, flags) }; if r < 0 { return Err(crate::Error::AddSeals(std::io::Error::last_os_error())); }; Ok(()) } fn file_get_seals(fp: &fs::File) -> Result<u64, crate::Error> { let fd = fp.as_raw_fd(); let r = unsafe { libc::syscall(libc::SYS_fcntl, fd, libc::F_GET_SEALS) }; if r < 0 { return Err(crate::Error::GetSeals(std::io::Error::last_os_error())); }; Ok(r as u64) } } impl FromRawFd for Memfd { unsafe fn from_raw_fd(fd: RawFd) -> Memfd { let file = fs::File::from_raw_fd(fd); Memfd { file } } } impl AsRawFd for Memfd { fn as_raw_fd(&self) -> RawFd { self.file.as_raw_fd() } } impl IntoRawFd for Memfd { fn into_raw_fd(self) -> RawFd { self.into_file().into_raw_fd() } } fn is_memfd<F: AsRawFd>(fd: &F) -> bool { let ret = unsafe { libc::syscall(libc::SYS_fcntl, fd.as_raw_fd(), libc::F_GET_SEALS) }; ret >= 0 }
if let Some(ref hugetlb) = self.hugetlb { bits |= hugetlb.bitflags(); bits |= nr::MFD_HUGETLB; } bits }
function_block-function_prefix_line
[ { "content": "/// Check if the close-on-exec flag is set for the memfd.\n\npub fn get_close_on_exec(memfd: &memfd::Memfd) -> std::io::Result<bool> {\n\n // SAFETY: The syscall called has no soundness implications (i.e. does not mess with\n\n // process memory in weird ways, checks its arguments for correctness, etc.). Furthermore\n\n // due to invariants of `Memfd` this syscall is provided a valid file descriptor.\n\n let flags = unsafe { libc::fcntl(memfd.as_file().as_raw_fd(), libc::F_GETFD, 0) };\n\n if flags == -1 {\n\n Err(std::io::Error::last_os_error())\n\n } else {\n\n Ok(flags & libc::FD_CLOEXEC != 0)\n\n }\n\n}\n", "file_path": "tests/memfd.rs", "rank": 0, "score": 49288.20977300215 }, { "content": "#[test]\n\nfn test_sealing_default() {\n\n let opts = memfd::MemfdOptions::default();\n\n let m0 = opts.create(\"default\").unwrap();\n\n let sset = m0.seals().unwrap();\n\n let default = memfd::SealsHashSet::from_iter(vec![memfd::FileSeal::SealSeal]);\n\n assert_eq!(sset, default);\n\n}\n\n\n", "file_path": "tests/sealing.rs", "rank": 1, "score": 43058.099059768065 }, { "content": "#[test]\n\nfn test_sealing_resize() {\n\n let opts = memfd::MemfdOptions::default().allow_sealing(true);\n\n let mfd = opts.create(\"sized-1K\").unwrap();\n\n mfd.as_file().set_len(1024).unwrap();\n\n\n\n mfd.add_seal(memfd::FileSeal::SealGrow).unwrap();\n\n mfd.as_file().set_len(2048).unwrap_err();\n\n mfd.as_file().set_len(512).unwrap();\n\n\n\n mfd.add_seal(memfd::FileSeal::SealShrink).unwrap();\n\n mfd.as_file().set_len(1000).unwrap_err();\n\n mfd.as_file().set_len(1024).unwrap_err();\n\n mfd.as_file().set_len(256).unwrap_err();\n\n mfd.as_file().set_len(512).unwrap();\n\n}\n", "file_path": "tests/sealing.rs", "rank": 2, "score": 43058.099059768065 }, { "content": "#[test]\n\nfn test_sealing_add() {\n\n let opts = memfd::MemfdOptions::default();\n\n let m0 = opts.allow_sealing(true).create(\"default\").unwrap();\n\n let sset = m0.seals().unwrap();\n\n assert_eq!(sset.len(), 
0);\n\n\n\n let write_seal = memfd::SealsHashSet::from_iter(vec![memfd::FileSeal::SealWrite]);\n\n m0.add_seal(memfd::FileSeal::SealWrite).unwrap();\n\n let a0 = write_seal;\n\n let r0 = m0.seals().unwrap();\n\n assert_eq!(r0, a0);\n\n\n\n let grow_seal = memfd::SealsHashSet::from_iter(vec![memfd::FileSeal::SealGrow]);\n\n m0.add_seals(&grow_seal).unwrap();\n\n let a1 = a0.union(&grow_seal).cloned().collect();\n\n let r1 = m0.seals().unwrap();\n\n assert_eq!(r1, a1);\n\n\n\n let shrink_seal = memfd::SealsHashSet::from_iter(vec![memfd::FileSeal::SealShrink]);\n\n m0.add_seals(&shrink_seal).unwrap();\n", "file_path": "tests/sealing.rs", "rank": 3, "score": 43058.099059768065 }, { "content": "#[test]\n\nfn test_sealing_unsealed() {\n\n let opts = memfd::MemfdOptions::default();\n\n let m0 = opts.allow_sealing(true).create(\"default\").unwrap();\n\n let sset = m0.seals().unwrap();\n\n assert_eq!(sset.len(), 0);\n\n}\n\n\n", "file_path": "tests/sealing.rs", "rank": 4, "score": 43058.099059768065 }, { "content": "fn main() {\n\n // Create a sealable memfd.\n\n let opts = memfd::MemfdOptions::default().allow_sealing(true);\n\n let mfd = opts.create(\"sized-1K\").unwrap();\n\n\n\n // Resize to 1024B.\n\n mfd.as_file().set_len(1024).unwrap();\n\n\n\n // Add seals to prevent further resizing.\n\n let mut seals = memfd::SealsHashSet::new();\n\n seals.insert(memfd::FileSeal::SealShrink);\n\n seals.insert(memfd::FileSeal::SealGrow);\n\n mfd.add_seals(&seals).unwrap();\n\n\n\n // Prevent further sealing changes.\n\n mfd.add_seal(memfd::FileSeal::SealSeal).unwrap();\n\n\n\n // Write 1K of data, allowed by size seals.\n\n let data_1k = vec![0x00; 1024];\n\n let r = mfd.as_file().write_all(&data_1k);\n", "file_path": "examples/sized.rs", "rank": 5, "score": 42131.09414153197 }, { "content": "#[test]\n\nfn test_memfd_no_cloexec() {\n\n let memfd = memfd::MemfdOptions::default()\n\n .close_on_exec(false)\n\n .create(\"no-cloexec\")\n\n .unwrap();\n\n 
assert_eq!(get_close_on_exec(&memfd).unwrap(), false);\n\n}\n\n\n", "file_path": "tests/memfd.rs", "rank": 6, "score": 38087.80229209791 }, { "content": "#[test]\n\nfn test_memfd_from_into() {\n\n let opts = memfd::MemfdOptions::default();\n\n let m0 = opts.create(\"default\").unwrap();\n\n let f0 = m0.into_file();\n\n let _ = memfd::Memfd::try_from_file(f0)\n\n .expect(\"failed to convert a legit memfd file\");\n\n\n\n let rootdir = fs::File::open(\"/\").unwrap();\n\n let _ = memfd::Memfd::try_from_file(rootdir)\n\n .expect_err(\"unexpected conversion from a non-memfd file\");\n\n}\n\n\n", "file_path": "tests/memfd.rs", "rank": 8, "score": 22358.672667300663 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn error_send_sync() {\n\n fn assert_error<E: std::error::Error + Send + Sync + fmt::Display + fmt::Debug + 'static>() {}\n\n assert_error::<Error>();\n\n}\n", "file_path": "src/errors.rs", "rank": 9, "score": 21392.238971322284 }, { "content": "#[test]\n\nfn test_memfd_default() {\n\n let opts = memfd::MemfdOptions::default();\n\n let m0 = opts.create(\"default\").unwrap();\n\n let meta0 = m0.as_file().metadata().unwrap();\n\n assert_eq!(meta0.len(), 0);\n\n assert_eq!(meta0.is_file(), true);\n\n assert_eq!(get_close_on_exec(&m0).unwrap(), true);\n\n drop(m0)\n\n}\n\n\n", "file_path": "tests/memfd.rs", "rank": 10, "score": 21392.238971322284 }, { "content": "#[test]\n\nfn test_memfd_multi() {\n\n let opts = memfd::MemfdOptions::default();\n\n let m0 = opts.create(\"default\").unwrap();\n\n let f0 = m0.as_file().as_raw_fd();\n\n\n\n let m1 = opts.create(\"default\").unwrap();\n\n let f1 = m1.as_file().as_raw_fd();\n\n assert!(f0 != f1);\n\n\n\n let m0_file = m0.into_file();\n\n assert_eq!(f0, m0_file.as_raw_fd());\n\n}\n\n\n", "file_path": "tests/memfd.rs", "rank": 11, "score": 21392.238971322284 }, { "content": "//! A simple example showing how to crate size-sealed memfd.\n\n//!\n\n//! It creates a new memfd and seals to on a fixed 1K size.\n\n//!\n\n//! 
This is an example ONLY: do NOT panic/unwrap/assert\n\n//! in production code!\n\n\n\nextern crate memfd;\n\n\n\nuse std::io::{Seek, SeekFrom, Write};\n\n\n", "file_path": "examples/sized.rs", "rank": 12, "score": 20036.277212263263 }, { "content": " assert!(r.is_ok());\n\n mfd.as_file().seek(SeekFrom::Start(0)).unwrap();\n\n\n\n // Write 2K of data, now allowed by size seals.\n\n let data_2k = vec![0x11; 2048];\n\n let r = mfd.as_file().write_all(&data_2k);\n\n assert!(r.is_err());\n\n mfd.as_file().seek(SeekFrom::Start(0)).unwrap();\n\n\n\n // Try to resize to 2048B, not allowed by size seals.\n\n let r = mfd.as_file().set_len(2048);\n\n assert!(r.is_err());\n\n\n\n // Overwrite 1K of data, allowed by size seals.\n\n let data_1k = vec![0x22; 1024];\n\n let r = mfd.as_file().write_all(&data_1k);\n\n assert!(r.is_ok());\n\n}\n", "file_path": "examples/sized.rs", "rank": 13, "score": 20032.909593546257 }, { "content": "\n\n/// Convert a set of seals into a bitflags value.\n\npub(crate) fn seals_to_bitflags(set: &SealsHashSet) -> u32 {\n\n let mut bits = 0;\n\n for seal in set.iter() {\n\n bits |= seal.bitflags();\n\n }\n\n bits\n\n}\n\n\n\n/// Convert a bitflags value to a set of seals.\n\npub(crate) fn bitflags_to_seals(bitflags: u64) -> SealsHashSet {\n\n let mut sset = SealsHashSet::new();\n\n if bitflags & (libc::F_SEAL_SEAL as u64) != 0 {\n\n sset.insert(FileSeal::SealSeal);\n\n }\n\n if bitflags & (libc::F_SEAL_SHRINK as u64) != 0 {\n\n sset.insert(FileSeal::SealShrink);\n\n }\n\n if bitflags & (libc::F_SEAL_GROW as u64) != 0 {\n\n sset.insert(FileSeal::SealGrow);\n\n }\n\n if bitflags & (libc::F_SEAL_WRITE as u64) != 0 {\n\n sset.insert(FileSeal::SealWrite);\n\n }\n\n sset\n\n}\n", "file_path": "src/sealing.rs", "rank": 14, "score": 19115.582872756546 }, { "content": " /// Corresponds to `F_SEAL_WRITE`.\n\n SealWrite,\n\n /// File sealing cannot be further manipulated.\n\n ///\n\n /// Corresponds to `F_SEAL_SEAL`.\n\n SealSeal,\n\n}\n\n\n\nimpl FileSeal {\n\n 
/// Return the bit-wise flag value of this seal.\n\n pub(crate) fn bitflags(self) -> u32 {\n\n let b = match self {\n\n FileSeal::SealSeal => libc::F_SEAL_SEAL,\n\n FileSeal::SealShrink => libc::F_SEAL_SHRINK,\n\n FileSeal::SealGrow => libc::F_SEAL_GROW,\n\n FileSeal::SealWrite => libc::F_SEAL_WRITE,\n\n };\n\n b as u32\n\n }\n\n}\n", "file_path": "src/sealing.rs", "rank": 15, "score": 19115.26580442157 }, { "content": "use std::collections::HashSet;\n\n\n\n/// An `HashSet` specialized on `FileSeal`.\n\npub type SealsHashSet = HashSet<FileSeal>;\n\n\n\n/// Seal that can be applied to a [`Memfd`].\n\n///\n\n/// [`Memfd`]: crate::Memfd\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\npub enum FileSeal {\n\n /// File cannot be reduced in size.\n\n ///\n\n /// Corresponds to `F_SEAL_SHRINK`.\n\n SealShrink,\n\n /// File cannot be grown in size.\n\n ///\n\n /// Corresponds to `F_SEAL_GROW`.\n\n SealGrow,\n\n /// File cannot be written.\n\n ///\n", "file_path": "src/sealing.rs", "rank": 16, "score": 19113.46383136384 }, { "content": " let a2 = a1.union(&shrink_seal).cloned().collect();\n\n let r2 = m0.seals().unwrap();\n\n assert_eq!(r2, a2);\n\n\n\n let seal_seal = memfd::SealsHashSet::from_iter(vec![memfd::FileSeal::SealSeal]);\n\n m0.add_seals(&seal_seal).unwrap();\n\n let a3 = a2.union(&seal_seal).cloned().collect();\n\n let r3 = m0.seals().unwrap();\n\n assert_eq!(r3, a3);\n\n\n\n // memfd is \"seal\" sealed, adding further sealing should fail.\n\n m0.add_seals(&shrink_seal).unwrap_err();\n\n}\n\n\n", "file_path": "tests/sealing.rs", "rank": 17, "score": 19108.082380034553 }, { "content": "extern crate memfd;\n\nuse std::iter::FromIterator;\n\n\n\n#[test]\n", "file_path": "tests/sealing.rs", "rank": 18, "score": 19107.743845754758 }, { "content": "// runtime.\n\n#![cfg(any(target_os = \"android\", target_os= \"linux\"))]\n\n\n\nmod errors;\n\nmod memfd;\n\nmod nr;\n\nmod sealing;\n\n\n\npub use crate::{\n\n errors::Error,\n\n memfd::{HugetlbSize, Memfd, 
MemfdOptions},\n\n sealing::{FileSeal, SealsHashSet},\n\n};\n", "file_path": "src/lib.rs", "rank": 24, "score": 9.549571040987576 }, { "content": "//! Error handling.\n\nuse std::fmt;\n\n\n\n/// Enumeration of errors possible in this library\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Cannot convert the `name` argument to a C String!\n\n NameCStringConversion(std::ffi::NulError),\n\n /// Cannot create the memfd\n\n Create(std::io::Error),\n\n /// Cannot add new seals to the memfd\n\n AddSeals(std::io::Error),\n\n /// Cannot read the seals of a memfd\n\n GetSeals(std::io::Error),\n\n}\n\n\n\nimpl std::error::Error for Error {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n use Error::*;\n\n match self {\n", "file_path": "src/errors.rs", "rank": 25, "score": 9.308122615128843 }, { "content": " NameCStringConversion(ref e) => Some(e),\n\n Create(ref e) => Some(e),\n\n AddSeals(ref e) => Some(e),\n\n GetSeals(ref e) => Some(e),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use Error::*;\n\n f.write_str(match self {\n\n NameCStringConversion(_) => \"cannot convert `name` to a C string\",\n\n Create(_) => \"cannot create a memfd\",\n\n AddSeals(_) => \"cannot add seals to the memfd\",\n\n GetSeals(_) => \"cannot read seals for a memfd\",\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[test]\n", "file_path": "src/errors.rs", "rank": 26, "score": 7.946647070770343 }, { "content": "/* from <sys/memfd.h> */\n\n\n\npub(super) const MFD_CLOEXEC: u32 = 1;\n\npub(super) const MFD_ALLOW_SEALING: u32 = 2;\n\npub(super) const MFD_HUGETLB: u32 = 4;\n\n\n\n/* from <asm-generic/hugetlb_encode.h> */\n\n\n\npub(super) const MFD_HUGE_SHIFT: u32 = 26;\n\n\n\npub(super) const MFD_HUGE_64KB: u32 = 16 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_512KB: u32 = 19 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_1MB: u32 = 20 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_2MB: u32 = 21 << 
MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_8MB: u32 = 23 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_16MB: u32 = 24 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_256MB: u32 = 28 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_1GB: u32 = 30 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_2GB: u32 = 31 << MFD_HUGE_SHIFT;\n\npub(super) const MFD_HUGE_16GB: u32 = 34 << MFD_HUGE_SHIFT;\n", "file_path": "src/nr.rs", "rank": 28, "score": 7.798277920492886 }, { "content": "//! A pure-Rust library to work with Linux memfd and seals.\n\n//!\n\n//! It provides support for creating `memfd` objects on Linux\n\n//! and handling seals on them. This was first introduced in\n\n//! Linux kernel 3.17.\n\n//! For further details, see `memfd_create(2)` manpage.\n\n//!\n\n//! ```rust\n\n//! use memfd;\n\n//!\n\n//! fn new_sized_memfd() -> Result<memfd::Memfd, Box<dyn std::error::Error>> {\n\n//! // Create a sealable memfd.\n\n//! let opts = memfd::MemfdOptions::default().allow_sealing(true);\n\n//! let mfd = opts.create(\"sized-1K\")?;\n\n//!\n\n//! // Resize to 1024B.\n\n//! mfd.as_file().set_len(1024)?;\n\n//!\n\n//! // Add seals to prevent further resizing.\n\n//! let mut seals = memfd::SealsHashSet::new();\n", "file_path": "src/lib.rs", "rank": 31, "score": 7.142032690224742 }, { "content": "# memfd\n\n\n\n[![Build Status](https://travis-ci.org/lucab/memfd-rs.svg?branch=master)](https://travis-ci.org/lucab/memfd-rs)\n\n[![crates.io](https://img.shields.io/crates/v/memfd.svg)](https://crates.io/crates/memfd)\n\n[![Documentation](https://docs.rs/memfd/badge.svg)](https://docs.rs/memfd)\n\n\n\nA pure-Rust library to work with Linux memfd and seals.\n\n\n\nIt provides support for creating `memfd` objects on Linux\n\nand handling seals on them. 
This was first introduced in\n\nLinux kernel 3.17.\n\nFor further details, see `memfd_create(2)` manpage.\n\n\n\n## Example\n\n\n\n```rust\n\nextern crate memfd;\n\nuse memfd::errors::Result;\n\n\n\nfn new_sized_memfd() -> Result<memfd::Memfd> {\n\n // Create a sealable memfd.\n\n let opts = memfd::MemfdOptions::default().allow_sealing(true);\n\n let mfd = opts.create(\"sized-1K\")?;\n\n\n\n // Resize to 1024B.\n\n mfd.as_file().set_len(1024)?;\n\n\n\n // Add seals to prevent further resizing.\n\n let mut seals = memfd::SealsHashSet::new();\n\n seals.insert(memfd::FileSeal::SealShrink);\n\n seals.insert(memfd::FileSeal::SealGrow);\n\n mfd.add_seals(&seals)?;\n\n\n\n // Prevent further sealing changes.\n\n mfd.add_seal(memfd::FileSeal::SealSeal);\n\n\n\n Ok(mfd)\n\n}\n\n```\n\n\n\nSome more examples are available under [examples](examples).\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * MIT license - <http://opensource.org/licenses/MIT>\n\n * Apache License, Version 2.0 - <http://www.apache.org/licenses/LICENSE-2.0>\n\n\n\nat your option.\n", "file_path": "README.md", "rank": 34, "score": 6.838522477081314 }, { "content": "extern crate memfd;\n\nuse std::fs;\n\nuse std::os::unix::io::AsRawFd;\n\n\n\n#[test]\n", "file_path": "tests/memfd.rs", "rank": 37, "score": 4.531432145456453 }, { "content": "//! seals.insert(memfd::FileSeal::SealShrink);\n\n//! seals.insert(memfd::FileSeal::SealGrow);\n\n//! mfd.add_seals(&seals)?;\n\n//!\n\n//! // Prevent further sealing changes.\n\n//! mfd.add_seal(memfd::FileSeal::SealSeal)?;\n\n//!\n\n//! Ok(mfd)\n\n//! }\n\n//! 
```\n\n#![deny(\n\n missing_docs,\n\n broken_intra_doc_links,\n\n clippy::all,\n\n unreachable_pub,\n\n unused,\n\n)]\n\n#![cfg_attr(docsrs, feature(doc_cfg))]\n\n#![cfg_attr(docsrs, doc(cfg(any(target_os = \"android\", target_os= \"linux\" ))))]\n\n// No-op crate on platforms that do not support memfd_create, instead of failing to link, or at\n", "file_path": "src/lib.rs", "rank": 38, "score": 4.464848684742023 } ]
Rust
src/maps/perf_map_poller.rs
redcanaryco/oxidebpf
2ec84a57cf99504a484378908d295d863ff27c32
use std::{ collections::HashMap, fmt::{self, Formatter}, sync::{Arc, Condvar, Mutex}, thread, time::Duration, }; use crossbeam_channel::{Sender, TrySendError}; use mio::{unix::SourceFd, Events, Interest, Poll, Token}; use nix::errno::Errno; use slog::crit; use crate::{ maps::{PerCpu, PerfEvent, PerfMap}, PerfChannelMessage, LOGGER, }; pub struct PerfMapPoller { poll: Poll, tokens: HashMap<Token, PerfMap>, } impl PerfMapPoller { pub fn new( perfmaps: impl Iterator<Item = PerfMap>, polling_signal: Arc<(Mutex<bool>, Condvar)>, ) -> Result<Self, InitError> { let poll = Poll::new().map_err(InitError::Creation)?; let registry = poll.registry(); let tokens = perfmaps .map(|p| { let token = Token(p.ev_fd as usize); registry .register(&mut SourceFd(&p.ev_fd), token, Interest::READABLE) .map(|_| (token, p)) }) .collect::<Result<_, _>>() .map_err(InitError::Registration)?; { let (lock, cvar) = &*polling_signal; let mut locked_signal = lock .lock() .map_err(|e| InitError::ReadySignal(e.to_string()))?; *locked_signal = true; cvar.notify_one(); } Ok(Self { poll, tokens }) } pub fn poll( mut self, tx: Sender<PerfChannelMessage>, polling_delay: Duration, ) -> Result<(), std::io::Error> { let mut events = Events::with_capacity(self.tokens.len()); loop { match self.poll_once(&mut events, &tx) { Ok(_) => thread::sleep(polling_delay), Err(RunError::Disconnected) => return Ok(()), Err(RunError::Poll(e)) => return Err(e), } } } fn poll_once( &mut self, events: &mut Events, tx: &Sender<PerfChannelMessage>, ) -> Result<(), RunError> { if let Err(e) = self.poll.poll(events, Some(Duration::from_millis(100))) { match nix::errno::Errno::from_i32(nix::errno::errno()) { Errno::EINTR => return Ok(()), _ => return Err(RunError::Poll(e)), } } let perf_events = events .iter() .filter_map(|e| self.tokens.get(&e.token())) .flat_map(|perfmap| { let name = &perfmap.name; let cpuid = perfmap.cpuid() as i32; unsafe { perfmap .read_all() .map(move |e| e.map(|e| (name.clone(), cpuid, e))) } }) 
.filter_map(|e| match e { Ok(e) => Some(e), Err(e) => { crit!(LOGGER.0, "perf_map_poller(); perfmap read error: {:?}", e); None } }); let mut dropped = 0; for (map_name, cpuid, event) in perf_events { match event { PerfEvent::Lost(count) => { dropped += count; match tx.try_send(PerfChannelMessage::Dropped(dropped)) { Ok(_) => dropped = 0, Err(TrySendError::Disconnected(_)) => return Err(RunError::Disconnected), #[cfg(feature = "metrics")] Err(TrySendError::Full(_)) => { metrics::increment_counter!("perfmap.channel.full", "map_name" => map_name) } #[cfg(not(feature = "metrics"))] Err(TrySendError::Full(_)) => {} } } PerfEvent::Sample(data) => tx .send(PerfChannelMessage::Event { map_name, cpuid, data, }) .map_err(|_| RunError::Disconnected)?, }; } Ok(()) } } pub enum InitError { Creation(std::io::Error), Registration(std::io::Error), ReadySignal(String), } impl fmt::Display for InitError { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { InitError::Creation(e) => write!(f, "error creating poller: {}", e), InitError::Registration(e) => write!(f, "error registering poller: {}", e), InitError::ReadySignal(e) => write!(f, "error grabbing cond mutex: {}", e), } } } enum RunError { Poll(std::io::Error), Disconnected, }
use std::{ collections::HashMap, fmt::{self, Formatter}, sync::{Arc, Condvar, Mutex}, thread, time::Duration, }; use crossbeam_channel::{Sender, TrySendError}; use mio::{unix::SourceFd, Events, Interest, Poll, Token}; use nix::errno::Errno; use slog::crit; use crate::{ maps::{PerCpu, PerfEvent, PerfMap}, PerfChannelMessage, LOGGER, }; pub struct PerfMapPoller { poll: Poll, tokens: HashMap<Token, PerfMap>, } impl PerfMapPoller { pub fn new( perfmaps: impl Iterator<Item = PerfMap>, polling_signal: Arc<(Mutex<bool>, Condvar)>, ) -> Result<Self, InitError> { let poll = Poll::new().map_err(InitError::Creation)?; let registry = poll.registry(); let tokens = perfmaps .map(|p| { let token = Token(p.ev_fd as usize); registry .register(&mut SourceFd(&p.ev_fd), token, Interest::READABLE) .map(|_| (token, p)) }) .collect::<Result<_, _>>() .map_err(InitError::Registration)?; { let (lock, cvar) = &*polling_signal; let mut locked_signal = lock .lock() .map_err(|e| InitError::ReadySignal(e.to_string()))?; *locked_signal = true; cvar.notify_one(); } Ok(Self { poll, tokens }) } pub fn poll( mut self, tx: Sender<PerfChannelMessage>, polling_delay: Duration, ) -> Result<(), std::io::Error> { let mut events = Events::with_capacity(self.tokens.len()); loop { match self.poll_once(&mut events, &tx) { Ok(_) => thread::sleep(polling_delay), Err(RunError::Disconnected) => return Ok(()), Err(RunError::Poll(e)) => return Err(e), } } }
} pub enum InitError { Creation(std::io::Error), Registration(std::io::Error), ReadySignal(String), } impl fmt::Display for InitError { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { InitError::Creation(e) => write!(f, "error creating poller: {}", e), InitError::Registration(e) => write!(f, "error registering poller: {}", e), InitError::ReadySignal(e) => write!(f, "error grabbing cond mutex: {}", e), } } } enum RunError { Poll(std::io::Error), Disconnected, }
fn poll_once( &mut self, events: &mut Events, tx: &Sender<PerfChannelMessage>, ) -> Result<(), RunError> { if let Err(e) = self.poll.poll(events, Some(Duration::from_millis(100))) { match nix::errno::Errno::from_i32(nix::errno::errno()) { Errno::EINTR => return Ok(()), _ => return Err(RunError::Poll(e)), } } let perf_events = events .iter() .filter_map(|e| self.tokens.get(&e.token())) .flat_map(|perfmap| { let name = &perfmap.name; let cpuid = perfmap.cpuid() as i32; unsafe { perfmap .read_all() .map(move |e| e.map(|e| (name.clone(), cpuid, e))) } }) .filter_map(|e| match e { Ok(e) => Some(e), Err(e) => { crit!(LOGGER.0, "perf_map_poller(); perfmap read error: {:?}", e); None } }); let mut dropped = 0; for (map_name, cpuid, event) in perf_events { match event { PerfEvent::Lost(count) => { dropped += count; match tx.try_send(PerfChannelMessage::Dropped(dropped)) { Ok(_) => dropped = 0, Err(TrySendError::Disconnected(_)) => return Err(RunError::Disconnected), #[cfg(feature = "metrics")] Err(TrySendError::Full(_)) => { metrics::increment_counter!("perfmap.channel.full", "map_name" => map_name) } #[cfg(not(feature = "metrics"))] Err(TrySendError::Full(_)) => {} } } PerfEvent::Sample(data) => tx .send(PerfChannelMessage::Event { map_name, cpuid, data, }) .map_err(|_| RunError::Disconnected)?, }; } Ok(()) }
function_block-full_function
[ { "content": "/// Return the current memlock limit.\n\npub fn get_memlock_limit() -> Result<usize, OxidebpfError> {\n\n // use getrlimit() syscall\n\n unsafe {\n\n let mut rlim = libc::rlimit {\n\n rlim_cur: 0,\n\n rlim_max: 0,\n\n };\n\n\n\n let ret = libc::getrlimit(libc::RLIMIT_MEMLOCK, &mut rlim as *mut _);\n\n if ret < 0 {\n\n info!(\n\n LOGGER.0,\n\n \"get_memlock_limit(); could not get memlock limit, errno: {}\",\n\n nix::errno::errno()\n\n );\n\n return Err(OxidebpfError::LinuxError(\n\n \"get_memlock_limit\".to_string(),\n\n nix::errno::Errno::from_i32(nix::errno::errno()),\n\n ));\n\n }\n", "file_path": "src/lib.rs", "rank": 0, "score": 137150.49432975063 }, { "content": "pub fn max_possible_index() -> Result<usize, OxidebpfError> {\n\n let cpu_string = std::fs::read_to_string(\"/sys/devices/system/cpu/possible\").map_err(\n\n |e| {\n\n info!(\n\n LOGGER.0,\n\n \"cpu_info::max_possible_index(); could not read /sys/devices/system/cpu/possible; error: {:?}\", e\n\n );\n\n OxidebpfError::FileIOError\n\n },\n\n )?;\n\n\n\n max_index(&cpu_string)\n\n}\n\n\n", "file_path": "src/cpu_info.rs", "rank": 1, "score": 133583.52868517875 }, { "content": "pub fn set_memlock_limit(limit: usize) -> Result<(), OxidebpfError> {\n\n unsafe {\n\n let rlim = libc::rlimit {\n\n rlim_cur: limit as u64,\n\n rlim_max: limit as u64,\n\n };\n\n let ret = libc::setrlimit(libc::RLIMIT_MEMLOCK, &rlim as *const _);\n\n\n\n if ret < 0 {\n\n info!(\n\n LOGGER.0,\n\n \"set_memlock_limit(); unable to set memlock limit, errno: {}\",\n\n nix::errno::errno()\n\n );\n\n Err(OxidebpfError::LinuxError(\n\n \"set_memlock_limit\".to_string(),\n\n nix::errno::Errno::from_i32(nix::errno::errno()),\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 130153.2279063718 }, { "content": "pub fn online() -> Result<Vec<i32>, OxidebpfError> {\n\n let cpu_string = std::fs::read_to_string(\"/sys/devices/system/cpu/online\").map_err(|e| {\n\n info!(\n\n 
LOGGER.0,\n\n \"cpu_info::online(); could not read /sys/devices/system/cpu/online; error: {:?}\", e\n\n );\n\n OxidebpfError::FileIOError\n\n })?;\n\n\n\n process_cpu_string(&cpu_string)\n\n}\n\n\n", "file_path": "src/cpu_info.rs", "rank": 3, "score": 110957.5225327652 }, { "content": "/// Mounts debugfs to the specified location if it hasn't been mounted already.\n\npub fn mount_if_missing(mount_location: &str) -> Result<(), OxidebpfError> {\n\n if mount_point().is_some() {\n\n return Ok(());\n\n }\n\n\n\n let path = Path::new(mount_location);\n\n if !path.exists() {\n\n // creation is best effort - chown may fail on some paths, such as `/sys/kernel/debug`\n\n if let Err(e) = create_dir_all(path)\n\n .map_err(|_e| OxidebpfError::FileIOError)\n\n .and_then(|_| {\n\n unistd::chown(path, Some(unistd::getuid()), Some(unistd::getgid())).map_err(|_| {\n\n OxidebpfError::LinuxError(\n\n \"chown\".to_string(),\n\n nix::errno::from_i32(nix::errno::errno()),\n\n )\n\n })\n\n })\n\n {\n\n info!(\n", "file_path": "src/debugfs.rs", "rank": 4, "score": 108289.52533321688 }, { "content": "fn page_size() -> Result<usize, OxidebpfError> {\n\n let raw_size = unsafe { libc::sysconf(libc::_SC_PAGE_SIZE) };\n\n\n\n match raw_size.cmp(&0) {\n\n std::cmp::Ordering::Less => {\n\n let e = errno();\n\n info!(\n\n LOGGER.0,\n\n \"PerfMap::new_group(); perfmap error, size < 0: {}; errno: {}\", raw_size, e\n\n );\n\n Err(OxidebpfError::LinuxError(\n\n \"perf map get PAGE_SIZE\".to_string(),\n\n nix::errno::from_i32(e),\n\n ))\n\n }\n\n std::cmp::Ordering::Equal => {\n\n info!(\n\n LOGGER.0,\n\n \"PerfMap::new_group(); perfmap error, bad page size (size == 0)\"\n\n );\n", "file_path": "src/maps/mod.rs", "rank": 5, "score": 103596.48030353879 }, { "content": "/// Return the current process capabilities header and set.\n\npub fn get_capabilities() -> Result<(CapUserHeader, CapUserData), OxidebpfError> {\n\n let mut hdrp = CapUserHeader {\n\n version: 0x20080522, // version 3\n\n pid: 0, // 
calling process\n\n };\n\n\n\n let mut datap = CapUserData::default();\n\n\n\n let ret = unsafe {\n\n libc::syscall(\n\n libc::SYS_capget,\n\n &mut hdrp as *mut _ as *mut libc::c_void,\n\n &mut datap as *mut _ as *mut libc::c_void,\n\n )\n\n };\n\n\n\n if ret < 0 {\n\n Err(OxidebpfError::LinuxError(\n\n \"get_capabilities()\".to_string(),\n\n nix::errno::from_i32(nix::errno::errno()),\n\n ))\n\n } else {\n\n Ok((hdrp, datap))\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 6, "score": 101415.558189222 }, { "content": "fn max_index(cpu_string: &str) -> Result<usize, OxidebpfError> {\n\n let last = cpu_string\n\n .trim()\n\n .split(',')\n\n .last()\n\n .ok_or(OxidebpfError::CpuOnlineFormatError)?;\n\n\n\n let last_index = match last.split_once('-') {\n\n None => last,\n\n Some((_, b)) => b,\n\n };\n\n\n\n last_index\n\n .parse()\n\n .map_err(|_| OxidebpfError::CpuOnlineFormatError)\n\n}\n\n\n", "file_path": "src/cpu_info.rs", "rank": 7, "score": 90724.77519222272 }, { "content": "/// Sets thread priority according to the given policy and then sets a\n\n/// niceness value when relevant. 
Errors are logged but otherwise\n\n/// ignored.\n\nfn prioritize_thread(polling_policy: SchedulingPolicy) {\n\n let native_id = match polling_policy {\n\n SchedulingPolicy::Deadline(_, _, _) => {\n\n // SAFETY: this syscall is always successful\n\n unsafe { libc::syscall(libc::SYS_gettid) as libc::pthread_t }\n\n }\n\n _ => thread_priority::thread_native_id(),\n\n };\n\n let priority = polling_policy.into();\n\n let policy = polling_policy.into();\n\n\n\n // This call throws errors if the passed in priority and policies don't match, so we need\n\n // to ensure that it's what's expected (1 to 99 inclusive for realtime, set of 3 nanosecond\n\n // counts for realtime deadline, 0 for all others).\n\n if let Err(e) = thread_priority::set_thread_priority_and_policy(native_id, priority, policy) {\n\n error!(\n\n LOGGER.0,\n\n \"perf_map_poller(); could not set thread priority, continuing at inherited: {:?}\", e\n\n );\n\n };\n", "file_path": "src/program_version.rs", "rank": 8, "score": 88171.99392074786 }, { "content": "/// Returns the path where `debugfs` is mounted or None if unable to locate.\n\npub fn mount_point() -> Option<String> {\n\n let mount_iter = match MountIter::new() {\n\n Ok(mount_iter) => mount_iter,\n\n Err(e) => {\n\n info!(LOGGER.0, \"failed to create MountIter: {}\", e.to_string());\n\n return None;\n\n }\n\n };\n\n\n\n mount_iter\n\n .flatten()\n\n .find(|m| m.fstype == \"debugfs\")\n\n .map(|m| m.dest.into_os_string().into_string().unwrap_or_default())\n\n}\n\n\n", "file_path": "src/debugfs.rs", "rank": 9, "score": 84918.6974514131 }, { "content": "fn create_slogger_root() -> (slog::Logger, AtomicSwitchCtrl) {\n\n let drain = slog::Logger::root(slog::Discard, o!());\n\n let drain = AtomicSwitch::new(drain);\n\n (slog::Logger::root(drain.clone(), o!()), drain.ctrl())\n\n}\n\n\n\n#[cfg(target_arch = \"aarch64\")]\n\nconst ARCH_SYSCALL_PREFIX: &str = \"__arm64_\";\n\n#[cfg(target_arch = \"x86_64\")]\n\nconst ARCH_SYSCALL_PREFIX: &str = 
\"__x64_\";\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Default)]\n\npub struct CapUserHeader {\n\n version: u32,\n\n pid: i32,\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Default)]\n", "file_path": "src/lib.rs", "rank": 10, "score": 79266.02775402158 }, { "content": "fn enter_pid_mnt_ns(pid: pid_t, my_mount: RawFd) -> Result<usize, OxidebpfError> {\n\n let new_mnt = std::fs::File::open(format!(\"/proc/{}/ns/mnt\", pid))\n\n .map_err(|_| OxidebpfError::FileIOError)?;\n\n let new_inode = new_mnt\n\n .metadata()\n\n .map_err(|_| OxidebpfError::FileIOError)?\n\n .st_ino();\n\n let my_inode = nix::sys::stat::fstat(my_mount)\n\n .map_err(|_| OxidebpfError::FileIOError)?\n\n .st_ino;\n\n if new_inode == my_inode {\n\n return Err(OxidebpfError::SelfTrace);\n\n }\n\n\n\n setns(new_mnt.into_raw_fd(), CLONE_NEWNS)\n\n}\n\n\n", "file_path": "src/perf/syscall.rs", "rank": 11, "score": 78882.58660782925 }, { "content": "fn my_mount_fd() -> Result<RawFd, OxidebpfError> {\n\n let my_mnt =\n\n std::fs::File::open(\"/proc/self/ns/mnt\").map_err(|_| OxidebpfError::FileIOError)?;\n\n Ok(my_mnt.into_raw_fd())\n\n}\n\n\n", "file_path": "src/perf/syscall.rs", "rank": 12, "score": 77941.18912560013 }, { "content": "// Packs the kernel version into an u32\n\nfn get_running_kernel_version() -> Result<u32, OxidebpfError> {\n\n let utsname = nix::sys::utsname::uname();\n\n let release = utsname.release();\n\n let version_base = kernel_major_minor_str_to_u32(release);\n\n\n\n if let Err(e) = set_memlock_limit(libc::RLIM_INFINITY as usize) {\n\n info!(\n\n LOGGER.0,\n\n \"get_running_kernel_version(); failed to set memlock_limit; error: {:?}\", e,\n\n );\n\n }\n\n\n\n // There doesn't seem a portable way to find the \"LINUX_VERSION_CODE\", so we create a minimal\n\n // ebpf program and load it with different versions until we find one that works. 
At most, we\n\n // do this 255 times, as we only enumerate the revision number (1 byte).\n\n let data: Vec<u8> = vec![0xb7, 0, 0, 0, 0, 0, 0, 0, 0x95, 0, 0, 0, 0, 0, 0, 0];\n\n let code = BpfCode::try_from(&data[..])?; // r0 = 0, return r0\n\n let license = \"Proprietary\".to_string();\n\n for revision in 0..256 {\n\n if let Ok(fd) = crate::bpf::syscall::bpf_prog_load(\n", "file_path": "src/blueprint.rs", "rank": 13, "score": 77941.18912560013 }, { "content": "#[repr(C)]\n\n#[derive(Clone, Copy)]\n\nstruct PerfEventHeader {\n\n type_: c_uint,\n\n misc: c_ushort,\n\n size: c_ushort,\n\n}\n\n#[repr(C)]\n\npub struct PerfEventLostSamples {\n\n header: PerfEventHeader,\n\n pub id: u64,\n\n pub count: u64,\n\n}\n\n\n\n#[repr(C)]\n\npub struct PerfEventSample {\n\n header: PerfEventHeader,\n\n size: u32,\n\n // array to data of len `size` stored as as char[1] because Rust's\n\n // DST and C's DST are are not FFI compatible. This needs to be a\n\n // char[] to avoid padding issues since chars are special in c\n\n // padding (in that they do not get pre-padded)\n\n data: [std::os::raw::c_char; 1],\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum PerfEvent {\n\n Sample(Vec<u8>),\n\n Lost(u64),\n\n}\n\n\n", "file_path": "src/maps/mod.rs", "rank": 14, "score": 76266.2721337608 }, { "content": "// returns a power of two that is equal or less than n\n\nfn lte_power_of_two(n: usize) -> usize {\n\n if n.is_power_of_two() {\n\n return n;\n\n }\n\n\n\n match n.checked_next_power_of_two() {\n\n None => 1 << (usize::BITS - 1),\n\n Some(x) => x >> 1,\n\n }\n\n}\n\n\n", "file_path": "src/maps/mod.rs", "rank": 15, "score": 74882.11212787482 }, { "content": "struct PerfEventIterator<'a> {\n\n // modified by iterator\n\n data_tail: u64,\n\n data_head: u64,\n\n errored: bool,\n\n copy_buf: Vec<u8>, // re-usable buffer to make ring joins be contiguous\n\n\n\n // calculated at creation\n\n buffer_size: usize,\n\n base: *const u8,\n\n metadata: *mut PerfMem,\n\n\n\n // gives us the lifetime we 
need to prevent the iterator outliving\n\n // the perfmap\n\n _marker: std::marker::PhantomData<&'a PerfMap>,\n\n}\n\n\n\nimpl<'a> PerfEventIterator<'a> {\n\n fn new(map: &'a PerfMap) -> Self {\n\n // the first page is just metadata\n", "file_path": "src/maps/mod.rs", "rank": 16, "score": 73962.10218850059 }, { "content": "// SAFETY: the fd returned here is passed up through Program to ProgramVersion,\n\n// which manages the fd lifecycle.\n\nfn perf_event_with_attach_point(\n\n attach_point: &str,\n\n return_bit: u64,\n\n p_type: u32,\n\n offset: u64,\n\n cpu: i32,\n\n pid: Option<i32>,\n\n) -> Result<RawFd, OxidebpfError> {\n\n #![allow(clippy::redundant_closure)]\n\n let ap_cstring =\n\n CString::new(attach_point).map_err(|e| OxidebpfError::CStringConversionError(e))?;\n\n let perf_event_attr = PerfEventAttr {\n\n sample_union: PerfSample { sample_period: 1 },\n\n wakeup_union: PerfWakeup { wakeup_events: 1 },\n\n bp_addr_union: PerfBpAddr {\n\n config1: ap_cstring.as_ptr() as u64,\n\n },\n\n bp_len_union: PerfBpLen { config2: offset },\n\n config: return_bit,\n\n p_type,\n", "file_path": "src/perf/syscall.rs", "rank": 17, "score": 72745.60177674862 }, { "content": "fn parse_and_verify_elf(data: &[u8]) -> Result<Elf, OxidebpfError> {\n\n let elf = Elf::parse(data).map_err(|e| {\n\n info!(\n\n LOGGER.0,\n\n \"parse_and_verify_elf(); Invalid ELF; error: {:?}\", e\n\n );\n\n OxidebpfError::InvalidElf\n\n })?;\n\n\n\n match elf.header.e_machine {\n\n header::EM_BPF | header::EM_NONE => (),\n\n _ => return Err(OxidebpfError::InvalidElf),\n\n }\n\n\n\n Ok(elf)\n\n}\n", "file_path": "src/blueprint.rs", "rank": 18, "score": 71328.05417683291 }, { "content": "// SAFETY: original fd is held in the perf_event_open_debugfs function\n\n// and is not passed or duplicated.\n\nfn restore_mnt_ns(original_mnt_ns_fd: RawFd) -> Result<(), OxidebpfError> {\n\n setns(original_mnt_ns_fd, CLONE_NEWNS)?;\n\n unsafe {\n\n if libc::close(original_mnt_ns_fd as c_int) < 0 {\n\n let e = 
errno();\n\n info!(\n\n LOGGER.0,\n\n \"restore_mnt_ns(); could not close original mount namespace fd; fd: {}; errno: {}\",\n\n original_mnt_ns_fd,\n\n e\n\n );\n\n Err(OxidebpfError::LinuxError(\n\n format!(\"restore mount namspace => close({})\", original_mnt_ns_fd),\n\n nix::errno::from_i32(e),\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "src/perf/syscall.rs", "rank": 19, "score": 67667.8877841053 }, { "content": "fn process_cpu_string(cpu_string: &str) -> Result<Vec<i32>, OxidebpfError> {\n\n let mut cpus = vec![];\n\n\n\n for sublist in cpu_string.trim().split(',') {\n\n if sublist.contains('-') {\n\n let pair: Vec<&str> = sublist.split('-').collect();\n\n if pair.len() != 2 {\n\n info!(\n\n LOGGER.0,\n\n \"process_cpu_string(); cpu online formatting error: {}\", cpu_string\n\n );\n\n return Err(OxidebpfError::CpuOnlineFormatError);\n\n }\n\n\n\n // we checked the length above so indexing is OK\n\n let from: i32 = pair[0].parse().map_err(|e| {\n\n info!(\n\n LOGGER.0,\n\n \"process_cpu_string(); cpu online i32 parse error; pair: {:?}; error: {:?}\",\n\n pair,\n", "file_path": "src/cpu_info.rs", "rank": 20, "score": 64833.424053885086 }, { "content": "fn get_kernel_version(data: &[u8], elf: &Elf) -> Result<u32, OxidebpfError> {\n\n const MAGIC_VERSION: u32 = 0xFFFFFFFE;\n\n let version = get_section_by_name(elf, \"version\")\n\n .and_then(|section| get_section_data(data, section))\n\n .filter(|section_data| section_data.len() == 4)\n\n .map(|section_data| {\n\n let mut int_data: [u8; 4] = Default::default();\n\n int_data.copy_from_slice(section_data);\n\n u32::from_ne_bytes(int_data)\n\n })\n\n .unwrap_or(MAGIC_VERSION);\n\n\n\n Ok(if version == MAGIC_VERSION {\n\n #[cfg(not(feature = \"rootless_blueprints\"))]\n\n {\n\n info!(LOGGER.0, \"Dynamically finding the running kernel version\");\n\n get_running_kernel_version()?\n\n }\n\n #[cfg(feature = \"rootless_blueprints\")]\n\n {\n", "file_path": "src/blueprint.rs", "rank": 21, "score": 
64572.407389718224 }, { "content": "fn perf_attach_tracepoint(prog_fd: RawFd, perf_fd: RawFd) -> Result<i32, OxidebpfError> {\n\n perf_event_ioc_set_bpf(perf_fd, prog_fd as u32)?;\n\n perf_event_ioc_enable(perf_fd)\n\n}\n\n\n", "file_path": "src/perf/syscall.rs", "rank": 22, "score": 58377.30173499082 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Reloc {\n\n pub symbol_name: String,\n\n /// The instruction to apply the relocation to\n\n pub insn_index: u64,\n\n /// The type of relocation. (R_BPF_64_32, R_BPF_64_64,)\n\n pub reloc_type: u32,\n\n}\n\n\n\nimpl Reloc {\n\n /// Retrieve the section relocations for a given program section index\n\n fn get_map_relocations(program_index: usize, elf: &Elf) -> Result<Vec<Self>, OxidebpfError> {\n\n // find the relocation index\n\n let reloc_index = elf\n\n .section_headers\n\n .iter()\n\n .enumerate()\n\n .find(|(_index, sh)| {\n\n sh.sh_type == section_header::SHT_REL && sh.sh_info == program_index as u32\n\n })\n\n .map(|(reloc_index, _)| reloc_index);\n", "file_path": "src/blueprint.rs", "rank": 23, "score": 53634.465175293066 }, { "content": "#[derive(Clone, Default)]\n\nstruct TailCallMapping {\n\n map: String,\n\n index: u32,\n\n}\n\n\n\n/// The description of an individual eBPF program. 
Note: This is _not_ the same\n\n/// as the eBPF program itself, the actual binary is loaded from a\n\n/// [`ProgramBlueprint`](struct@ProgramBlueprint).\n\n#[derive(Clone, Default)]\n\npub struct Program<'a> {\n\n kind: Option<ProgramType>,\n\n name: &'a str,\n\n attach_points: Vec<String>,\n\n optional: bool,\n\n loaded: bool,\n\n is_syscall: bool,\n\n fd: RawFd,\n\n pid: Option<pid_t>,\n\n tail_call_mapping: Option<TailCallMapping>,\n\n debugfs_mount: DebugfsMountOpts,\n", "file_path": "src/lib.rs", "rank": 24, "score": 50382.82506194351 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct MapElem {\n\n map_fd: c_uint,\n\n key: c_ulong,\n\n keyval: KeyVal,\n\n flags: c_ulong,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 25, "score": 50382.82506194351 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfObj {\n\n pathname: c_ulong,\n\n bpf_fd: c_uint,\n\n file_flags: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 26, "score": 50382.82506194351 }, { "content": "#[repr(C)]\n\n#[derive(Debug)]\n\nstruct PerfMem {\n\n version: c_uint,\n\n compat_version: c_uint,\n\n lock: c_uint,\n\n index: c_uint,\n\n offset: c_long,\n\n time_enabled: c_ulong,\n\n time_running: c_ulong,\n\n capabilities: PerfMemCapabilitiesBitfield,\n\n pmc_width: c_ushort,\n\n time_shift: c_ushort,\n\n time_mult: c_uint,\n\n time_offset: c_ulong,\n\n time_zero: c_ulong,\n\n size: c_uint,\n\n reserved_1: c_uint,\n\n time_cycles: c_ulong,\n\n time_mask: c_ulong,\n\n __reserved: [c_uchar; 928usize],\n\n data_head: c_ulong,\n", "file_path": "src/maps/mod.rs", "rank": 27, "score": 50382.82506194351 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct BpfIterCreate {\n\n link_fd: c_uint,\n\n flags: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 28, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy)]\n\nstruct BpfProgLoad {\n\n // 
Minimal functionality set\n\n prog_type: c_uint,\n\n insn_cnt: c_uint,\n\n insns: c_ulong, // Vec<BpfInsn> - const struct bpf_insn\n\n license: c_ulong, // const char *\n\n log_level: c_uint,\n\n log_size: c_uint,\n\n log_buf: c_ulong, // 'char *' buffer\n\n // Additional functionality set, as of 5.12.7\n\n kern_version: c_uint, // not used later, used in early versions\n\n prog_flags: c_uint,\n\n prog_name: [u8; BPF_OBJ_NAME_LEN], // char array, length BPF_OBJ_NAME_LEN\n\n prog_ifindex: c_uint,\n\n expected_attach_type: c_uint,\n\n prog_btf_fd: c_uint,\n\n func_info_rec_size: c_uint,\n\n func_info: c_ulong,\n\n func_info_cnt: c_uint,\n\n line_info_rec_size: c_uint,\n", "file_path": "src/bpf/mod.rs", "rank": 29, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfBtfLoad {\n\n btf: c_ulong,\n\n btf_log_buf: c_ulong,\n\n btf_size: c_uint,\n\n btf_log_size: c_uint,\n\n btf_log_level: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 30, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy)]\n\nstruct PerfMemBitfield {\n\n field: c_ulong,\n\n}\n\n\n\n#[repr(align(8), C)]\n\nunion PerfMemCapabilitiesBitfield {\n\n capabilities: c_ulong,\n\n bitfield: PerfMemBitfield,\n\n}\n\n\n\nimpl Debug for PerfMemCapabilitiesBitfield {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"debug not implemented\")\n\n }\n\n}\n\n\n", "file_path": "src/maps/mod.rs", "rank": 31, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfProgQuery {\n\n target_fd: c_uint,\n\n attach_type: c_uint,\n\n query_flags: c_uint,\n\n attach_flags: c_uint,\n\n prog_ids: c_ulong,\n\n prog_cnt: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 32, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct BpfLinkDetach {\n\n link_fd: 
c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 33, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct TaskFdQuery {\n\n pid: c_uint,\n\n fd: c_uint,\n\n flags: c_uint,\n\n buf_len: c_uint,\n\n buf: c_ulong,\n\n prog_id: c_uint,\n\n fd_type: c_uint,\n\n probe_offset: c_ulong,\n\n probe_addr: c_ulong,\n\n}\n\n\n\n#[repr(align(8), C)]\n\n#[derive(Clone, Copy)]\n\nunion LinkTarget {\n\n target_fd: c_uint,\n\n target_ifindex: c_uint,\n\n}\n\n\n\nimpl Debug for LinkTarget {\n", "file_path": "src/bpf/mod.rs", "rank": 34, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfProgTach {\n\n target_fd: c_uint,\n\n attach_bpf_fd: c_uint,\n\n attach_type: c_uint,\n\n attach_flags: c_uint,\n\n replace_bpf_fd: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 35, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct BpfLinkUpdate {\n\n link_fd: c_uint,\n\n new_prog_fd: c_uint,\n\n flags: c_uint,\n\n old_prog_fd: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 36, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct BpfLinkCreate {\n\n prog_fd: c_uint,\n\n target: LinkTarget,\n\n attach_type: c_uint,\n\n flags: c_uint,\n\n link_target_info: LinkTargetInfo,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 37, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfGetId {\n\n id: GetIdUnion,\n\n next_id: c_uint,\n\n open_flags: c_uint,\n\n}\n\n\n\n#[repr(align(8), C)]\n\n#[derive(Clone, Copy)]\n\nunion GetIdUnion {\n\n start_id: c_uint,\n\n prog_id: c_uint,\n\n map_id: c_uint,\n\n btf_id: c_uint,\n\n link_id: c_uint,\n\n}\n\n\n\nimpl Default for GetIdUnion {\n\n fn default() -> Self {\n\n Self { start_id: 0 }\n\n }\n\n}\n\n\n", "file_path": 
"src/bpf/mod.rs", "rank": 38, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfMapBatch {\n\n in_batch: c_ulong,\n\n out_batch: c_ulong,\n\n keys: c_ulong,\n\n values: c_ulong,\n\n count: c_uint,\n\n map_fd: c_uint,\n\n elem_flags: c_ulong,\n\n flags: c_ulong,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 39, "score": 48995.71719468625 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct BpfEnableStats {\n\n stat_type: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 40, "score": 48995.71719468625 }, { "content": "pub trait PerCpu {\n\n fn cpuid(&self) -> i32;\n\n}\n\n\n\nimpl ProgMap {\n\n pub(crate) fn new(map_name: &str, max_entries: u32) -> Result<Self, OxidebpfError> {\n\n let fd = bpf_map_create(BPF_MAP_TYPE_PROG_ARRAY, 4u32, 4u32, max_entries)?;\n\n let map = Map {\n\n name: map_name.to_string(),\n\n fd,\n\n map_config: MapConfig::new(bpf_map_type::BPF_MAP_TYPE_PROG_ARRAY, 4, 4, max_entries),\n\n map_config_size: std::mem::size_of::<MapConfig>(),\n\n loaded: true,\n\n };\n\n Ok(ProgMap { base: map })\n\n }\n\n\n\n // TODO: these functions are a good candidate for a trait\n\n pub(crate) fn set_fd(&mut self, fd: RawFd) {\n\n self.base.fd = fd;\n", "file_path": "src/maps/mod.rs", "rank": 41, "score": 48628.98214684072 }, { "content": "fn perf_map_poller(\n\n perfmaps: Vec<PerfMap>,\n\n tx: Sender<PerfChannelMessage>,\n\n polling_delay: Duration,\n\n polling_policy: SchedulingPolicy,\n\n polling_signal: Arc<(Mutex<bool>, Condvar)>,\n\n) {\n\n prioritize_thread(polling_policy);\n\n\n\n let poller = match PerfMapPoller::new(perfmaps.into_iter(), polling_signal) {\n\n Ok(poller) => poller,\n\n Err(e) => {\n\n crit!(LOGGER.0, \"perf_map_poller(); {}\", e);\n\n return;\n\n }\n\n };\n\n\n\n if let Err(e) = poller.poll(tx, polling_delay) {\n\n crit!(\n\n LOGGER.0,\n\n \"perf_map_poller(); unrecoverable polling error: {}\",\n\n e\n\n );\n\n 
}\n\n}\n\n\n", "file_path": "src/program_version.rs", "rank": 42, "score": 47997.167414592375 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct BpfProgBindMap {\n\n prog_fd: c_uint,\n\n map_fd: c_uint,\n\n flags: c_uint,\n\n}\n\n\n\n/// Holds a BpfAttr union where only the specified `size`, in bytes, is to be used for\n\n/// underlying bpf syscalls.\n\n#[derive(Debug, Copy, Clone)]\n\npub(crate) struct SizedBpfAttr {\n\n pub(crate) bpf_attr: BpfAttr,\n\n /// The amount of used bytes of the given [`BpfAttr`]. See [`sys_bpf`](Fn@sys_bpf) for\n\n /// an example.\n\n pub(crate) size: usize,\n\n}\n\n\n\nimpl From<MapDefinition> for SizedBpfAttr {\n\n fn from(map: MapDefinition) -> Self {\n\n Self {\n\n bpf_attr: BpfAttr {\n", "file_path": "src/bpf/mod.rs", "rank": 43, "score": 47738.04967176773 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfRawTracepointOpen {\n\n name: c_ulong,\n\n prog_fd: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 44, "score": 47738.04967176773 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfProgTestRun {\n\n prog_fd: c_uint,\n\n retval: c_uint,\n\n data_size_in: c_uint,\n\n data_size_out: c_uint,\n\n data_in: c_ulong,\n\n data_out: c_ulong,\n\n repeat: c_uint,\n\n duration: c_uint,\n\n ctx_size_in: c_uint,\n\n ctx_size_out: c_uint,\n\n ctx_in: c_ulong,\n\n ctx_out: c_ulong,\n\n flags: c_uint,\n\n cpu: c_uint,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 45, "score": 47738.04967176773 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default, Debug)]\n\nstruct LinkTargetIterInfo {\n\n iter_info: c_ulong,\n\n iter_info_len: c_uint,\n\n}\n\n\n\n#[repr(align(8), C)]\n\n#[derive(Clone, Copy)]\n\nunion LinkTargetInfo {\n\n target_btf_id: c_uint,\n\n info: LinkTargetIterInfo,\n\n}\n\n\n\nimpl Debug for LinkTargetInfo {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, 
\"LinkTargetInfo\")\n\n }\n\n}\n\n\n\nimpl Default for LinkTargetInfo {\n\n fn default() -> Self {\n\n Self { target_btf_id: 0 }\n\n }\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 46, "score": 47738.04967176773 }, { "content": "fn perf_attach_tracepoint_with_debugfs(\n\n prog_fd: RawFd,\n\n event_path: String,\n\n cpu: i32,\n\n) -> Result<(String, RawFd), OxidebpfError> {\n\n let p_type = std::fs::read_to_string((*PMU_TTYPE_FILE).as_path())\n\n .map_err(|_| OxidebpfError::FileIOError)?\n\n .trim()\n\n .to_string()\n\n .parse::<u32>()\n\n .map_err(|_| OxidebpfError::NumberParserError)?;\n\n\n\n let config = std::fs::read_to_string(format!(\n\n \"{}/tracing/events/{}/id\",\n\n mount_point().unwrap_or_else(|| \"/sys/kernel/debug\".to_string()),\n\n event_path\n\n ))\n\n .map_err(|_| {\n\n if event_path.contains(\"kretprobe\") {\n\n OxidebpfError::KretprobeNamingError\n", "file_path": "src/perf/syscall.rs", "rank": 47, "score": 46743.67532985651 }, { "content": "#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Default)]\n\nstruct BpfObjGetInfoByFd {\n\n bpf_fd: c_uint,\n\n info_len: c_uint,\n\n info: c_ulong,\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 48, "score": 46592.511762556445 }, { "content": "/// This trait specifies a map that can be read from or written to (e.g., array types).\n\npub trait RWMap<T, U> {\n\n /// # Safety\n\n ///\n\n /// This function should only be called when `std::mem::size_of::<T>()` matches\n\n /// the value in the map being read from and when `std::mem::size_of::<U>()`\n\n /// matches the key.\n\n unsafe fn read(&self, key: U) -> Result<T, OxidebpfError>;\n\n\n\n /// # Safety\n\n ///\n\n /// This function should only be called when `std::mem::size_of::<T>()` matches\n\n /// the value in the map being written to and when `std::mem::size_of::<U>()`\n\n /// matches the key.\n\n unsafe fn write(&self, key: U, value: T) -> Result<(), OxidebpfError>;\n\n}\n\n\n", "file_path": "src/maps/mod.rs", "rank": 49, "score": 
42497.099496703624 }, { "content": "fn drop_debugfs_uprobes(debugfs_mount: &str) {\n\n let up_file = match std::fs::OpenOptions::new()\n\n .append(true)\n\n .write(true)\n\n .read(true)\n\n .open(format!(\"{}/tracing/uprobe_events\", debugfs_mount))\n\n {\n\n Ok(f) => f,\n\n Err(e) => {\n\n info!(\n\n LOGGER.0,\n\n \"ProgramVersion::drop(); could not modify {}/tracing/uprobe_events: {:?}\",\n\n debugfs_mount,\n\n e\n\n );\n\n return;\n\n }\n\n };\n\n let up_reader = BufReader::new(&up_file);\n\n let mut up_writer = BufWriter::new(&up_file);\n", "file_path": "src/program_version.rs", "rank": 50, "score": 40557.55574347517 }, { "content": "fn drop_debugfs_kprobes(debugfs_mount: &str) {\n\n let kp_file = match std::fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .append(true)\n\n .open(format!(\"{}/tracing/uprobe_events\", debugfs_mount))\n\n {\n\n Ok(f) => f,\n\n Err(e) => {\n\n info!(\n\n LOGGER.0,\n\n \"ProgramVersion::drop(); could not modify {}/tracing/kprobe_events: {:?}\",\n\n debugfs_mount,\n\n e\n\n );\n\n return;\n\n }\n\n };\n\n let kp_reader = BufReader::new(&kp_file);\n\n let mut kp_writer = BufWriter::new(&kp_file);\n", "file_path": "src/program_version.rs", "rank": 51, "score": 40557.55574347517 }, { "content": "fn kernel_major_minor_str_to_u32(release: &str) -> u32 {\n\n let release = release.to_string();\n\n // The release information comes in the format \"major.minor.patch-extra\".\n\n let mut split = release.split('.').flat_map(str::parse);\n\n ((split.next().unwrap_or(0) & 0xFF) << 16) + ((split.next().unwrap_or(0) & 0xFF) << 8)\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 52, "score": 38117.4944734738 }, { "content": "fn get_license(data: &[u8], elf: &Elf) -> String {\n\n get_section_by_name(elf, \"license\")\n\n .and_then(|sh| get_section_data(data, sh))\n\n .and_then(|section_data| CStr::from_bytes_with_nul(section_data).ok())\n\n .and_then(|s| s.to_str().ok())\n\n .map(|s| s.to_string())\n\n 
.unwrap_or_default()\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 53, "score": 36768.928471972744 }, { "content": "fn get_symbol_name(elf: &Elf, sym: &Sym) -> Option<String> {\n\n elf.strtab.get_at(sym.st_name).map(String::from)\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 54, "score": 34010.65840996275 }, { "content": "fn get_section_by_name<'a>(elf: &'a Elf, name: &str) -> Option<&'a SectionHeader> {\n\n elf.section_headers\n\n .iter()\n\n .find(|sh| get_section_name(elf, sh) == Some(name))\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 55, "score": 31071.711298241964 }, { "content": "fn get_section_data<'a>(data: &'a [u8], sh: &'a SectionHeader) -> Option<&'a [u8]> {\n\n data.get(sh.sh_offset as usize..(sh.sh_offset + sh.sh_size) as usize)\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 56, "score": 30516.67263757241 }, { "content": "fn get_section_name<'a>(elf: &'a Elf, sh: &'a SectionHeader) -> Option<&'a str> {\n\n elf.shdr_strtab.get_unsafe(sh.sh_name)\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 57, "score": 30516.67263757241 }, { "content": " perfmaps: Vec<PerfMap>,\n\n tx: Sender<PerfChannelMessage>,\n\n ) -> Result<(), OxidebpfError> {\n\n let polling_delay = Duration::from_millis(self.polling_delay);\n\n let polling_policy = self\n\n .polling_thread_policy\n\n .unwrap_or(SchedulingPolicy::Other(0));\n\n\n\n // the PerfMapPoller thread will use this to signal when\n\n // it is ready to receive events.\n\n let perf_poller_signal = Arc::new((Mutex::new(false), Condvar::new()));\n\n let perf_poller_signal_clone = perf_poller_signal.clone();\n\n\n\n let _ = std::thread::Builder::new()\n\n .name(\"PerfMapPoller\".to_string())\n\n .spawn(move || {\n\n perf_map_poller(\n\n perfmaps,\n\n tx,\n\n polling_delay,\n", "file_path": "src/program_version.rs", "rank": 62, "score": 31.633068264077725 }, { "content": "\n\n if wait_result.timed_out() {\n\n info!(\n\n LOGGER.0,\n\n \"event_poller(); PerfMapPoller is not ready to 
receive events\"\n\n );\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn load_program_version(\n\n &mut self,\n\n mut program_blueprint: ProgramBlueprint,\n\n mut perfmap_opts_fn: impl FnMut() -> (Sender<PerfChannelMessage>, PerfBufferSize),\n\n ) -> Result<(), OxidebpfError> {\n\n let mut matching_blueprints: Vec<ProgramObject> = self\n\n .programs\n\n .iter()\n\n .map(|p| {\n", "file_path": "src/program_version.rs", "rank": 63, "score": 30.77400415153092 }, { "content": " polling_policy,\n\n perf_poller_signal_clone,\n\n )\n\n })\n\n .map_err(|e| {\n\n crit!(LOGGER.0, \"event_poller(); error in thread polling: {:?}\", e);\n\n OxidebpfError::ThreadPollingError\n\n })?;\n\n\n\n // Wait until PerfMapPoller is ready.\n\n let max_wait = Duration::from_secs(1);\n\n let (lock, cvar) = &*perf_poller_signal;\n\n let wait_result = cvar\n\n .wait_timeout_while(\n\n lock.lock().map_err(|_| OxidebpfError::LockError)?,\n\n max_wait,\n\n |&mut pending| !pending,\n\n )\n\n .map_err(|_| OxidebpfError::LockError)?\n\n .1;\n", "file_path": "src/program_version.rs", "rank": 64, "score": 29.540857899780015 }, { "content": " fn default() -> Self {\n\n Self {\n\n sample_period: 0u64,\n\n }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub(crate) union PerfWakeup {\n\n pub(crate) wakeup_events: c_uint,\n\n pub(crate) wakeup_watermark: c_uint,\n\n}\n\n\n\nimpl Debug for PerfWakeup {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let value = unsafe { self.wakeup_watermark };\n\n write!(f, \"PerfWakeup: {}\", value)\n\n }\n\n}\n\n\n", "file_path": "src/perf/mod.rs", "rank": 65, "score": 26.890333451586482 }, { "content": "impl Default for PerfWakeup {\n\n fn default() -> Self {\n\n Self {\n\n wakeup_events: 0u32,\n\n }\n\n }\n\n}\n\n\n\n#[repr(align(8), C)]\n\npub(crate) union PerfBpAddr {\n\n pub(crate) bp_addr: c_ulong,\n\n pub(crate) config1: c_ulong,\n\n}\n\n\n\nimpl Debug for PerfBpAddr {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let value = unsafe { 
self.bp_addr };\n\n write!(f, \"PerfBpAddr: {}\", value)\n\n }\n\n}\n", "file_path": "src/perf/mod.rs", "rank": 66, "score": 26.458834031436073 }, { "content": "use std::fmt::{Debug, Formatter};\n\nuse std::os::raw::{c_int, c_uint, c_ulong, c_ushort};\n\n\n\npub(crate) mod constant;\n\npub(crate) mod syscall;\n\n\n\n#[repr(C)]\n\npub(crate) union PerfSample {\n\n pub(crate) sample_period: c_ulong,\n\n pub(crate) sample_freq: c_ulong,\n\n}\n\n\n\nimpl Debug for PerfSample {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let value = unsafe { self.sample_freq };\n\n write!(f, \"PerfSample: {}\", value)\n\n }\n\n}\n\n\n\nimpl Default for PerfSample {\n", "file_path": "src/perf/mod.rs", "rank": 67, "score": 25.721996885243534 }, { "content": " }\n\n\n\n /// Manually specify the perfmap polling interval for this `ProgramVersion`.\n\n pub fn polling_delay(mut self, delay: u64) -> Self {\n\n self.polling_delay = delay;\n\n self\n\n }\n\n\n\n pub(crate) fn set_debugfs_mount_point(&mut self, debugfs_mount: DebugfsMountOpts) {\n\n for program in self.programs.iter_mut() {\n\n program.set_debugfs_mount_point(debugfs_mount.clone());\n\n }\n\n }\n\n\n\n pub(crate) fn set_polling_policy(&mut self, policy: Option<SchedulingPolicy>) {\n\n self.polling_thread_policy = policy;\n\n }\n\n\n\n fn event_poller(\n\n &self,\n", "file_path": "src/program_version.rs", "rank": 68, "score": 24.257204687603 }, { "content": "\n\nimpl Default for PerfBpAddr {\n\n fn default() -> Self {\n\n Self { bp_addr: 0u64 }\n\n }\n\n}\n\n\n\n#[repr(align(8), C)]\n\npub(crate) union PerfBpLen {\n\n pub(crate) bp_len: c_ulong,\n\n pub(crate) config2: c_ulong,\n\n}\n\n\n\nimpl Debug for PerfBpLen {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let value = unsafe { self.bp_len };\n\n write!(f, \"PerfBpLen: {}\", value)\n\n }\n\n}\n\n\n", "file_path": "src/perf/mod.rs", "rank": 69, "score": 23.903480444438 }, { "content": " };\n\n Ok(ArrayMap { base: map })\n\n }\n\n\n\n 
pub(crate) fn set_fd(&mut self, fd: RawFd) {\n\n self.base.fd = fd;\n\n }\n\n\n\n pub(crate) fn get_fd(&self) -> &RawFd {\n\n &self.base.fd\n\n }\n\n\n\n pub(crate) fn is_loaded(&self) -> bool {\n\n self.base.loaded\n\n }\n\n}\n\n\n\nimpl Display for ArrayMap {\n\n fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {\n\n write!(f, \"Name: {}, loaded: {}\", self.base.name, self.base.loaded)\n", "file_path": "src/maps/mod.rs", "rank": 70, "score": 23.22698413108285 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub(crate) struct BpfCode(pub Vec<BpfInsn>);\n\n\n\nimpl TryFrom<&[u8]> for BpfCode {\n\n type Error = OxidebpfError;\n\n fn try_from(raw: &[u8]) -> Result<Self, Self::Error> {\n\n if raw.len() < std::mem::size_of::<BpfInsn>()\n\n || raw.len() % std::mem::size_of::<BpfInsn>() != 0\n\n {\n\n info!(\n\n LOGGER.0,\n\n \"BpfCode::try_from(); Invalid program length, raw.len(): {}\",\n\n raw.len()\n\n );\n\n return Err(OxidebpfError::InvalidProgramLength);\n\n }\n\n let mut instructions: Vec<BpfInsn> = Vec::new();\n\n for i in (0..raw.len()).step_by(std::mem::size_of::<BpfInsn>()) {\n\n instructions.push(BpfInsn::try_from(\n", "file_path": "src/bpf/mod.rs", "rank": 72, "score": 22.647098442770407 }, { "content": " match program_version\n\n .load_program_version(program_blueprint.clone(), &mut perfmap_opts_fn)\n\n {\n\n Ok(()) => {\n\n self.loaded_version = Some(program_version);\n\n break;\n\n }\n\n Err(e) => {\n\n errors.push(e);\n\n }\n\n };\n\n }\n\n\n\n match &self.loaded_version {\n\n None => {\n\n info!(\n\n LOGGER.0,\n\n \"ProgramGroup::load(); error: no program version was able to load for {:?}, errors: {:?}\",\n\n match std::env::current_exe() {\n\n Ok(p) => p,\n", "file_path": "src/program_group.rs", "rank": 73, "score": 22.628437809153564 }, { "content": " /// # Safety\n\n ///\n\n /// This is only safe if a single iterator is running per perfmap.\n\n /// This function is marked as `&self` for easiness of use and\n\n /// because it is internal only but 
it probably should be `&mut\n\n /// self`. When the iterator is dropped it internally changes data\n\n /// in the mmap that the kernel manages (data_tail to be precise)\n\n /// to tell it what is the last bit we read so we shouldn't have\n\n /// multiple mutations at the same time.\n\n pub(crate) unsafe fn read_all(\n\n &self,\n\n ) -> impl Iterator<Item = Result<PerfEvent, OxidebpfError>> + '_ {\n\n PerfEventIterator::new(self)\n\n }\n\n}\n\n\n", "file_path": "src/maps/mod.rs", "rank": 74, "score": 22.4494349081415 }, { "content": " program_blueprint: ProgramBlueprint,\n\n program_versions: Vec<ProgramVersion<'a>>,\n\n mut perfmap_opts_fn: impl FnMut() -> (Sender<PerfChannelMessage>, PerfBufferSize),\n\n ) -> Result<(), OxidebpfError> {\n\n if self.loaded {\n\n info!(\n\n LOGGER.0,\n\n \"ProgramGroup::load(); error: attempting to load a program group that was already loaded\"\n\n );\n\n return Err(OxidebpfError::ProgramGroupAlreadyLoaded);\n\n }\n\n\n\n if let Some(limit) = self.mem_limit {\n\n set_memlock_limit(limit)?;\n\n }\n\n let mut errors = vec![];\n\n for mut program_version in program_versions {\n\n program_version.set_debugfs_mount_point(self.debugfs_mount.clone());\n\n program_version.set_polling_policy(self.polling_thread_policy);\n\n\n", "file_path": "src/program_group.rs", "rank": 75, "score": 21.105739245604596 }, { "content": " metrics::histogram!(\"perfmap.buffer_unread_pct\", pct_used, &labels);\n\n }\n\n\n\n PerfEventIterator {\n\n data_tail,\n\n data_head,\n\n errored: false,\n\n copy_buf: vec![],\n\n buffer_size,\n\n base,\n\n metadata,\n\n _marker: std::marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for PerfEventIterator<'a> {\n\n type Item = Result<PerfEvent, OxidebpfError>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "src/maps/mod.rs", "rank": 76, "score": 21.099658061994177 }, { "content": " }\n\n\n\n fn attach_kprobe(&self) -> Result<(Vec<String>, Vec<RawFd>), OxidebpfError> {\n\n let is_return = 
self.kind == Some(ProgramType::Kretprobe);\n\n\n\n self.attach_points\n\n .iter()\n\n .fold(Ok((vec![], vec![])), |mut result, attach_point| {\n\n match attach_kprobe(self.fd, attach_point, is_return, None, 0) {\n\n Ok(fd) => {\n\n // skip if we already failed\n\n if let Ok((_, fds)) = &mut result {\n\n fds.push(fd);\n\n }\n\n }\n\n Err(e) => {\n\n info!(LOGGER.0, \"Program::attach_kprobe(); original error: {:?}\", e);\n\n self.mount_debugfs_if_missing();\n\n match attach_kprobe_debugfs(self.fd, attach_point, is_return, None, 0) {\n\n Ok((path, fd)) => {\n", "file_path": "src/lib.rs", "rank": 77, "score": 20.940806011738445 }, { "content": " // skip if we already failed\n\n if let Ok((paths, fds)) = &mut result {\n\n paths.push(path);\n\n fds.push(fd);\n\n }\n\n }\n\n Err(s) => match &mut result {\n\n Ok(_) => result = Err(vec![e, s]),\n\n Err(errors) => {\n\n info!(\n\n LOGGER.0,\n\n \"Program::attach_kprobe(); multiple kprobe load errors: {:?}; {:?}\", e, s\n\n );\n\n errors.extend(vec![e, s])\n\n }\n\n },\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 78, "score": 20.775390788952997 }, { "content": "\n\n let debugfs_path = mount_point().unwrap_or_else(|| \"/sys/kernel/debug\".to_string());\n\n let event_path = format!(\"{}/tracing/{}_events\", debugfs_path, prefix);\n\n info!(\n\n LOGGER.0,\n\n \"perf_event_open_debugfs(); event_path: {}\", event_path\n\n );\n\n let mut event_file = std::fs::OpenOptions::new()\n\n .write(true)\n\n .append(true)\n\n .open(&event_path)\n\n .map_err(|_e| {\n\n info!(\n\n LOGGER.0,\n\n \"failed to open debugfs tracing path: '{}'\", event_path\n\n );\n\n OxidebpfError::DebugFsNotMounted\n\n })?;\n\n\n\n let mut uuid = uuid::Uuid::new_v4().to_string();\n", "file_path": "src/perf/syscall.rs", "rank": 79, "score": 20.717260321595646 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"LinkTarget\")\n\n }\n\n}\n\n\n\nimpl Default for LinkTarget {\n\n fn default() -> Self {\n\n Self { 
target_fd: 0 }\n\n }\n\n}\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 80, "score": 20.68014094140269 }, { "content": "\n\n pub(crate) fn is_loaded(&self) -> bool {\n\n self.base.loaded\n\n }\n\n}\n\n\n\nimpl Display for BpfHashMap {\n\n fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {\n\n write!(f, \"Name: {}, loaded: {}\", self.base.name, self.base.loaded)\n\n }\n\n}\n\n\n\nimpl ArrayMap {\n\n /// Create a new ArrayMap\n\n ///\n\n /// Calling new will create a new BPF_MAP_TYPE_ARRAY map. It stores some meta data\n\n /// to track it. The array map supports read and write operations to access the\n\n /// members of the map\n\n ///\n\n /// # Safety\n", "file_path": "src/maps/mod.rs", "rank": 81, "score": 20.517609591093972 }, { "content": " }\n\n Err(s) => match &mut result {\n\n Ok(_) => result = Err(vec![e, s]),\n\n Err(errors) => {\n\n info!(\n\n LOGGER.0,\n\n \"Program::attach_uprobe(); multiple uprobe load errors: {:?}; {:?}\", e, s\n\n );\n\n errors.extend(vec![e, s])\n\n }\n\n },\n\n }\n\n }\n\n }\n\n\n\n result\n\n })\n\n .map_err(OxidebpfError::MultipleErrors)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 82, "score": 20.30703453136003 }, { "content": "use slog::info;\n\nuse std::convert::TryFrom;\n\nuse std::ffi::CStr;\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::os::raw::{c_int, c_short, c_uchar, c_uint, c_ulong};\n\n\n\nuse crate::bpf::constant::{bpf_prog_type, BPF_OBJ_NAME_LEN};\n\nuse crate::error::OxidebpfError;\n\nuse crate::ProgramType;\n\nuse crate::LOGGER;\n\n\n\npub(crate) mod constant;\n\npub(crate) mod syscall;\n\n\n\n#[repr(align(8), C)]\n\n#[derive(Clone, Copy, Debug)]\n\npub(crate) struct MapConfig {\n\n // Minimum functionality set\n\n pub(crate) map_type: c_uint,\n\n pub(crate) key_size: c_uint, // size of key in bytes\n", "file_path": "src/bpf/mod.rs", "rank": 84, "score": 20.087899161736818 }, { "content": " &raw[i..i + std::mem::size_of::<BpfInsn>()],\n\n )?);\n\n }\n\n Ok(BpfCode(instructions))\n\n 
}\n\n}\n\n\n\n#[repr(align(8), C)]\n\n#[derive(Debug, Clone)]\n\npub(crate) struct BpfInsn {\n\n pub code: c_uchar,\n\n pub regs: c_uchar,\n\n pub off: c_short,\n\n pub imm: c_int,\n\n}\n\n\n\nimpl TryFrom<&[u8]> for BpfInsn {\n\n type Error = OxidebpfError;\n\n fn try_from(raw: &[u8]) -> Result<Self, Self::Error> {\n\n if raw.len() < std::mem::size_of::<BpfInsn>() {\n", "file_path": "src/bpf/mod.rs", "rank": 85, "score": 20.087517429748058 }, { "content": " LOGGER.0,\n\n \"sys_bpf(); cmd: {}; errno: {}; arg_bpf_attr: {:?}\", cmd, e, arg_bpf_attr\n\n );\n\n if Errno::from_i32(e) == EAGAIN && idx < 5 {\n\n OperationResult::Retry(\"EAGAIN\")\n\n } else {\n\n OperationResult::Err(\"Unrecoverable error retrying BPF load\")\n\n }\n\n } else {\n\n OperationResult::Ok(ret as usize)\n\n }\n\n });\n\n\n\n match result {\n\n Ok(size) => Ok(size),\n\n Err(err) => {\n\n if e == 0 {\n\n Err(err.into())\n\n } else {\n\n Err(OxidebpfError::LinuxError(\n", "file_path": "src/bpf/syscall.rs", "rank": 86, "score": 19.834667161863006 }, { "content": " if let Ok((_, fds)) = &mut result {\n\n fds.push(fd);\n\n }\n\n }\n\n Err(e) => {\n\n self.mount_debugfs_if_missing();\n\n match attach_uprobe_debugfs(\n\n self.fd,\n\n attach_point,\n\n is_return,\n\n None,\n\n cpu,\n\n pid,\n\n ) {\n\n Ok((path, fd)) => {\n\n // skip if we already failed\n\n if let Ok((paths, fds)) = &mut result {\n\n paths.push(path);\n\n fds.push(fd);\n\n }\n", "file_path": "src/lib.rs", "rank": 87, "score": 19.045599849670555 }, { "content": " \"Program::attach_probes(); attempting to load unsupported program type: unknown\"\n\n );\n\n Err(OxidebpfError::UnsupportedProgramType)\n\n }\n\n }\n\n }\n\n\n\n pub(crate) fn loaded_as(&mut self, fd: RawFd) {\n\n self.loaded = true;\n\n self.fd = fd;\n\n }\n\n\n\n fn set_fd(&mut self, fd: RawFd) {\n\n self.fd = fd\n\n }\n\n\n\n fn get_fd(&self) -> Result<RawFd, OxidebpfError> {\n\n if self.loaded {\n\n Ok(self.fd)\n\n } else {\n\n Err(OxidebpfError::ProgramNotLoaded)\n\n 
}\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 88, "score": 18.767406569871085 }, { "content": " MultipleErrors(Vec<OxidebpfError>),\n\n UncaughtMountNsError,\n\n BpfProgLoadError((Box<OxidebpfError>, String)),\n\n MapValueSizeMismatch,\n\n MapKeySizeMismatch,\n\n ProgramGroupAlreadyLoaded,\n\n RetryError(String),\n\n LockError,\n\n /// This error is returned when trying to attach a kretprobe with debugfs.\n\n /// There's a chance we need to change the path name and retry, which is what\n\n /// this error indicates.\n\n KretprobeNamingError,\n\n UnknownPerfEvent(u32),\n\n}\n\n\n\nimpl Display for OxidebpfError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n OxidebpfError::NoProgramVersionLoaded(e) => {\n\n for err in e {\n", "file_path": "src/error.rs", "rank": 89, "score": 18.597356083747805 }, { "content": "\n\n info!(\n\n LOGGER.0,\n\n \"attach_kprobe_debugfs(); event_path: {}; is_return: {}\", event_path, is_return\n\n );\n\n\n\n match perf_attach_tracepoint_with_debugfs(fd, event_path.clone(), cpu) {\n\n Err(OxidebpfError::KretprobeNamingError) => {\n\n info!(\n\n LOGGER.0,\n\n \"attach_kprobe_debugfs(); FileIOError - checking if retprobe\"\n\n );\n\n if is_return {\n\n // depending on the kernel version, we may need to have either `kprobe`\n\n // or `kretprobe` as the path\n\n let new_path = event_path.replace(\"kretprobe\", \"kprobe\");\n\n info!(LOGGER.0, \"attach_kprobe_debugfs(); new_path: {}\", new_path);\n\n perf_attach_tracepoint_with_debugfs(fd, new_path, cpu)\n\n } else {\n\n info!(\n", "file_path": "src/perf/syscall.rs", "rank": 90, "score": 18.55093404474831 }, { "content": " info!(\n\n LOGGER.0,\n\n \"BpfInsn::try_from(); invalid instruction length, raw.len(): {}\",\n\n raw.len()\n\n );\n\n return Err(OxidebpfError::InvalidInstructionLength);\n\n }\n\n Ok(unsafe { std::ptr::read(raw.as_ptr() as *const _) })\n\n }\n\n}\n\n\n\nimpl BpfInsn {\n\n pub fn get_src(&self) -> u8 {\n\n (self.regs >> 4) & 
0xf\n\n }\n\n\n\n pub fn set_src(&mut self, val: u8) {\n\n self.regs = (self.regs & 0xf) | (val << 4);\n\n }\n\n\n", "file_path": "src/bpf/mod.rs", "rank": 91, "score": 18.495459044249895 }, { "content": " pub(crate) branch_sample_type: c_ulong, // enum perf_branch_sample_type\n\n pub(crate) sample_regs_user: c_ulong,\n\n pub(crate) sample_stack_user: c_uint,\n\n pub(crate) clockid: c_int,\n\n pub(crate) sample_regs_intr: c_ulong,\n\n pub(crate) aux_watermark: c_uint,\n\n pub(crate) sample_max_stack: c_ushort,\n\n pub(crate) __reserved_2: c_ushort,\n\n pub(crate) aux_sample_size: c_uint,\n\n pub(crate) __reserved_3: c_uint,\n\n}\n\n\n\nimpl Default for PerfEventAttr {\n\n fn default() -> Self {\n\n Self {\n\n p_type: 0,\n\n size: std::mem::size_of::<PerfEventAttr>() as u32,\n\n config: 0,\n\n sample_union: Default::default(),\n\n sample_type: 0,\n", "file_path": "src/perf/mod.rs", "rank": 92, "score": 18.37920440778295 }, { "content": "/// Safe wrapper around `u_perf_event_ioc_set_bpf()`\n\npub(crate) fn perf_event_ioc_set_bpf(perf_fd: RawFd, data: u32) -> Result<i32, OxidebpfError> {\n\n #![allow(clippy::useless_conversion, clippy::redundant_closure)] // fails to compile otherwise\n\n let data_unwrapped = match data.try_into() {\n\n Ok(d) => d,\n\n Err(_e) => 0, // Should be infallible\n\n };\n\n unsafe {\n\n u_perf_event_ioc_set_bpf(perf_fd, data_unwrapped)\n\n .map_err(|e| OxidebpfError::PerfIoctlError(e))\n\n }\n\n}\n\n\n\n/// Safe wrapper around `u_perf_event_ioc_enable()`\n\npub(crate) fn perf_event_ioc_enable(perf_fd: RawFd) -> Result<i32, OxidebpfError> {\n\n unsafe { u_perf_event_ioc_enable(perf_fd).map_err(OxidebpfError::PerfIoctlError) }\n\n}\n\n\n\n/// Safe wrapper around `u_perf_event_ioc_disable()`\n\npub(crate) fn perf_event_ioc_disable(perf_fd: RawFd) -> Result<i32, OxidebpfError> {\n\n unsafe { u_perf_event_ioc_disable(perf_fd).map_err(OxidebpfError::PerfIoctlError) }\n\n}\n\n\n", "file_path": "src/perf/syscall.rs", "rank": 93, "score": 
18.360932652916908 }, { "content": " size: std::mem::size_of::<MapElem>(),\n\n };\n\n unsafe {\n\n sys_bpf(BPF_MAP_UPDATE_ELEM, bpf_attr)?;\n\n }\n\n Ok(())\n\n}\n\n\n\npub(crate) unsafe fn bpf_map_create_with_sized_attr(\n\n bpf_attr: SizedBpfAttr,\n\n) -> Result<RawFd, OxidebpfError> {\n\n let fd = sys_bpf(BPF_MAP_CREATE, bpf_attr)?;\n\n Ok(fd as RawFd)\n\n}\n\n\n\n/// The caller must provide a `size` that indicates the amount of _bytes_ used in `map_config`.\n\n/// See the example for [`sys_bpf`](Fn@sys_bpf).\n\npub(crate) unsafe fn bpf_map_create_with_config(\n\n map_config: MapConfig,\n\n size: usize,\n", "file_path": "src/bpf/syscall.rs", "rank": 94, "score": 18.255904959198592 }, { "content": " LOGGER.0,\n\n \"attach_kprobe_debugfs(); perf_attach_tracepoint_with_debugfs returned an error and probe is not a retprobe - cannot retry - event_path: {}\",\n\n event_path,\n\n );\n\n Err(OxidebpfError::FileIOError)\n\n }\n\n }\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n info!(LOGGER.0, \"attach_kprobe_debugfs(); Other Error: {:?}\", e);\n\n Err(e)\n\n }\n\n }\n\n}\n\n\n\n// SAFETY: the fd returned here is passed up through Program to ProgramVersion,\n\n// which manages the fd lifecycle.\n\npub(crate) fn attach_kprobe(\n\n fd: RawFd,\n\n attach_point: &str,\n", "file_path": "src/perf/syscall.rs", "rank": 95, "score": 18.059802572061535 }, { "content": " /// &[\"sys_ptrace\"],\n\n /// ).syscall(true),\n\n /// Program::new(\n\n /// \"sys_process_vm_writev\",\n\n /// &[\"sys_process_vm_writev\"],\n\n /// ).syscall(true)\n\n /// ];\n\n ///\n\n /// ProgramVersion::new(program_vec);\n\n /// ```\n\n pub fn new(programs: Vec<Program<'a>>) -> Self {\n\n ProgramVersion {\n\n programs,\n\n fds: HashSet::new(),\n\n ev_names: HashSet::new(),\n\n array_maps: HashMap::new(),\n\n hash_maps: HashMap::new(),\n\n polling_delay: 100,\n\n polling_thread_policy: None,\n\n }\n", "file_path": "src/program_version.rs", "rank": 96, "score": 18.040185603566243 }, { "content": " e,\n\n );\n\n 
return Err(e);\n\n }\n\n }\n\n Ok(s) => {\n\n self.ev_names.extend(s.0);\n\n // SAFETY: these fds that came from `p.attach()` are not managed by `p`\n\n self.fds.extend(s.1);\n\n }\n\n }\n\n self.fds.insert(fd);\n\n }\n\n }\n\n\n\n // start perfmap event poller, if one exists\n\n if let Some((tx, _)) = perfmap_opts {\n\n self.event_poller(perfmaps, tx)?;\n\n }\n\n\n", "file_path": "src/program_version.rs", "rank": 97, "score": 18.00780232455446 }, { "content": " fn attach(&mut self) -> Result<(Vec<String>, Vec<RawFd>), OxidebpfError> {\n\n match self.attach_probes() {\n\n Ok(res) => Ok(res),\n\n Err(e) => {\n\n if self.is_syscall {\n\n self.attach_points\n\n .iter_mut()\n\n .for_each(|ap| *ap = format!(\"{}{}\", ARCH_SYSCALL_PREFIX, ap));\n\n\n\n self.attach_probes()\n\n } else {\n\n info!(LOGGER.0, \"Program::attach(); attach error: {:?}\", e);\n\n Err(e)\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn attach_probes(&self) -> Result<(Vec<String>, Vec<RawFd>), OxidebpfError> {\n\n if !self.loaded {\n", "file_path": "src/lib.rs", "rank": 98, "score": 17.951650295191335 }, { "content": "\n\npub(crate) fn perf_event_open_debugfs(\n\n pid: pid_t,\n\n event_type: ProgramType,\n\n offset: u64,\n\n func_name_or_path: &str,\n\n) -> Result<String, OxidebpfError> {\n\n let prefix = match event_type {\n\n ProgramType::Kprobe => \"kprobe\",\n\n ProgramType::Kretprobe => \"kprobe\",\n\n ProgramType::Uprobe => \"uprobe\",\n\n ProgramType::Uretprobe => \"uprobe\",\n\n t => {\n\n info!(\n\n LOGGER.0,\n\n \"perf_event_open_debugfs(); (prefix), unsupported event type: {:?}\", t\n\n );\n\n return Err(OxidebpfError::UnsupportedEventType);\n\n }\n\n };\n", "file_path": "src/perf/syscall.rs", "rank": 99, "score": 17.848349618377647 } ]
Rust
src/travis.rs
pietroalbini/travis-migrate
078cb2acfa7b685ecd2b7d9cf3d982cb7a82944f
use failure::{bail, Error}; use log::{debug, info}; use reqwest::{ header::{HeaderName, AUTHORIZATION, USER_AGENT}, Client, Method, RequestBuilder, }; use std::process::Command; #[derive(serde_derive::Deserialize)] struct PaginationLink { #[serde(rename = "@href")] href: String, } #[derive(serde_derive::Deserialize)] struct Pagination { next: Option<PaginationLink>, } #[derive(serde_derive::Deserialize)] struct Common { #[serde(rename = "@pagination")] pagination: Pagination, } #[derive(serde_derive::Deserialize)] struct Repositories { #[serde(flatten)] common: Common, repositories: Vec<Repository>, } #[derive(serde_derive::Deserialize)] pub(crate) struct Repository { pub(crate) slug: String, migration_status: Option<String>, } #[derive(serde_derive::Deserialize)] struct Crons { #[serde(flatten)] common: Common, crons: Vec<Cron>, } #[derive(serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "snake_case")] pub(crate) enum CronInterval { Daily, Weekly, Monthly, } #[derive(serde_derive::Deserialize)] pub(crate) struct Branch { name: String, } #[derive(serde_derive::Serialize, serde_derive::Deserialize)] pub(crate) struct Cron { #[serde(skip_serializing)] branch: Branch, interval: CronInterval, dont_run_if_recent_build_exists: bool, } pub(crate) struct TravisCI { tld: &'static str, token: String, client: Client, } impl TravisCI { pub(crate) fn new(tld: &'static str, token: Option<String>) -> Result<Self, Error> { let token = if let Some(token) = token { token } else { info!("fetching API token for travis-ci.{}", tld); let output = Command::new("travis") .arg("token") .arg(format!("--{}", tld)) .arg("--no-interactive") .output()?; if !output.status.success() { bail!( "failed to get the travis-ci.{} token: {}", tld, String::from_utf8_lossy(&output.stderr) ); } String::from_utf8(output.stdout)?.trim().to_string() }; Ok(TravisCI { tld, token, client: Client::new(), }) } fn build_request(&self, method: Method, url: &str) -> RequestBuilder { let 
tmp_url; let mut url = url.trim_start_matches('/'); if !url.starts_with("https://") { tmp_url = format!("https://api.travis-ci.{}/{}", self.tld, url); url = &tmp_url; } debug!("{} {}", method, url); self.client .request(method, url) .header(USER_AGENT, "pietroalbini/travis-migrate") .header(AUTHORIZATION, format!("token {}", self.token)) .header(HeaderName::from_static("travis-api-version"), "3") } fn paginated<F>(&self, method: &Method, url: &str, mut f: F) -> Result<(), Error> where F: FnMut(RequestBuilder) -> Result<Common, Error>, { let mut common = f(self.build_request(method.clone(), url))?; while let Some(link) = common.pagination.next { common = f(self.build_request(method.clone(), &link.href))?; } Ok(()) } fn repo_name(&self, name: &str) -> String { name.replace("/", "%2F") } pub(crate) fn repos_to_migrate(&self, login: &str) -> Result<Vec<Repository>, Error> { let mut repos = Vec::new(); self.paginated(&Method::GET, &format!("owner/{}/repos", login), |req| { let mut resp: Repositories = req .form(&[("active_on_org", "true")]) .send()? .error_for_status()? .json()?; repos.append(&mut resp.repositories); Ok(resp.common) })?; Ok(repos) } pub(crate) fn start_migration(&self, repo: &str) -> Result<(), Error> { let _ = self .build_request( Method::POST, &format!("repo/{}/migrate", self.repo_name(repo)), ) .send()? .error_for_status()?; Ok(()) } pub(crate) fn is_migrated(&self, repo: &str) -> Result<bool, Error> { let repo: Repository = self .build_request(Method::GET, &format!("repo/{}", self.repo_name(repo))) .send()? .error_for_status()? 
.json()?; Ok(repo.migration_status.as_ref().map(|s| s.as_str()) == Some("migrated")) } pub(crate) fn list_crons(&self, repo: &str) -> Result<Vec<Cron>, Error> { let mut crons = Vec::new(); self.paginated( &Method::GET, &format!("repo/{}/crons", self.repo_name(repo)), |req| { let mut resp: Crons = req.send()?.error_for_status()?.json()?; crons.append(&mut resp.crons); Ok(resp.common) }, )?; Ok(crons) } pub(crate) fn create_cron(&self, repo: &str, cron: &Cron) -> Result<(), Error> { let _ = self .build_request( Method::POST, &format!( "repo/{}/branch/{}/cron", self.repo_name(repo), cron.branch.name ), ) .json(cron) .send()? .error_for_status()?; Ok(()) } }
use failure::{bail, Error}; use log::{debug, info}; use reqwest::{ header::{HeaderName, AUTHORIZATION, USER_AGENT}, Client, Method, RequestBuilder, }; use std::process::Command; #[derive(serde_derive::Deserialize)] struct PaginationLink { #[serde(rename = "@href")] href: String, } #[derive(serde_derive::Deserialize)] struct Pagination { next: Option<PaginationLink>, } #[derive(serde_derive::Deserialize)] struct Common { #[serde(rename = "@pagination")] pagination: Pagination, } #[derive(serde_derive::Deserialize)] struct Repositories { #[serde(flatten)] common: Common, repositories: Vec<Repository>, } #[derive(serde_derive::Deserialize)] pub(crate) struct Repository { pub(crate) slug: String, migration_status: Option<String>, } #[derive(serde_derive::Deserialize)] struct Crons { #[serde(flatten)] common: Common, crons: Vec<Cron>, } #[derive(serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "snake_case")] pub(crate) enum CronInterval { Daily, Weekly, Monthly, } #[derive(serde_derive::Deserialize)] pub(crate) struct Branch { name: String, } #[derive(serde_derive::Serialize, serde_derive::Deserialize)] pub(crate) struct Cron { #[serde(skip_serializing)] branch: Branch, interval: CronInterval, dont_run_if_recent_build_exists: bool, } pub(crate) struct TravisCI { tld: &'static str, token: String, client: Client, } impl TravisCI { pub(crate) fn new(tld: &'static str, token: Option<String>) -> Result<Self, Error> { let token = if let Some(token) = token { token } else { info!("fetching API token for travis-ci.{}", tld); let output = Command::new("travis") .arg("token") .arg(format!("--{}", tld)) .arg("--no-interactive") .output()?; if !output.status.success() { bail!( "failed to get the travis-ci.{} token: {}", tld, String::from_utf8_lossy(&output.stderr) ); } String::from_utf8(output.stdout)?.trim().to_string() }; Ok(TravisCI { tld, token, client: Client::new(), }) } fn build_request(&self, method: Method, url: &str) -> RequestBuilder { let 
tmp_url; let mut url = url.trim_start_matches('/'); if !url.starts_with("https://") { tmp_url = format!("https://api.travis-ci.{}/{}", self.tld, url); url = &tmp_url; } debug!("{} {}", method, url); self.client .request(method, url) .header(USER_AGENT, "pietroalbini/travis-migrate") .header(AUTHORIZATION, format!("token {}", self.token)) .header(HeaderName::from_static("travis-api-version"), "3") } fn paginated<F>(&self, method: &Method, url: &str, mut f: F) -> Result<(), Error> where F: FnMut(RequestBuilder) -> Result<Common, Error>, { let mut common = f(self.build_request(method.clone(), url))?; while let Some(link) = common.pagination.next { common = f(self.build_request(method.clone(), &link.href))?; } Ok(()) } fn repo_name(&self, name: &str) -> String { name.replace("/", "%2F") }
pub(crate) fn start_migration(&self, repo: &str) -> Result<(), Error> { let _ = self .build_request( Method::POST, &format!("repo/{}/migrate", self.repo_name(repo)), ) .send()? .error_for_status()?; Ok(()) } pub(crate) fn is_migrated(&self, repo: &str) -> Result<bool, Error> { let repo: Repository = self .build_request(Method::GET, &format!("repo/{}", self.repo_name(repo))) .send()? .error_for_status()? .json()?; Ok(repo.migration_status.as_ref().map(|s| s.as_str()) == Some("migrated")) } pub(crate) fn list_crons(&self, repo: &str) -> Result<Vec<Cron>, Error> { let mut crons = Vec::new(); self.paginated( &Method::GET, &format!("repo/{}/crons", self.repo_name(repo)), |req| { let mut resp: Crons = req.send()?.error_for_status()?.json()?; crons.append(&mut resp.crons); Ok(resp.common) }, )?; Ok(crons) } pub(crate) fn create_cron(&self, repo: &str, cron: &Cron) -> Result<(), Error> { let _ = self .build_request( Method::POST, &format!( "repo/{}/branch/{}/cron", self.repo_name(repo), cron.branch.name ), ) .json(cron) .send()? .error_for_status()?; Ok(()) } }
pub(crate) fn repos_to_migrate(&self, login: &str) -> Result<Vec<Repository>, Error> { let mut repos = Vec::new(); self.paginated(&Method::GET, &format!("owner/{}/repos", login), |req| { let mut resp: Repositories = req .form(&[("active_on_org", "true")]) .send()? .error_for_status()? .json()?; repos.append(&mut resp.repositories); Ok(resp.common) })?; Ok(repos) }
function_block-full_function
[ { "content": "fn app() -> Result<(), Error> {\n\n let args = CLI::from_args();\n\n\n\n match args {\n\n CLI::List { account } => {\n\n let travis_com = TravisCI::new(\"com\", std::env::var(\"TRAVIS_TOKEN_COM\").ok())?;\n\n let repos = travis_com.repos_to_migrate(&account)?;\n\n if repos.is_empty() {\n\n info!(\"no repos to migrate found\");\n\n } else {\n\n info!(\"repos to migrate:\");\n\n for repo in &repos {\n\n info!(\"{}\", repo.slug);\n\n }\n\n }\n\n }\n\n CLI::MigrateRepo { slug } => {\n\n let travis_org = TravisCI::new(\"org\", std::env::var(\"TRAVIS_TOKEN_ORG\").ok())?;\n\n let travis_com = TravisCI::new(\"com\", std::env::var(\"TRAVIS_TOKEN_COM\").ok())?;\n\n let github = GitHub::new(std::env::var(\"GITHUB_TOKEN\")?);\n", "file_path": "src/main.rs", "rank": 0, "score": 70179.09013377136 }, { "content": "fn migrate_protection_contexts(contexts: &[String]) -> Vec<&str> {\n\n contexts\n\n .iter()\n\n .map(|ctx| match ctx.as_str() {\n\n \"continuos-integration/travis-ci\" => \"Travis CI - Branch\",\n\n \"continuos-integration/travis-ci/push\" => \"Travis CI - Branch\",\n\n \"continuos-integration/travis-ci/pr\" => \"Travis CI - Pull Request\",\n\n other => other,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 55953.374095212625 }, { "content": "enum CLI {\n\n #[structopt(name = \"list\", about = \"list repositories that can be migrated\")]\n\n List {\n\n #[structopt(name = \"account\")]\n\n account: String,\n\n },\n\n #[structopt(name = \"migrate-repo\", about = \"migrate a repository\")]\n\n MigrateRepo {\n\n #[structopt(name = \"slug\")]\n\n slug: String,\n\n },\n\n #[structopt(\n\n name = \"migrate-account\",\n\n about = \"migrate all repositories in an account\"\n\n )]\n\n MigrateAccount {\n\n #[structopt(name = \"account\")]\n\n account: String,\n\n #[structopt(name = \"exclude\", long = \"exclude\", multiple = true)]\n\n exclude: Vec<String>,\n\n },\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 
29348.843154198625 }, { "content": "fn main() {\n\n let mut logger = env_logger::Builder::new();\n\n logger.filter_module(\"travis_migrate\", log::LevelFilter::Info);\n\n if let Ok(content) = std::env::var(\"RUST_LOG\") {\n\n logger.parse(&content);\n\n }\n\n logger.init();\n\n\n\n if let Err(err) = app() {\n\n error!(\"{}\", err);\n\n for cause in err.iter_causes() {\n\n error!(\"caused by: {}\", cause);\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 8, "score": 27499.617011360562 }, { "content": "fn migrate(\n\n travis_org: &TravisCI,\n\n travis_com: &TravisCI,\n\n github: &GitHub,\n\n repo: &str,\n\n) -> Result<(), Error> {\n\n let crons = travis_org.list_crons(repo)?;\n\n info!(\"{}: found {} cron(s) to migrate\", repo, crons.len());\n\n\n\n info!(\"{}: migrating...\", repo);\n\n travis_com.start_migration(&repo)?;\n\n while !travis_com.is_migrated(&repo)? {\n\n std::thread::sleep(Duration::from_millis(100));\n\n }\n\n info!(\"{}: migration complete\", repo);\n\n\n\n if !crons.is_empty() {\n\n for cron in &crons {\n\n travis_com.create_cron(repo, cron)?;\n\n }\n", "file_path": "src/main.rs", "rank": 9, "score": 27499.617011360562 }, { "content": " let tmp_url;\n\n let mut url = url.trim_start_matches('/');\n\n if !url.starts_with(\"https://\") {\n\n tmp_url = format!(\"https://api.github.com/{}\", url);\n\n url = &tmp_url;\n\n }\n\n debug!(\"{} {}\", method, url);\n\n self.client\n\n .request(method, url)\n\n .header(USER_AGENT, \"pietroalbini/travis-migrate\")\n\n .header(AUTHORIZATION, format!(\"token {}\", self.token))\n\n }\n\n\n\n fn paginated<F>(&self, method: &Method, url: String, mut f: F) -> Result<(), Error>\n\n where\n\n F: FnMut(Response) -> Result<(), Error>,\n\n {\n\n let mut next = Some(url);\n\n while let Some(next_url) = next.take() {\n\n let resp = self\n", "file_path": "src/github.rs", "rank": 12, "score": 22.58905502063625 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn protected_branches(&self, repo: &str) -> 
Result<Vec<Branch>, Error> {\n\n let url = format!(\"repos/{}/branches?protected=true\", repo);\n\n let mut branches = Vec::new();\n\n self.paginated(&Method::GET, url, |mut resp| {\n\n let mut content: Vec<Branch> = resp.json()?;\n\n branches.append(&mut content);\n\n Ok(())\n\n })?;\n\n Ok(branches)\n\n }\n\n\n\n pub(crate) fn set_required_status_checks(\n\n &self,\n\n repo: &str,\n\n branch: &str,\n\n contexts: &[&str],\n", "file_path": "src/github.rs", "rank": 16, "score": 16.126436566016107 }, { "content": "use failure::Error;\n\nuse hyper_old_types::header::{Link, RelationType};\n\nuse log::debug;\n\nuse reqwest::{\n\n header::{AUTHORIZATION, LINK, USER_AGENT},\n\n Client, Method, RequestBuilder, Response,\n\n};\n\nuse serde_derive::Deserialize;\n\nuse serde_json::json;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct Branch {\n\n pub(crate) name: String,\n\n pub(crate) protection: BranchProtection,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct BranchProtection {\n\n pub(crate) required_status_checks: RequiredStatusChecks,\n\n}\n", "file_path": "src/github.rs", "rank": 19, "score": 13.356112969011317 }, { "content": "\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct RequiredStatusChecks {\n\n pub(crate) contexts: Vec<String>,\n\n}\n\n\n\npub(crate) struct GitHub {\n\n token: String,\n\n client: Client,\n\n}\n\n\n\nimpl GitHub {\n\n pub(crate) fn new(token: String) -> Self {\n\n GitHub {\n\n token,\n\n client: Client::new(),\n\n }\n\n }\n\n\n\n fn build_request(&self, method: Method, url: &str) -> RequestBuilder {\n", "file_path": "src/github.rs", "rank": 20, "score": 13.114644810078195 }, { "content": " ) -> Result<(), Error> {\n\n let url = format!(\n\n \"repos/{}/branches/{}/protection/required_status_checks\",\n\n repo, branch\n\n );\n\n self.build_request(Method::PATCH, &url)\n\n .json(&json!({\n\n \"contexts\": contexts,\n\n }))\n\n .send()?\n\n .error_for_status()?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/github.rs", 
"rank": 22, "score": 10.359025169363239 }, { "content": " .build_request(method.clone(), &next_url)\n\n .send()?\n\n .error_for_status()?;\n\n\n\n // Extract the next page\n\n if let Some(links) = resp.headers().get(LINK) {\n\n let links: Link = links.to_str()?.parse()?;\n\n for link in links.values() {\n\n if link\n\n .rel()\n\n .map(|r| r.iter().any(|r| *r == RelationType::Next))\n\n .unwrap_or(false)\n\n {\n\n next = Some(link.link().to_string());\n\n break;\n\n }\n\n }\n\n }\n\n\n\n f(resp)?;\n", "file_path": "src/github.rs", "rank": 23, "score": 10.04597526548507 }, { "content": " info!(\"{}: restored {} cron(s)\", repo, crons.len());\n\n }\n\n\n\n for branch in github.protected_branches(repo)?.into_iter() {\n\n let contexts = branch.protection.required_status_checks.contexts;\n\n let new_contexts = migrate_protection_contexts(&contexts);\n\n if contexts != new_contexts {\n\n github.set_required_status_checks(repo, &branch.name, &new_contexts)?;\n\n info!(\n\n \"{}: updated required status checks for branch `{}`\",\n\n repo, branch.name\n\n );\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 8.842471272729764 }, { "content": "#![allow(clippy::new_ret_no_self)]\n\n\n\nmod github;\n\nmod travis;\n\n\n\nuse crate::{github::GitHub, travis::TravisCI};\n\nuse failure::Error;\n\nuse log::{error, info};\n\nuse std::time::Duration;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt)]\n\n#[structopt(\n\n name = \"travis-migrate\",\n\n about = \"Migrate from travis-ci.org to travis-ci.com\"\n\n)]\n", "file_path": "src/main.rs", "rank": 25, "score": 8.317612488212873 }, { "content": " migrate(&travis_org, &travis_com, &github, &slug)?;\n\n }\n\n CLI::MigrateAccount { account, exclude } => {\n\n let travis_org = TravisCI::new(\"org\", std::env::var(\"TRAVIS_TOKEN_ORG\").ok())?;\n\n let travis_com = TravisCI::new(\"com\", std::env::var(\"TRAVIS_TOKEN_COM\").ok())?;\n\n let github = 
GitHub::new(std::env::var(\"GITHUB_TOKEN\")?);\n\n let repos = travis_com.repos_to_migrate(&account)?;\n\n if repos.is_empty() {\n\n info!(\"no repos to migrate found\");\n\n } else {\n\n info!(\"{} repo(s) to migrate\", repos.len());\n\n for repo in &repos {\n\n if exclude.contains(&repo.slug) {\n\n info!(\"skipping {}\", repo.slug);\n\n } else {\n\n migrate(&travis_org, &travis_com, &github, &repo.slug)?;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 7.232390281200527 }, { "content": "## API authentication keys\n\n\n\nThe tool needs the following API keys:\n\n\n\n* `GITHUB_TOKEN`: a personal access token of a GitHub account that has **full\n\n admin access** to all the repositories\n\n* `TRAVIS_TOKEN_ORG`: the `travis-ci.org` API key of the account you want to\n\n use to perform the migration\n\n* `TRAVIS_TOKEN_COM`: the `travis-ci.com` API key of the account you want to\n\n use to perform the migration\n\n\n\nIf you have the [Travis CLI][travis-cli] installed you can omit the Travis\n\nenvironment variables, since the tool will call the CLI to fetch the tokens\n\nautomatically. 
Also note not all the subcommands require all the environment\n\nvariables to be present (listing repositories available to migrate only\n\nrequires `TRAVIS_TOKEN_PRO`).\n\n\n\n[travis-cli]: https://github.com/travis-ci/travis.rb\n\n\n\n## Usage\n\n\n\nYou can list all the repositories that can be migrated in an\n\naccount/organization with:\n\n\n\n```\n\n$ cargo run list rust-lang\n\n```\n\n\n\nYou can migrate a single repository with:\n\n\n\n```\n\n$ cargo run migrate-repo rust-lang/rust\n\n```\n\n\n\nYou can migrate all the repositories in an account/organization with:\n\n\n\n```\n\n$ cargo run migrate-account rust-lang\n\n```\n\n\n\nYou can also exclude some repositories while migrating a whole\n\naccount/organization:\n\n\n\n```\n\n$ cargo run migrate-account rust-lang --exclude rust-lang/rust --exclude rust-lang/cargo\n\n```\n\n\n\nBefore you migrate you need to have the [Travis CI][travis-app] GitHub app\n\ninstalled on your account, and you need to give it access to the repositories\n\nyou want to migrate.\n\n\n\n[travis-app]: https://github.com/marketplace/travis-ci\n", "file_path": "README.md", "rank": 27, "score": 6.102198841803016 }, { "content": "<h1 align=\"center\">travis-ci.org to travis-ci.com migrator</h1>\n\n\n\n<p align=\"center\"><b>:warning::warning: &nbsp;\n\nIt's not possible to migrate repositories back to travis-ci.org\n\n&nbsp; :warning::warning:</b></p>\n\n\n\n`travis-migrate` is a tool that automatically migrates repositories or whole\n\naccounts/organizations from [travis-ci.org][org] to [travis-ci.com][com], while\n\ntrying to preserve as much data and settings as possible. 
It was built by the\n\nRust Infrastructure team to migrate all the repositories in our organizations.\n\n\n\nIn addition to the [migration steps performed by Travis itself][data-migrated],\n\nthe tool:\n\n\n\n* Migrates all the cron jobs configured in the repository\n\n* Migrates the required status checks in the repository's protected branches\n\n\n\nYou need Rust 1.31.0 or greater in order to use this tool. Made by [Pietro\n\nAlbini](https://www.pietroalbini.org) and released under the MIT license.\n\n\n\n> When the tool was written the Migration API was limited to beta testers. If\n\n> you can't access it you need to contact Travis Support and ask them to enable\n\n> it on the users/organizations you want to migrate.\n\n\n\n[data-migrated]: https://docs.travis-ci.com/user/open-source-repository-migration/#what-information-will-be-transferred-to-travis-cicom\n\n[org]: https://travis-ci.org\n\n[com]: https://travis-ci.com\n\n\n", "file_path": "README.md", "rank": 28, "score": 4.634293872633293 } ]
Rust
ton_client/src/crypto/boxes/encryption_box/aes.rs
markgenuine/TON-SDK
2b49c8270a34eab1a66e2eac7764b69568e7d324
/* * Copyright 2018-2021 TON Labs LTD. * * Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use * this file except in compliance with the License. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific TON DEV software governing permissions and * limitations under the License. */ use aes::{Aes128, Aes192, Aes256, BlockCipher, BlockDecrypt, BlockEncrypt, NewBlockCipher}; use block_modes::{BlockMode, Cbc}; use crate::crypto::Error; use crate::encoding::{base64_decode, hex_decode}; use crate::error::ClientResult; use super::{CipherMode, EncryptionBox, EncryptionBoxInfo}; #[derive(Serialize, Deserialize, Clone, Debug, ApiType, Default)] pub struct AesParams { pub mode: CipherMode, pub key: String, pub iv: Option<String>, } #[derive(Serialize, Deserialize, Clone, Debug, ApiType, Default)] pub struct AesInfo { pub mode: CipherMode, pub iv: Option<String>, } pub(crate) struct AesEncryptionBox { key: Vec<u8>, mode: CipherMode, iv: Vec<u8>, } impl AesEncryptionBox { pub fn new(params: AesParams) -> ClientResult<Self> { let iv_required = match params.mode { CipherMode::CBC => true, _ => return Err(Error::unsupported_cipher_mode(&format!("{:?}", params.mode))) }; if iv_required && params.iv.is_none() { return Err(Error::iv_required(&params.mode)); } let key = hex_decode(&params.key)?; if key.len() != 16 && key.len() != 24 && key.len() != 32 { return Err(Error::invalid_key_size(key.len(), &[128, 192, 256])); } let iv = params.iv .map(|string| { let iv = hex_decode(&string)?; if iv.len() == aes::BLOCK_SIZE { Ok(iv) } else { Err(Error::invalid_iv_size(iv.len(), aes::BLOCK_SIZE)) } }) .transpose()? 
.unwrap_or_default(); Ok(Self { key, iv, mode: params.mode }) } fn create_block_mode<C, B>(key: &[u8], iv: &[u8]) -> ClientResult<B> where C: BlockCipher + BlockEncrypt + BlockDecrypt + NewBlockCipher, B: BlockMode<C, block_modes::block_padding::ZeroPadding> { B::new_from_slices(key, iv) .map_err(|err| Error::cannot_create_cipher(err)) } fn encrypt_data<'a, C, B>(key: &[u8], iv: &[u8], data: &'a mut [u8], size: usize) -> ClientResult<&'a [u8]> where C: BlockCipher + BlockEncrypt + BlockDecrypt + NewBlockCipher, B: BlockMode<C, block_modes::block_padding::ZeroPadding> { Self::create_block_mode::<C, B>(key, iv)? .encrypt(data, size) .map_err(|err| Error::encrypt_data_error(format!("{:#?}", err))) } fn decrypt_data<C, B>(key: &[u8], iv: &[u8], data: &mut [u8]) -> ClientResult<()> where C: BlockCipher + BlockEncrypt + BlockDecrypt + NewBlockCipher, B: BlockMode<C, block_modes::block_padding::ZeroPadding> { Self::create_block_mode::<C, B>(key, iv)? .decrypt(data) .map_err(|err| Error::decrypt_data_error(format!("{:#?}", err))) .map(|_| ()) } fn decode_base64_aligned(data: &str, align: usize) -> ClientResult<(Vec<u8>, usize)> { let data_size = (data.len() + 3) / 4 * 3; let aligned_size = (data_size + align - 1) & !(align - 1); let mut vec = vec![0u8; aligned_size]; let size = base64::decode_config_slice(data, base64::STANDARD, &mut vec) .map_err(|err| crate::client::Error::invalid_base64(data, err))?; Ok((vec, size)) } } #[async_trait::async_trait] impl EncryptionBox for AesEncryptionBox { async fn get_info(&self) -> ClientResult<EncryptionBoxInfo> { let iv = if self.iv.len() != 0 { Some(hex::encode(&self.iv)) } else { None }; let aes_info = AesInfo { mode: self.mode.clone(), iv }; Ok(EncryptionBoxInfo { algorithm: Some("AES".to_owned()), hdpath: None, public: None, options: Some(json!(aes_info)) }) } async fn encrypt(&self, data: &String) -> ClientResult<String> { let (mut data, size) = Self::decode_base64_aligned(data, aes::BLOCK_SIZE)?; let result = match 
(self.key.len(), &self.mode) { (16, CipherMode::CBC) => Self::encrypt_data::<Aes128, Cbc<Aes128, _>>(&self.key, &self.iv, &mut data, size)?, (24, CipherMode::CBC) => Self::encrypt_data::<Aes192, Cbc<Aes192, _>>(&self.key, &self.iv, &mut data, size)?, (32, CipherMode::CBC) => Self::encrypt_data::<Aes256, Cbc<Aes256, _>>(&self.key, &self.iv, &mut data, size)?, _ => return Err(Error::unsupported_cipher_mode(&format!("{:?}", self.mode))), }; Ok(base64::encode(result)) } async fn decrypt(&self, data: &String) -> ClientResult<String> { let mut data = base64_decode(data)?; match (self.key.len(), &self.mode) { (16, CipherMode::CBC) => Self::decrypt_data::<Aes128, Cbc<Aes128, _>>(&self.key, &self.iv, &mut data)?, (24, CipherMode::CBC) => Self::decrypt_data::<Aes192, Cbc<Aes192, _>>(&self.key, &self.iv, &mut data)?, (32, CipherMode::CBC) => Self::decrypt_data::<Aes256, Cbc<Aes256, _>>(&self.key, &self.iv, &mut data)?, _ => return Err(Error::unsupported_cipher_mode(&format!("{:?}", self.mode))), } Ok(base64::encode(&data)) } }
/* * Copyright 2018-2021 TON Labs LTD. * * Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use * this file except in compliance with the License. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOU
Aes128, Aes192, Aes256, BlockCipher, BlockDecrypt, BlockEncrypt, NewBlockCipher}; use block_modes::{BlockMode, Cbc}; use crate::crypto::Error; use crate::encoding::{base64_decode, hex_decode}; use crate::error::ClientResult; use super::{CipherMode, EncryptionBox, EncryptionBoxInfo}; #[derive(Serialize, Deserialize, Clone, Debug, ApiType, Default)] pub struct AesParams { pub mode: CipherMode, pub key: String, pub iv: Option<String>, } #[derive(Serialize, Deserialize, Clone, Debug, ApiType, Default)] pub struct AesInfo { pub mode: CipherMode, pub iv: Option<String>, } pub(crate) struct AesEncryptionBox { key: Vec<u8>, mode: CipherMode, iv: Vec<u8>, } impl AesEncryptionBox { pub fn new(params: AesParams) -> ClientResult<Self> { let iv_required = match params.mode { CipherMode::CBC => true, _ => return Err(Error::unsupported_cipher_mode(&format!("{:?}", params.mode))) }; if iv_required && params.iv.is_none() { return Err(Error::iv_required(&params.mode)); } let key = hex_decode(&params.key)?; if key.len() != 16 && key.len() != 24 && key.len() != 32 { return Err(Error::invalid_key_size(key.len(), &[128, 192, 256])); } let iv = params.iv .map(|string| { let iv = hex_decode(&string)?; if iv.len() == aes::BLOCK_SIZE { Ok(iv) } else { Err(Error::invalid_iv_size(iv.len(), aes::BLOCK_SIZE)) } }) .transpose()? .unwrap_or_default(); Ok(Self { key, iv, mode: params.mode }) } fn create_block_mode<C, B>(key: &[u8], iv: &[u8]) -> ClientResult<B> where C: BlockCipher + BlockEncrypt + BlockDecrypt + NewBlockCipher, B: BlockMode<C, block_modes::block_padding::ZeroPadding> { B::new_from_slices(key, iv) .map_err(|err| Error::cannot_create_cipher(err)) } fn encrypt_data<'a, C, B>(key: &[u8], iv: &[u8], data: &'a mut [u8], size: usize) -> ClientResult<&'a [u8]> where C: BlockCipher + BlockEncrypt + BlockDecrypt + NewBlockCipher, B: BlockMode<C, block_modes::block_padding::ZeroPadding> { Self::create_block_mode::<C, B>(key, iv)? 
.encrypt(data, size) .map_err(|err| Error::encrypt_data_error(format!("{:#?}", err))) } fn decrypt_data<C, B>(key: &[u8], iv: &[u8], data: &mut [u8]) -> ClientResult<()> where C: BlockCipher + BlockEncrypt + BlockDecrypt + NewBlockCipher, B: BlockMode<C, block_modes::block_padding::ZeroPadding> { Self::create_block_mode::<C, B>(key, iv)? .decrypt(data) .map_err(|err| Error::decrypt_data_error(format!("{:#?}", err))) .map(|_| ()) } fn decode_base64_aligned(data: &str, align: usize) -> ClientResult<(Vec<u8>, usize)> { let data_size = (data.len() + 3) / 4 * 3; let aligned_size = (data_size + align - 1) & !(align - 1); let mut vec = vec![0u8; aligned_size]; let size = base64::decode_config_slice(data, base64::STANDARD, &mut vec) .map_err(|err| crate::client::Error::invalid_base64(data, err))?; Ok((vec, size)) } } #[async_trait::async_trait] impl EncryptionBox for AesEncryptionBox { async fn get_info(&self) -> ClientResult<EncryptionBoxInfo> { let iv = if self.iv.len() != 0 { Some(hex::encode(&self.iv)) } else { None }; let aes_info = AesInfo { mode: self.mode.clone(), iv }; Ok(EncryptionBoxInfo { algorithm: Some("AES".to_owned()), hdpath: None, public: None, options: Some(json!(aes_info)) }) } async fn encrypt(&self, data: &String) -> ClientResult<String> { let (mut data, size) = Self::decode_base64_aligned(data, aes::BLOCK_SIZE)?; let result = match (self.key.len(), &self.mode) { (16, CipherMode::CBC) => Self::encrypt_data::<Aes128, Cbc<Aes128, _>>(&self.key, &self.iv, &mut data, size)?, (24, CipherMode::CBC) => Self::encrypt_data::<Aes192, Cbc<Aes192, _>>(&self.key, &self.iv, &mut data, size)?, (32, CipherMode::CBC) => Self::encrypt_data::<Aes256, Cbc<Aes256, _>>(&self.key, &self.iv, &mut data, size)?, _ => return Err(Error::unsupported_cipher_mode(&format!("{:?}", self.mode))), }; Ok(base64::encode(result)) } async fn decrypt(&self, data: &String) -> ClientResult<String> { let mut data = base64_decode(data)?; match (self.key.len(), &self.mode) { (16, 
CipherMode::CBC) => Self::decrypt_data::<Aes128, Cbc<Aes128, _>>(&self.key, &self.iv, &mut data)?, (24, CipherMode::CBC) => Self::decrypt_data::<Aes192, Cbc<Aes192, _>>(&self.key, &self.iv, &mut data)?, (32, CipherMode::CBC) => Self::decrypt_data::<Aes256, Cbc<Aes256, _>>(&self.key, &self.iv, &mut data)?, _ => return Err(Error::unsupported_cipher_mode(&format!("{:?}", self.mode))), } Ok(base64::encode(&data)) } }
T WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific TON DEV software governing permissions and * limitations under the License. */ use aes::{
random
[]
Rust
tezos/api/src/environment.rs
Kyras/tezedge
98bc9991765682d077347575781afb451d147492
use std::collections::HashMap; use std::str::FromStr; use enum_iterator::IntoEnumIterator; use serde::{Deserialize, Serialize}; use lazy_static::lazy_static; use crate::ffi::{GenesisChain, ProtocolOverrides}; lazy_static! { pub static ref TEZOS_ENV: HashMap<TezosEnvironment, TezosEnvironmentConfiguration> = init(); } #[derive(Serialize, Deserialize, Copy, Clone, Debug, PartialEq, Eq, Hash, IntoEnumIterator)] pub enum TezosEnvironment { Alphanet, Babylonnet, Mainnet, Zeronet, } #[derive(Debug, Clone)] pub struct ParseTezosEnvironmentError(String); impl FromStr for TezosEnvironment { type Err = ParseTezosEnvironmentError; fn from_str(s: &str) -> Result<Self, Self::Err> { match s.to_ascii_lowercase().as_str() { "alphanet" => Ok(TezosEnvironment::Alphanet), "babylonnet" | "babylon" => Ok(TezosEnvironment::Babylonnet), "mainnet" => Ok(TezosEnvironment::Mainnet), "zeronet" => Ok(TezosEnvironment::Zeronet), _ => Err(ParseTezosEnvironmentError(format!("Invalid variant name: {}", s))) } } } fn init() -> HashMap<TezosEnvironment, TezosEnvironmentConfiguration> { let mut env: HashMap<TezosEnvironment, TezosEnvironmentConfiguration> = HashMap::new(); env.insert(TezosEnvironment::Alphanet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2018-11-30T15:30:56Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe".to_string(), protocol: "Ps6mwMrF2ER2s51cp9yYpjDcuzQjsc2yAz8bQsRgdaRxw4Fk95H".to_string(), }, bootstrap_lookup_addresses: vec![ "boot.tzalpha.net".to_string(), "bootalpha.tzbeta.net".to_string() ], version: "TEZOS_ALPHANET_2018-11-30T15:30:56Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![], voted_protocol_overrides: vec![], }, }); env.insert(TezosEnvironment::Babylonnet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2019-09-27T07:43:32Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesisd1f7bcGMoXy".to_string(), protocol: 
"PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV".to_string(), }, bootstrap_lookup_addresses: vec![ "35.246.251.120".to_string(), "34.89.154.253".to_string(), "babylonnet.kaml.fr".to_string(), "tezaria.com".to_string() ], version: "TEZOS_ALPHANET_BABYLON_2019-09-27T07:43:32Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![], voted_protocol_overrides: vec![], }, }); env.insert(TezosEnvironment::Mainnet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2018-06-30T16:07:32Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesisf79b5d1CoW2".to_string(), protocol: "Ps9mPmXaRzmzk35gbAYNCAw6UXdE2qoABTHbN2oEEc1qM7CwT9P".to_string(), }, bootstrap_lookup_addresses: vec![ "boot.tzbeta.net".to_string() ], version: "TEZOS_BETANET_2018-06-30T16:07:32Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![ (28082 as i32, "PsYLVpVvgbLhAhoqAkMFUo6gudkJ9weNXhUYCiLDzcUpFpkk8Wt".to_string()), (204761 as i32, "PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP".to_string()) ], voted_protocol_overrides: vec![ ("PsBABY5HQTSkA4297zNHfsZNKtxULfL18y95qb3m53QJiXGmrbU".to_string(), "PsBabyM1eUXZseaJdmXFApDSBqj8YBfwELoxZHHW77EMcAbbwAS".to_string()) ], }, }); env.insert(TezosEnvironment::Zeronet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2019-08-06T15:18:56Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesiscde8db4cX94".to_string(), protocol: "PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV".to_string(), }, bootstrap_lookup_addresses: vec![ "bootstrap.zeronet.fun".to_string(), "bootzero.tzbeta.net".to_string() ], version: "TEZOS_ZERONET_2019-08-06T15:18:56Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![], voted_protocol_overrides: vec![], }, }); env } pub struct TezosEnvironmentConfiguration { pub genesis: GenesisChain, pub bootstrap_lookup_addresses: Vec<String>, pub version: String, pub protocol_overrides: ProtocolOverrides, 
}
use std::collections::HashMap; use std::str::FromStr; use enum_iterator::IntoEnumIterator; use serde::{Deserialize, Serialize}; use lazy_static::lazy_static; use crate::ffi::{GenesisChain, ProtocolOverrides}; lazy_static! { pub static ref TEZOS_ENV: HashMap<TezosEnvironment, TezosEnvironmentConfiguration> = init(); } #[derive(Serialize, Deserialize, Copy, Clone, Debug, PartialEq, Eq, Hash, IntoEnumIterator)] pub enum TezosEnvironment { Alphanet, Babylonnet, Mainnet, Zeronet, } #[derive(Debug, Clone)] pub struct ParseTezosEnvironmentError(String); impl FromStr for TezosEnvironment { type Err = ParseTezosEnvironmentError;
} fn init() -> HashMap<TezosEnvironment, TezosEnvironmentConfiguration> { let mut env: HashMap<TezosEnvironment, TezosEnvironmentConfiguration> = HashMap::new(); env.insert(TezosEnvironment::Alphanet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2018-11-30T15:30:56Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesisb83baZgbyZe".to_string(), protocol: "Ps6mwMrF2ER2s51cp9yYpjDcuzQjsc2yAz8bQsRgdaRxw4Fk95H".to_string(), }, bootstrap_lookup_addresses: vec![ "boot.tzalpha.net".to_string(), "bootalpha.tzbeta.net".to_string() ], version: "TEZOS_ALPHANET_2018-11-30T15:30:56Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![], voted_protocol_overrides: vec![], }, }); env.insert(TezosEnvironment::Babylonnet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2019-09-27T07:43:32Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesisd1f7bcGMoXy".to_string(), protocol: "PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV".to_string(), }, bootstrap_lookup_addresses: vec![ "35.246.251.120".to_string(), "34.89.154.253".to_string(), "babylonnet.kaml.fr".to_string(), "tezaria.com".to_string() ], version: "TEZOS_ALPHANET_BABYLON_2019-09-27T07:43:32Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![], voted_protocol_overrides: vec![], }, }); env.insert(TezosEnvironment::Mainnet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2018-06-30T16:07:32Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesisf79b5d1CoW2".to_string(), protocol: "Ps9mPmXaRzmzk35gbAYNCAw6UXdE2qoABTHbN2oEEc1qM7CwT9P".to_string(), }, bootstrap_lookup_addresses: vec![ "boot.tzbeta.net".to_string() ], version: "TEZOS_BETANET_2018-06-30T16:07:32Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![ (28082 as i32, "PsYLVpVvgbLhAhoqAkMFUo6gudkJ9weNXhUYCiLDzcUpFpkk8Wt".to_string()), (204761 as i32, 
"PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP".to_string()) ], voted_protocol_overrides: vec![ ("PsBABY5HQTSkA4297zNHfsZNKtxULfL18y95qb3m53QJiXGmrbU".to_string(), "PsBabyM1eUXZseaJdmXFApDSBqj8YBfwELoxZHHW77EMcAbbwAS".to_string()) ], }, }); env.insert(TezosEnvironment::Zeronet, TezosEnvironmentConfiguration { genesis: GenesisChain { time: "2019-08-06T15:18:56Z".to_string(), block: "BLockGenesisGenesisGenesisGenesisGenesiscde8db4cX94".to_string(), protocol: "PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV".to_string(), }, bootstrap_lookup_addresses: vec![ "bootstrap.zeronet.fun".to_string(), "bootzero.tzbeta.net".to_string() ], version: "TEZOS_ZERONET_2019-08-06T15:18:56Z".to_string(), protocol_overrides: ProtocolOverrides { forced_protocol_upgrades: vec![], voted_protocol_overrides: vec![], }, }); env } pub struct TezosEnvironmentConfiguration { pub genesis: GenesisChain, pub bootstrap_lookup_addresses: Vec<String>, pub version: String, pub protocol_overrides: ProtocolOverrides, }
fn from_str(s: &str) -> Result<Self, Self::Err> { match s.to_ascii_lowercase().as_str() { "alphanet" => Ok(TezosEnvironment::Alphanet), "babylonnet" | "babylon" => Ok(TezosEnvironment::Babylonnet), "mainnet" => Ok(TezosEnvironment::Mainnet), "zeronet" => Ok(TezosEnvironment::Zeronet), _ => Err(ParseTezosEnvironmentError(format!("Invalid variant name: {}", s))) } }
function_block-full_function
[ { "content": "type OperationHash = Hash;\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 0, "score": 165307.25237255346 }, { "content": "type ContextHash = Hash;\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 1, "score": 165307.25237255346 }, { "content": "type BlockHash = Hash;\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 2, "score": 165307.25237255346 }, { "content": "pub fn init_storage(storage_data_dir: String, genesis: &'static GenesisChain, protocol_overrides: &'static ProtocolOverrides)\n\n -> Result<Result<OcamlStorageInitInfo, TezosStorageInitError>, OcamlError> {\n\n runtime::execute(move || {\n\n // genesis configuration\n\n let mut genesis_tuple: Tuple = Tuple::new(3);\n\n genesis_tuple.set(0, Str::from(genesis.time.as_str()).into()).unwrap();\n\n genesis_tuple.set(1, Str::from(genesis.block.as_str()).into()).unwrap();\n\n genesis_tuple.set(2, Str::from(genesis.protocol.as_str()).into()).unwrap();\n\n\n\n // protocol overrides\n\n let protocol_overrides_tuple: Tuple = protocol_overrides_to_ocaml(protocol_overrides)?;\n\n\n\n let ocaml_function = ocaml::named_value(\"init_storage\").expect(\"function 'init_storage' is not registered\");\n\n match ocaml_function.call3_exn::<Str, Value, Value>(\n\n storage_data_dir.as_str().into(),\n\n Value::from(genesis_tuple),\n\n Value::from(protocol_overrides_tuple),\n\n ) {\n\n Ok(result) => {\n\n let ocaml_result: Tuple = result.into();\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 3, "score": 161177.11060468495 }, { "content": "// Creates tezos app \n\npub fn tezos_app() -> App<'static, 'static> {\n\n // Default values for arguments are specidied in default configuration file\n\n //\n\n // Flag Required=true must be handled separately as we parse args twice, \n\n // once to see only if confi-file arg is present and second time to parse all args\n\n //\n\n // In case some args are required=true and user provides only config-file, \n\n // first 
round of parsing would always fail then \n\n let app = App::new(\"Tezos Light Node\")\n\n .version(\"0.3.1\")\n\n .author(\"SimpleStaking and the project contributors\")\n\n .about(\"Rust implementation of the tezos node\")\n\n .setting(clap::AppSettings::AllArgsOverrideSelf)\n\n .arg(Arg::with_name(\"config-file\")\n\n .long(\"config-file\")\n\n .takes_value(true)\n\n .value_name(\"PATH\")\n\n .help(\"Configuration file with start-up arguments (same format as cli arguments)\")\n\n .validator(|v| if Path::new(&v).exists() { Ok(()) } else { Err(format!(\"Configuration file not found at '{}'\", v)) }))\n\n .arg(Arg::with_name(\"tezos-data-dir\")\n", "file_path": "light_node/src/configuration.rs", "rank": 4, "score": 158029.9697179805 }, { "content": "struct EnumDeserializer<'a, 'de: 'a> {\n\n de: &'a mut Deserializer<'de>,\n\n}\n\n\n\nimpl<'a, 'de> EnumDeserializer<'a, 'de> {\n\n fn new(de: &'a mut Deserializer<'de>) -> Self {\n\n EnumDeserializer { de }\n\n }\n\n}\n\n\n\nimpl<'de, 'a> de::EnumAccess<'de> for EnumDeserializer<'a, 'de> {\n\n type Error = Error;\n\n type Variant = Self;\n\n\n\n fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>\n\n where\n\n V: DeserializeSeed<'de>,\n\n {\n\n match self.de.input {\n\n Value::Tag(variant, _) => {\n", "file_path": "tezos/encoding/src/de.rs", "rank": 5, "score": 157527.6263489286 }, { "content": "type Hash = Vec<u8>;\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\n\npub enum ContextAction {\n\n Set {\n\n context_hash: Option<Hash>,\n\n block_hash: Option<Hash>,\n\n operation_hash: Option<Hash>,\n\n key: Vec<String>,\n\n value: Vec<u8>,\n\n value_as_json: Option<String>,\n\n start_time: f64,\n\n end_time: f64,\n\n },\n\n Delete {\n\n context_hash: Option<Hash>,\n\n block_hash: Option<Hash>,\n\n operation_hash: Option<Hash>,\n\n key: Vec<String>,\n\n start_time: f64,\n", "file_path": "tezos/context/src/channel.rs", "rank": 6, "score": 151745.59305760846 }, { "content": "type Hash = 
Vec<u8>;\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 7, "score": 149445.80607092113 }, { "content": "pub fn get_constants(bytes: &[u8], protocol: ProtocolHash) -> Result<Option<HashMap<&'static str, UniversalValue>>, Error> {\n\n let hash: &str = &HashType::ProtocolHash.bytes_to_string(&protocol);\n\n match hash {\n\n proto_001::PROTOCOL_HASH => {\n\n use crate::protocol::proto_001::constants::{ParametricConstants, FIXED};\n\n let mut param = ParametricConstants::from_bytes(bytes.to_vec())?.as_map();\n\n param.extend(FIXED.clone().as_map());\n\n Ok(Some(param))\n\n }\n\n proto_002::PROTOCOL_HASH => {\n\n use crate::protocol::proto_002::constants::{ParametricConstants, FIXED};\n\n let mut param = ParametricConstants::from_bytes(bytes.to_vec())?.as_map();\n\n param.extend(FIXED.clone().as_map());\n\n Ok(Some(param))\n\n }\n\n proto_003::PROTOCOL_HASH => {\n\n use crate::protocol::proto_003::constants::{ParametricConstants, FIXED};\n\n let mut param = ParametricConstants::from_bytes(bytes.to_vec())?.as_map();\n\n param.extend(FIXED.clone().as_map());\n\n Ok(Some(param))\n", "file_path": "tezos/messages/src/protocol/mod.rs", "rank": 8, "score": 142017.0608436758 }, { "content": "pub trait BincodeEncoded: Sized + Serialize + for<'a> Deserialize<'a> {\n\n fn decode(bytes: &[u8]) -> Result<Self, SchemaError> {\n\n bincode::deserialize(bytes)\n\n .map_err(|_| SchemaError::DecodeError)\n\n }\n\n\n\n fn encode(&self) -> Result<Vec<u8>, SchemaError> {\n\n bincode::serialize::<Self>(self)\n\n .map_err(|_| SchemaError::EncodeError)\n\n }\n\n}\n\n\n\nimpl<T> Encoder for T where T: BincodeEncoded {\n\n fn encode(&self) -> Result<Vec<u8>, SchemaError> {\n\n T::encode(self)\n\n }\n\n}\n\n\n\nimpl<T> Decoder for T where T: BincodeEncoded {\n\n fn decode(bytes: &[u8]) -> Result<Self, SchemaError> {\n", "file_path": "storage/src/persistent/codec.rs", "rank": 9, "score": 140259.25316870058 }, { "content": "struct StructDeserializer<'de> {\n\n input: Iter<'de, 
(String, Value)>,\n\n value: Option<&'de Value>,\n\n}\n\n\n\nimpl<'de> Deserializer<'de> {\n\n pub fn new(input: &'de Value) -> Self {\n\n Deserializer { input }\n\n }\n\n}\n\n\n\nimpl<'de> SeqDeserializer<'de> {\n\n pub fn new(input: &'de [Value]) -> Self {\n\n SeqDeserializer {\n\n input: input.iter(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'de> StructDeserializer<'de> {\n", "file_path": "tezos/encoding/src/de.rs", "rank": 10, "score": 131099.58482455756 }, { "content": "struct StringDeserializer {\n\n input: String,\n\n}\n\n\n\nimpl<'de> de::Deserializer<'de> for StringDeserializer {\n\n type Error = Error;\n\n\n\n fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>\n\n where\n\n V: Visitor<'de>,\n\n {\n\n visitor.visit_string(self.input)\n\n }\n\n\n\n forward_to_deserialize_any! {\n\n bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string unit option\n\n seq bytes byte_buf map unit_struct newtype_struct\n\n tuple_struct struct tuple enum identifier ignored_any\n\n }\n\n}\n\n\n\n\n", "file_path": "tezos/encoding/src/de.rs", "rank": 11, "score": 124643.90146270148 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct InitStorageParams {\n\n storage_data_dir: String,\n\n tezos_environment: TezosEnvironment\n\n}\n\n\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 12, "score": 122251.24241425082 }, { "content": "struct SeqDeserializer<'de> {\n\n input: Iter<'de, Value>,\n\n}\n\n\n", "file_path": "tezos/encoding/src/de.rs", "rank": 13, "score": 119753.62576043535 }, { "content": "type NetworkListenerRef = ActorRef<NetworkChannelMsg>;\n\n\n\npub struct NetworkChannelListener {\n\n start: Instant,\n\n event_index: u64,\n\n record_storage: EventPayloadStorage,\n\n record_meta_storage: EventStorage,\n\n network_channel: NetworkChannelRef,\n\n}\n\n\n\nimpl NetworkChannelListener {\n\n fn name() -> &'static str { \"network-listener\" }\n\n\n\n fn new((rocks_db, network_channel): (Arc<DB>, NetworkChannelRef)) -> Self {\n\n let 
record_meta_storage = EventStorage::new(rocks_db.clone());\n\n let event_index = record_meta_storage.count_events().unwrap_or_default() as u64;\n\n Self {\n\n start: Instant::now(),\n\n event_index,\n\n record_storage: EventPayloadStorage::new(rocks_db),\n", "file_path": "monitoring/src/listener/listener.rs", "rank": 14, "score": 119402.06639412294 }, { "content": "fn assert_eq_hash(expected: &str, hash: OcamlHash) {\n\n assert!(!hash.is_empty());\n\n let hash: RustBytes = hash.convert_to();\n\n let hash_ocaml = hex::encode(hash);\n\n assert_eq!(expected, hash_ocaml.as_str());\n\n}\n\n\n", "file_path": "tezos/interop/tests/test_bytes_roundtrips.rs", "rank": 15, "score": 118878.79058339872 }, { "content": "// GET /monitor/protocols\n\ntype ProtocolHash = UniString;\n\n\n\n\n\n// GET /monitor/active_chains\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]\n\n#[serde(untagged)]\n\npub enum ChainStatus {\n\n Active {\n\n chain_id: ChainId,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n test_protocol: Option<ProtocolHash>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n expiration_date: Option<TimeStamp>,\n\n },\n\n Stopping {\n\n stopping: ChainId\n\n },\n\n}\n\n\n", "file_path": "rpc/src/encoding/monitor.rs", "rank": 16, "score": 117310.20566002518 }, { "content": "/// Trait for getting hash of the message.\n\npub trait MessageHash {\n\n fn message_hash(&self) -> Result<Hash, MessageHashError>;\n\n}\n\n\n\nimpl<T: BinaryMessage> MessageHash for T {\n\n #[inline]\n\n fn message_hash(&self) -> Result<Hash, MessageHashError> {\n\n let bytes = self.as_bytes()?;\n\n Ok(blake2b::digest_256(&bytes))\n\n }\n\n}", "file_path": "tezos/messages/src/p2p/binary_message.rs", "rank": 17, "score": 113184.01656024792 }, { "content": "pub trait TypedLane<C: Codec> {\n\n fn get(&self, index: usize) -> Result<Option<C>, SkipListError>;\n\n\n\n fn put(&self, index: usize, value: &C) -> Result<(), SkipListError>;\n\n\n\n fn base_iterator(&self, 
starting_index: usize) -> Result<LaneIterator<C>, SkipListError>;\n\n\n\n fn rev_base_iterator(&self, end_index: usize, count: usize) -> Result<SizedLaneIterator<C>, SkipListError>;\n\n}\n\n\n\nimpl<C: Codec> TypedLane<C> for Lane {\n\n /// Get value from specific index (relative to this lane).\n\n fn get(&self, index: usize) -> Result<Option<C>, SkipListError> {\n\n self.db.get(&NodeHeader::new(self.list_id, self.level, index))\n\n .map_err(SkipListError::from)?\n\n .map(|value| C::decode(&value).map_err(SkipListError::from))\n\n .transpose()\n\n }\n\n\n\n /// Put new value on specific index of this lane, beware, that lanes should contain continuous\n", "file_path": "storage/src/skip_list/lane.rs", "rank": 18, "score": 107281.36812930767 }, { "content": "#[test]\n\n#[serial]\n\nfn test_init_empty_storage_with_alphanet_and_then_reinit_with_zeronet_the_same_directory() {\n\n init_test_runtime();\n\n\n\n let storage_data_dir = \"bootstrap_test_storage_06\";\n\n // ALPHANET init empty storage for test\n\n let alphanet_init_info: TezosStorageInitInfo = client::init_storage(\n\n common::prepare_empty_dir(&storage_data_dir),\n\n TezosEnvironment::Alphanet,\n\n ).unwrap();\n\n // current hash must be equal to genesis\n\n assert_eq!(alphanet_init_info.genesis_block_header_hash, alphanet_init_info.current_block_header_hash);\n\n\n\n let alphanet_block_header_hash_level1 = BlockHeader::from_bytes(hex::decode(test_data::BLOCK_HEADER_LEVEL_1).unwrap()).unwrap();\n\n\n\n // ALPHANET - apply first block - level 1\n\n let apply_block_result = client::apply_block(\n\n &alphanet_init_info.chain_id,\n\n &alphanet_block_header_hash_level1,\n\n &test_data::block_operations_from_hex(\n\n test_data::BLOCK_HEADER_HASH_LEVEL_1,\n", "file_path": "tezos/client/tests/bootstrap_storage_test.rs", "rank": 20, "score": 104746.29686890673 }, { "content": "type ContextMap = HashMap<String, Bucket<Vec<u8>>>;\n\n\n\n/// Object containing information to recreate the full block 
information\n\n#[derive(Serialize, Debug, Clone)]\n\npub struct FullBlockInfo {\n\n pub hash: String,\n\n pub chain_id: String,\n\n pub header: InnerBlockHeader,\n\n pub metadata: HashMap<String, Value>,\n\n pub operations: Vec<Vec<HashMap<String, Value>>>,\n\n}\n\n\n\n/// Object containing all block header information\n\n#[derive(Serialize, Debug, Clone)]\n\npub struct InnerBlockHeader {\n\n pub level: i32,\n\n pub proto: u8,\n\n pub predecessor: String,\n\n pub timestamp: String,\n\n pub validation_pass: u8,\n", "file_path": "rpc/src/helpers.rs", "rank": 21, "score": 103782.25374729367 }, { "content": "/// Spawn new HTTP server on given address interacting with specific actor system\n\npub fn spawn_server(bind_address: &SocketAddr, env: RpcServiceEnvironment) -> impl Future<Output=Result<(), hyper::Error>> {\n\n let routes = Arc::new(router::create_routes());\n\n\n\n hyper::Server::bind(bind_address)\n\n .serve(make_service_fn(move |_| {\n\n let env = env.clone();\n\n let routes = routes.clone();\n\n\n\n async move {\n\n let env = env.clone();\n\n let routes = routes.clone();\n\n Ok::<_, hyper::Error>(service_fn(move |req: Request<Body>| {\n\n let env = env.clone();\n\n let routes = routes.clone();\n\n async move {\n\n if let Some((handler, params)) = routes.find(&req.uri().path().to_string()) {\n\n let params: Params = params.into_iter().map(|(param, value)| (param.to_string(), value.to_string())).collect();\n\n let query: Query = req.uri().query().map(parse_query_string).unwrap_or_else(|| HashMap::new());\n\n\n\n let handler = handler.clone();\n", "file_path": "rpc/src/server/mod.rs", "rank": 22, "score": 103159.40567725735 }, { "content": "/// Initializes storage for Tezos ocaml storage in chosen directory\n\npub fn init_storage(storage_data_dir: String, tezos_environment: TezosEnvironment) -> Result<TezosStorageInitInfo, TezosStorageInitError> {\n\n let cfg: &TezosEnvironmentConfiguration = match environment::TEZOS_ENV.get(&tezos_environment) {\n\n None => 
return Err(TezosStorageInitError::InitializeError {\n\n message: format!(\"FFI 'init_storage' failed, because there is no tezos environment configured for: {:?}\", tezos_environment)\n\n }),\n\n Some(cfg) => cfg\n\n };\n\n match ffi::init_storage(storage_data_dir, &cfg.genesis, &cfg.protocol_overrides) {\n\n Ok(result) => Ok(TezosStorageInitInfo::new(result?)\n\n .map_err(|err| TezosStorageInitError::InitializeError { message: format!(\"Decoding from hex failed! Reason: {:?}\", err) })?),\n\n Err(e) => {\n\n Err(TezosStorageInitError::InitializeError {\n\n message: format!(\"FFI 'init_storage' failed! Initialization of Tezos storage failed, this storage is required, we can do nothing without that! Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 23, "score": 99606.05574976851 }, { "content": "/// Initialize Tezos PRNG\n\n/// \n\n/// # Arguments\n\n/// \n\n/// * `state` - RandomSeedState, initially the random seed.\n\n/// * `nonce_size` - Nonce_length from current protocol constants.\n\n/// * `blocks_per_cycle` - Blocks_per_cycle from current protocol context constants\n\n/// * `use_string_bytes` - String converted to bytes, i.e. 
endorsing rights use b\"level endorsement:\".\n\n/// * `level` - block level\n\n/// * `offset` - For baking priority, for endorsing slot\n\n/// \n\n/// Return first random sequence state to use in [get_prng_number](`get_prng_number`)\n\npub fn init_prng(cycle_data: &RightsContextData, constants: &RightsConstants, use_string_bytes: &[u8], level: i32, offset: i32) -> Result<RandomSeedState, failure::Error> {\n\n // a safe way to convert betwwen types is to use try_from\n\n let nonce_size = usize::try_from(*constants.nonce_length())?;\n\n let blocks_per_cycle = *constants.blocks_per_cycle();\n\n let state = cycle_data.random_seed(); \n\n let zero_bytes: Vec<u8> = vec![0; nonce_size];\n\n\n\n // the position of the block in its cycle; has to be i32\n\n let cycle_position: i32 = level_position(level.into(), blocks_per_cycle)?.try_into()?;\n\n\n\n // take the state (initially the random seed), zero bytes, the use string and the blocks position in the cycle as bytes, merge them together and hash the result\n\n let rd = blake2b::digest_256(&merge_slices!(&state, &zero_bytes, use_string_bytes, &cycle_position.to_be_bytes())).to_vec();\n\n \n\n // take the 4 highest bytes and xor them with the priority/slot (offset)\n\n let higher = num_from_slice!(rd, 0, i32) ^ offset;\n\n \n\n // set the 4 highest bytes to the result of the xor operation\n\n let sequence = blake2b::digest_256(&merge_slices!(&higher.to_be_bytes(), &rd[4..])).to_vec();\n\n\n\n Ok(sequence)\n\n}\n\n\n", "file_path": "rpc/src/helpers.rs", "rank": 24, "score": 96709.7698126529 }, { "content": "// Str means hash as hex string\n\nfn to_hash(hash: Str) -> Option<Hash> {\n\n if hash.len() <= 0 {\n\n None\n\n } else {\n\n Some(hex::decode(hash.as_str()).unwrap())\n\n }\n\n}\n\n\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 25, "score": 96265.4353503224 }, { "content": "pub trait SplitEncodingFn: Fn(SchemaType) -> Encoding + Send + Sync {}\n\n\n\nimpl<F> SplitEncodingFn for F where F: 
Fn(SchemaType) -> Encoding + Send + Sync {}\n\n\n\nimpl fmt::Debug for dyn SplitEncodingFn<Output=Encoding> + Send + Sync {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Fn(SchemaType) -> Encoding\")\n\n }\n\n}\n\n\n\n\n", "file_path": "tezos/encoding/src/encoding.rs", "rank": 26, "score": 95600.58039068122 }, { "content": "/// Get block header from storage or None\n\npub fn get_block_header(chain_id: &ChainId, block_header_hash: &BlockHash) -> Result<Option<BlockHeader>, BlockHeaderError> {\n\n match ffi::get_block_header(chain_id.clone(), block_header_hash.clone()) {\n\n Ok(result) => {\n\n let header = result?;\n\n match header {\n\n None => Ok(None),\n\n Some(header) => {\n\n match BlockHeader::from_bytes(header) {\n\n Ok(header) => Ok(Some(header)),\n\n Err(e) => Err(BlockHeaderError::ReadError { message: format!(\"Decoding from hex failed! Reason: {:?}\", e) })\n\n }\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n Err(BlockHeaderError::ReadError {\n\n message: format!(\"FFI 'get_block_header' failed! Something is wrong! 
Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 27, "score": 91418.9067123876 }, { "content": "pub fn process_protocol_events<P: AsRef<Path>>(socket_path: P) -> Result<(), IpcError> {\n\n let ipc_client: IpcClient<NoopMessage, ContextAction> = IpcClient::new(socket_path);\n\n let (_, mut tx) = ipc_client.connect()?;\n\n while let Ok(action) = context_receive() {\n\n tx.send(&action)?;\n\n if let ContextAction::Shutdown = action {\n\n break;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 28, "score": 88795.62617713364 }, { "content": "/// Decode protocoled context data\n\npub fn decode_context_data(protocol_hash: ProtocolHash, key: Vec<String>, data: Vec<u8>) -> Result<Option<String>, ContextDataError> {\n\n match ffi::decode_context_data(protocol_hash, key, data) {\n\n Ok(result) => Ok(result?),\n\n Err(e) => {\n\n Err(ContextDataError::DecodeError {\n\n message: format!(\"FFI 'decode_context_data' failed! 
Reason: {:?}\", e)\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/client/src/client.rs", "rank": 29, "score": 88120.43584666094 }, { "content": "pub fn simulate_ledger(mut list: Box<dyn TypedSkipList<u64, u64, OrderedValue>>) {\n\n let mut rng = rand::thread_rng();\n\n\n\n let mut context: Ctx = Default::default();\n\n let mut contexts: Vec<Ctx> = Default::default();\n\n let mut changes: Vec<Ctx> = Default::default();\n\n\n\n for _ in 0..LEDGER_SIZE {\n\n let mut state: Ctx = Default::default();\n\n let op_count = rng.gen_range(1, OPERATION_COUNT);\n\n\n\n for _ in 0..op_count {\n\n let primary_key = rng.gen_range(0, KEY_COUNT);\n\n let secondary_key = state.0.keys()\n\n .map(|v| v.clone())\n\n .collect::<Vec<u64>>()\n\n .choose(&mut rng)\n\n .map(|v| v.clone());\n\n\n\n match rng.gen() {\n", "file_path": "storage/tests/skip_list.rs", "rank": 30, "score": 86291.72745127643 }, { "content": "/// Load identity from tezos configuration file.\n\npub fn load_identity<P: AsRef<Path>>(identity_json_file_path: P) -> Result<Identity, IdentityError> {\n\n let identity = fs::read_to_string(identity_json_file_path)\n\n .map(|contents| serde_json::from_str::<Identity>(&contents).map_err(|err| IdentityError::DeserializationError { reason: err }))??;\n\n Ok(identity)\n\n}\n\n\n", "file_path": "light_node/src/identity.rs", "rank": 31, "score": 85043.41162765642 }, { "content": "/// Interpret a `Value` as an instance of type `D`.\n\n///\n\n/// This conversion can fail if the structure of the `Value` does not match the\n\n/// structure expected by `D`.\n\npub fn from_value<'de, D: Deserialize<'de>>(value: &'de Value) -> Result<D, BinaryReaderError> {\n\n let mut de = Deserializer::new(value);\n\n Ok(D::deserialize(&mut de)?)\n\n}\n\n\n\n\n\n/*\n\n * -----------------------------------------------------------------------------\n\n * BigInt deserialization\n\n * -----------------------------------------------------------------------------\n\n */\n", "file_path": 
"tezos/encoding/src/de.rs", "rank": 32, "score": 84570.04061790142 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct Artifact {\n\n name: String,\n\n url: String,\n\n}\n\n\n", "file_path": "tezos/interop/build.rs", "rank": 33, "score": 84322.70128475947 }, { "content": "#[derive(Debug)]\n\nenum Operation {\n\n Set,\n\n Copy,\n\n Delete,\n\n}\n\n\n\nimpl Distribution<Operation> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Operation {\n\n use Operation::*;\n\n match rng.gen_range(0, 3) {\n\n 0 => Set,\n\n 1 => Copy,\n\n _ => Delete,\n\n }\n\n }\n\n}\n\n\n\nconst LEDGER_SIZE: usize = 5000;\n\nconst OPERATION_COUNT: usize = 100;\n\nconst KEY_COUNT: u64 = 10000;\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 34, "score": 83388.09603415336 }, { "content": "/// Genesis block needs extra handling because predecessor of the genesis block is genesis itself.\n\n/// Which means that successor of the genesis block is also genesis block. By combining those\n\n/// two statements we get cyclic relationship and everything breaks..\n\npub fn initialize_storage_with_genesis_block(genesis_hash: &BlockHash, genesis: &BlockHeader, genesis_chain_id: &ChainId, persistent_storage: &PersistentStorage, log: Logger) -> Result<(), StorageError> {\n\n let genesis_with_hash = BlockHeaderWithHash {\n\n hash: genesis_hash.clone(),\n\n header: Arc::new(genesis.clone()),\n\n };\n\n let mut block_storage = BlockStorage::new(persistent_storage);\n\n if block_storage.get(&genesis_with_hash.hash)?.is_none() {\n\n info!(log, \"Initializing storage with genesis block\");\n\n block_storage.put_block_header(&genesis_with_hash)?;\n\n // TODO: include the data for the other chains as well (mainet, zeronet, etc.)\n\n // just for babylonnet for now\n\n let genesis_meta_string = 
\"{\\\"protocol\\\":\\\"PrihK96nBAFSxVL1GLJTVhu9YnzkMFiBeuJRPA8NwuZVZCE1L6i\\\",\\\"next_protocol\\\":\\\"PtBMwNZT94N7gXKw4i273CKcSaBrrBnqnt3RATExNKr9KNX2USV\\\",\\\"test_chain_status\\\":{\\\"status\\\":\\\"not_running\\\"},\\\"max_operations_ttl\\\":0,\\\"max_operation_data_length\\\":0,\\\"max_block_header_length\\\":115,\\\"max_operation_list_length\\\":[]}\".to_string();\n\n let genesis_op_string = \"{\\\"operations\\\":[]}\".to_string();\n\n let genesis_prot_string = \"\".to_string();\n\n let block_json_data = BlockJsonDataBuilder::default()\n\n .block_header_proto_json(genesis_prot_string)\n\n .block_header_proto_metadata_json(genesis_meta_string)\n\n .operations_proto_metadata_json(genesis_op_string)\n\n .build().unwrap();\n\n block_storage.put_block_json_data(&genesis_with_hash.hash, block_json_data)?;\n", "file_path": "storage/src/lib.rs", "rank": 35, "score": 83059.8535623751 }, { "content": "#[derive(Debug)]\n\nstruct RemoteLib {\n\n lib_url: String,\n\n sha256_checksum_url: String,\n\n}\n\n\n", "file_path": "tezos/interop/build.rs", "rank": 36, "score": 82944.27275642374 }, { "content": "#[derive(Clone)]\n\nstruct Network {\n\n /// Message receiver boolean indicating whether\n\n /// more messages should be received from network\n\n rx_run: Arc<AtomicBool>,\n\n /// Message sender\n\n tx: Arc<Mutex<Option<EncryptedMessageWriter>>>,\n\n /// Socket address of the peer\n\n socket_address: SocketAddr,\n\n}\n\n\n\n/// Local node info\n\npub struct Local {\n\n /// port where remote node can establish new connection\n\n listener_port: u16,\n\n /// our public key\n\n public_key: String,\n\n /// our secret key\n\n secret_key: String,\n\n /// proof of work\n\n proof_of_work_stamp: String,\n", "file_path": "networking/src/p2p/peer.rs", "rank": 37, "score": 82944.26934055077 }, { "content": "/// Holds various stats with info about internal synchronization.\n\nstruct Stats {\n\n /// Count of received blocks\n\n unseen_block_count: usize,\n\n /// Lest time when last 
unseen block was received\n\n unseen_block_last: Instant,\n\n /// Last time when last unseen operations was received\n\n unseen_operations_last: Instant,\n\n /// ID of the last applied block\n\n applied_block_level: Option<i32>,\n\n /// Last time a block was applied\n\n applied_block_last: Option<Instant>,\n\n /// Last time state was hydrated\n\n hydrated_state_last: Option<Instant>,\n\n}\n\n\n\n/// Purpose of this actor is to perform chain synchronization.\n\n#[actor(DisconnectStalledPeers, CheckChainCompleteness, AskPeersAboutCurrentBranch, LogStats, NetworkChannelMsg, ShellChannelMsg, SystemEvent, DeadLetter)]\n\npub struct ChainManager {\n\n /// All events generated by the network layer will end up in this channel\n\n network_channel: NetworkChannelRef,\n", "file_path": "shell/src/chain_manager.rs", "rank": 38, "score": 82938.99467516478 }, { "content": "/// Establish connection to existing IPC endpoint (which was created by tezedge node).\n\n/// Begin receiving commands from the tezedge node until `ShutdownCall` command is received.\n\npub fn process_protocol_commands<Proto: ProtocolApi, P: AsRef<Path>>(socket_path: P) -> Result<(), IpcError> {\n\n let ipc_client: IpcClient<ProtocolMessage, NodeMessage> = IpcClient::new(socket_path);\n\n let (mut rx, mut tx) = ipc_client.connect()?;\n\n while let Ok(cmd) = rx.receive() {\n\n match cmd {\n\n ProtocolMessage::ApplyBlockCall(params) => {\n\n let res = Proto::apply_block(&params.chain_id, &params.block_header, &params.operations);\n\n tx.send(&NodeMessage::ApplyBlockResult(res))?;\n\n }\n\n ProtocolMessage::ChangeRuntimeConfigurationCall(params) => {\n\n let res = Proto::change_runtime_configuration(params);\n\n tx.send(&NodeMessage::ChangeRuntimeConfigurationResult(res))?;\n\n }\n\n ProtocolMessage::InitStorageCall(params) => {\n\n let res = Proto::init_storage(params.storage_data_dir, params.tezos_environment);\n\n tx.send(&NodeMessage::InitStorageResult(res))?;\n\n }\n\n 
ProtocolMessage::GenerateIdentity(params) => {\n\n let res = Proto::generate_identity(params.expected_pow);\n\n tx.send(&NodeMessage::GenerateIdentityResult(res))?;\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 39, "score": 82831.03751613823 }, { "content": "#[derive(Serialize, Deserialize, Debug, IntoStaticStr)]\n\nenum NodeMessage {\n\n ApplyBlockResult(Result<ApplyBlockResult, ApplyBlockError>),\n\n ChangeRuntimeConfigurationResult(Result<(), TezosRuntimeConfigurationError>),\n\n InitStorageResult(Result<TezosStorageInitInfo, TezosStorageInitError>),\n\n GenerateIdentityResult(Result<Identity, TezosGenerateIdentityError>),\n\n ShutdownResult\n\n}\n\n\n\n/// Empty message\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 40, "score": 82108.03393855398 }, { "content": "#[derive(Serialize, Deserialize, Debug, IntoStaticStr)]\n\nenum ProtocolMessage {\n\n ApplyBlockCall(ApplyBlockParams),\n\n ChangeRuntimeConfigurationCall(TezosRuntimeConfiguration),\n\n InitStorageCall(InitStorageParams),\n\n GenerateIdentity(GenerateIdentityParams),\n\n ShutdownCall,\n\n}\n\n\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 41, "score": 82108.03393855398 }, { "content": "#[derive(Debug, Fail)]\n\nenum PeerError {\n\n #[fail(display = \"Received NACK from remote peer\")]\n\n NackReceived,\n\n #[fail(display = \"Failed to create precomputed key\")]\n\n FailedToPrecomputeKey,\n\n #[fail(display = \"Network error: {}\", message)]\n\n NetworkError {\n\n error: Error,\n\n message: &'static str,\n\n },\n\n #[fail(display = \"Message serialization error\")]\n\n SerializationError {\n\n error: tezos_encoding::ser::Error\n\n },\n\n #[fail(display = \"Message deserialization error\")]\n\n DeserializationError {\n\n error: BinaryReaderError\n\n },\n\n}\n\n\n", "file_path": "networking/src/p2p/peer.rs", "rank": 42, "score": 82092.84642546458 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct NoopMessage;\n\n\n", "file_path": 
"tezos/wrapper/src/service.rs", "rank": 43, "score": 81661.5200124461 }, { "content": "#[derive(Clone, Debug)]\n\nstruct CurrentHead {\n\n /// Represents local current head. Value here is the same as the\n\n /// hash of the last applied block.\n\n local: BlockHash,\n\n /// Remote / network remote current head. This represents info about\n\n /// the current branch with the highest level received from network.\n\n remote: BlockHash,\n\n /// Level of the remote current head.\n\n remote_level: i32,\n\n}\n\n\n", "file_path": "shell/src/chain_manager.rs", "rank": 44, "score": 81656.40945518539 }, { "content": "/// This struct represents a shared state between `OcamlTask` and `OcamlResult`.\n\nstruct SharedState {\n\n /// this waker is used to notify that `OcamlResult` is now ready to be polled\n\n waker: Option<Waker>\n\n}\n\n\n", "file_path": "tezos/interop/src/runtime.rs", "rank": 45, "score": 81651.05900861291 }, { "content": "/// Holds information about a specific peer.\n\nstruct PeerState {\n\n /// Reference to peer actor\n\n peer_ref: PeerRef,\n\n /// Queued blocks\n\n queued_block_headers: HashMap<BlockHash, MissingBlock>,\n\n /// Queued operations\n\n queued_operations: HashMap<BlockHash, MissingOperations>,\n\n /// Level of the current head received from peer\n\n current_head_level: Option<i32>,\n\n /// Last time we received updated head from peer\n\n current_head_update_last: Instant,\n\n /// Last time we requested block from the peer\n\n block_request_last: Instant,\n\n /// Last time we received block from the peer\n\n block_response_last: Instant,\n\n /// Last time we requested operations from the peer\n\n operations_request_last: Instant,\n\n /// Last time we received operations from the peer\n\n operations_response_last: Instant,\n\n\n", "file_path": "shell/src/chain_manager.rs", "rank": 46, "score": 81645.96557730091 }, { "content": "/// Holds data related to ocaml environment.\n\nstruct OcamlEnvironment {\n\n spawner: OcamlTaskSpawner,\n\n}\n\n\n", 
"file_path": "tezos/interop/src/runtime.rs", "rank": 47, "score": 81645.96557730091 }, { "content": "struct IpcIO {\n\n rx: IpcReceiver<NodeMessage>,\n\n tx: IpcSender<ProtocolMessage>,\n\n}\n\n\n\n/// Encapsulate IPC communication.\n\npub struct ProtocolController<'a> {\n\n io: RefCell<IpcIO>,\n\n configuration: &'a ProtocolEndpointConfiguration,\n\n}\n\n\n\n/// Provides convenience methods for IPC communication.\n\n///\n\n/// Instead of manually sending and receiving messages over IPC channel use provided methods.\n\n/// Methods also handle things such as timeouts and also checks is correct response type is received.\n\nimpl<'a> ProtocolController<'a> {\n\n const GENERATE_IDENTITY_TIMEOUT: Duration = Duration::from_secs(600);\n\n const APPLY_BLOCK_TIMEOUT: Duration = Duration::from_secs(30);\n\n\n\n /// Apply block\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 48, "score": 81645.96557730091 }, { "content": "/// Ocaml task is executed by `OcamlThreadExecutor`. Task holds future responsible\n\n/// for executing ocaml function(s) and passing the result back to rust.\n\nstruct OcamlTask {\n\n /// this operation will be executed by `OcamlThreadExecutor` in the thread that is allowed to access the ocaml runtime\n\n op: Box<dyn FnOnce() + Send + 'static>,\n\n /// shared state between `OcamlTask` and `OcamlResult`\n\n state: Arc<Mutex<SharedState>>,\n\n}\n\n\n\nimpl OcamlTask {\n\n /// Create new ocaml task\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `f` - the function will be executed in ocaml thread context\n\n /// * `f_result_holder` - will hold result of the `f` after `f`'s completion\n\n /// * `shared_state` - shared state between `OcamlTask` and `OcamlResult`\n\n fn new<F, T>(f: F, f_result_holder: TaskResultHolder<T>, shared_state: Arc<Mutex<SharedState>>) -> OcamlTask\n\n where\n\n F: FnOnce() -> T + Send + 'static,\n\n T: Send + 'static\n\n {\n", "file_path": "tezos/interop/src/runtime.rs", "rank": 49, "score": 81645.96557730091 }, { "content": "pub 
trait TypedSkipList<K: Codec, V: Codec, C: ListValue<K, V>>: SkipList {\n\n fn get(&self, index: usize) -> Result<Option<C>, SkipListError>;\n\n\n\n fn get_key(&self, index: usize, key: &K) -> Result<Option<V>, SkipListError>;\n\n\n\n fn get_raw(&self, lane_level: usize, index: usize) -> Result<Option<C>, SkipListError>;\n\n\n\n fn push(&mut self, value: C) -> Result<(), SkipListError>;\n\n\n\n fn diff(&self, from: usize, to: usize) -> Result<Option<C>, SkipListError>;\n\n}\n\n\n\nimpl<K: Codec, V: Codec, C: ListValue<K, V>> TypedSkipList<K, V, C> for DatabaseBackedSkipList {\n\n /// Rebuild state for given index\n\n fn get(&self, index: usize) -> Result<Option<C>, SkipListError> {\n\n // There is an sequential index on lowest level, if expected index is bigger than\n\n // length of chain, it is not stored, otherwise, IT MUST BE FOUND.\n\n if index >= self.state.len {\n\n return Ok(None);\n\n }\n", "file_path": "storage/src/skip_list/skip_list.rs", "rank": 50, "score": 81506.00645195777 }, { "content": "fn protocol(hash: &str) -> ProtocolHash {\n\n HashType::ProtocolHash\n\n .string_to_bytes(hash)\n\n .unwrap()\n\n}\n\n\n", "file_path": "tezos/client/tests/decode_context_data_test.rs", "rank": 51, "score": 80963.15313798723 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct GenerateIdentityParams {\n\n expected_pow: f64,\n\n}\n\n\n\n/// This event message is generated as a response to the `ProtocolMessage` command.\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 52, "score": 80441.84667384048 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct ApplyBlockParams {\n\n chain_id: ChainId,\n\n block_header: BlockHeader,\n\n operations: Vec<Option<OperationsForBlocksMessage>>,\n\n}\n\n\n", "file_path": "tezos/wrapper/src/service.rs", "rank": 53, "score": 80441.84667384048 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct BenchData {\n\n block_hash: Vec<u8>,\n\n chain_hash: Vec<u8>,\n\n}\n\n\n", "file_path": 
"tezos/wrapper/benches/bench_shm.rs", "rank": 54, "score": 80436.78351390151 }, { "content": "struct BigIntVisitor;\n\n\n\nimpl<'de> Visitor<'de> for BigIntVisitor {\n\n type Value = BigInt;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a hex encoded string\")\n\n }\n\n\n\n fn visit_string<E>(self, value: String) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n let bigint: BigInt = num_bigint::BigInt::parse_bytes(value.as_bytes(), 16).unwrap().into();\n\n Ok(bigint)\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n", "file_path": "tezos/encoding/src/de.rs", "rank": 55, "score": 80426.29223869529 }, { "content": "/// Spawns ocaml task. Spawning is simply sending ocaml task into the sender queue `spawned_tasks`.\n\n/// Ocaml tasks are then received and executed by the `OcamlThreadExecutor` singleton.\n\nstruct OcamlTaskSpawner {\n\n /// Sender is used to send tasks to the `OcamlThreadExecutor`.\n\n spawned_tasks: Arc<Mutex<Sender<OcamlTask>>>\n\n}\n\n\n\nimpl OcamlTaskSpawner {\n\n /// Spawns ocaml task. Spawning is simply sending ocaml task into the sender queue `spawned_tasks`.\n\n /// Ocaml tasks are then received and executed by the `OcamlThreadExecutor` singleton.\n\n pub fn spawn(&self, task: OcamlTask) -> Result<(), SendError<OcamlTask>> {\n\n self.spawned_tasks.lock().unwrap().send(task)\n\n }\n\n}\n\n\n", "file_path": "tezos/interop/src/runtime.rs", "rank": 56, "score": 80426.29223869529 }, { "content": "/// Runs `OcamlTask` to it's completion. 
By design of this library there will be\n\n/// only a single instance of `OcamlThreadExecutor` running because ocaml runtime\n\n/// is not designed to be accessed from multiple threads.\n\nstruct OcamlThreadExecutor {\n\n /// Receiver is used to receive tasks which will be then executed\n\n /// in the ocaml runtime.\n\n ready_tasks: Receiver<OcamlTask>\n\n}\n\n\n\nimpl OcamlThreadExecutor {\n\n /// Runs scheduled ocaml task to it's completion.\n\n fn run(&self) {\n\n while let Ok(task) = self.ready_tasks.recv() {\n\n // execute future from task\n\n (task.op)();\n\n // notify waker that OcamlResult (it implements Future) is ready to be polled\n\n if let Some(waker) = task.state.lock().unwrap().waker.take() {\n\n waker.wake()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "tezos/interop/src/runtime.rs", "rank": 57, "score": 80426.29223869529 }, { "content": "#[test]\n\nfn deserialized_equals_serialized_message() {\n\n let original_message = PeerMessage::Deactivate(DeactivateMessage::new(hex::decode(\"8eceda2f\").unwrap()));\n\n let resp: PeerMessageResponse = original_message.into();\n\n let msg_bytes = resp.as_bytes().unwrap();\n\n let deserialized = PeerMessageResponse::from_bytes(msg_bytes).expect(\"expected valid message\");\n\n let deserialized_message = deserialized.messages().get(0).expect(\"expected message in response\");\n\n let original_message = PeerMessage::Deactivate(DeactivateMessage::new(hex::decode(\"8eceda2f\").unwrap()));\n\n\n\n if let (PeerMessage::Deactivate(ref orig_msg), PeerMessage::Deactivate(ref des_msg)) = (original_message, deserialized_message) {\n\n assert_eq!(orig_msg.deactivate(), des_msg.deactivate());\n\n } else {\n\n panic!(\"expected two deactivate messages\")\n\n }\n\n}", "file_path": "tezos/messages/tests/encoding_deactivate.rs", "rank": 58, "score": 79114.86194745764 }, { "content": "// dummy hashing function\n\nfn copy_bytes(data: &[u8]) -> Vec<u8> {\n\n data.to_vec()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use 
super::*;\n\n\n\n #[test]\n\n fn test_encode_chain_id() -> Result<(), failure::Error> {\n\n let decoded = HashType::ChainId.bytes_to_string(&hex::decode(\"8eceda2f\")?);\n\n let expected = \"NetXgtSLGNJvNye\";\n\n assert_eq!(expected, decoded);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn test_encode_block_header() -> Result<(), failure::Error> {\n\n let decoded = HashType::BlockHash.bytes_to_string(&hex::decode(\"46a6aefde9243ae18b191a8d010b7237d5130b3530ce5d1f60457411b2fa632d\")?);\n", "file_path": "crypto/src/hash.rs", "rank": 59, "score": 79045.32566115983 }, { "content": "type Ctx = OrderedValue;\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 60, "score": 77945.9163442635 }, { "content": "/// Indicates that type has it's own ser/de schema.\n\npub trait HasEncoding {\n\n fn encoding() -> Encoding;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn schema_split() {\n\n let split_encoding = Encoding::Split(Arc::new(|schema_type| {\n\n match schema_type {\n\n SchemaType::Json => Encoding::Uint16,\n\n SchemaType::Binary => Encoding::Float\n\n }\n\n }));\n\n\n\n if let Encoding::Split(inner_encoding) = split_encoding {\n\n match inner_encoding(SchemaType::Json) {\n\n Encoding::Uint16 => {}\n", "file_path": "tezos/encoding/src/encoding.rs", "rank": 61, "score": 77193.15968762717 }, { "content": "/// A trait for converting base58check encoded values.\n\npub trait FromBase58Check {\n\n /// Size of the checksum used by implementation.\n\n const CHECKSUM_BYTE_SIZE: usize = 4;\n\n\n\n /// Convert a value of `self`, interpreted as base58check encoded data, into the tuple with version and payload as bytes vector.\n\n fn from_base58check(&self) -> Result<Vec<u8>, FromBase58CheckError>;\n\n}\n\n\n\nimpl ToBase58Check for [u8] {\n\n fn to_base58check(&self) -> String {\n\n // 4 bytes checksum\n\n let mut payload = Vec::with_capacity(self.len() + 4);\n\n payload.extend(self);\n\n let checksum = double_sha256(self);\n\n 
payload.extend(&checksum[..4]);\n\n\n\n payload.to_base58()\n\n }\n\n}\n\n\n", "file_path": "crypto/src/base58.rs", "rank": 62, "score": 77187.99060953164 }, { "content": "/// A trait for converting a value to base58 encoded string.\n\npub trait ToBase58Check {\n\n /// Converts a value of `self` to a base58 value, returning the owned string.\n\n fn to_base58check(&self) -> String;\n\n}\n\n\n", "file_path": "crypto/src/base58.rs", "rank": 63, "score": 77187.99060953164 }, { "content": "type ChainId = UniString;\n\n\n", "file_path": "rpc/src/encoding/monitor.rs", "rank": 64, "score": 76795.36749902095 }, { "content": "type ItemCount = u16;\n\n\n\n/// Precisely identifies location of a record in a commit log.\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize)]\n\npub struct Location(Offset, ByteLimit);\n\n\n\nimpl Location {\n\n #[inline]\n\n pub fn is_consecutive(&self, prev: &Location) -> bool {\n\n (prev.0 < self.0) && (self.0 - prev.0 == 1)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Location {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_fmt(format_args!(\"Location({},{})\", self.0, self.1))\n\n }\n\n}\n\n\n\nimpl BincodeEncoded for Location {}\n\n\n\n/// Range of values to get from a commit log\n\n#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Range(Offset, ByteLimit, ItemCount);\n\n\n", "file_path": "storage/src/persistent/commit_log.rs", "rank": 65, "score": 76795.36749902095 }, { "content": "type ByteLimit = usize;\n", "file_path": "storage/src/persistent/commit_log.rs", "rank": 66, "score": 76795.36749902095 }, { "content": "pub fn apply_block(\n\n chain_id: RustBytes,\n\n block_header: RustBytes,\n\n operations: Vec<Option<Vec<RustBytes>>>)\n\n -> Result<Result<ApplyBlockResult, ApplyBlockError>, OcamlError> {\n\n runtime::execute(move || {\n\n let ocaml_function = ocaml::named_value(\"apply_block\").expect(\"function 'apply_block' is not registered\");\n\n\n\n // call ffi\n\n match 
ocaml_function.call3_exn::<OcamlHash, OcamlBytes, List>(\n\n chain_id.convert_to(),\n\n block_header.convert_to(),\n\n operations_to_ocaml(&operations),\n\n ) {\n\n Ok(validation_result) => {\n\n let validation_result: Tuple = validation_result.into();\n\n\n\n let validation_result_message: Str = validation_result.get(0).unwrap().into();\n\n let context_hash: OcamlHash = validation_result.get(1).unwrap().into();\n\n let block_header_proto_json: Str = validation_result.get(2).unwrap().into();\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 67, "score": 75977.55725577715 }, { "content": "/// Applies new block to Tezos ocaml storage, means:\n\n/// - block and operations are decoded by the protocol\n\n/// - block and operations data are correctly stored in Tezos chain/storage\n\n/// - new current head is evaluated\n\n/// - returns validation_result.message\n\npub fn apply_block(\n\n chain_id: &ChainId,\n\n block_header: &BlockHeader,\n\n operations: &Vec<Option<OperationsForBlocksMessage>>) -> Result<ApplyBlockResult, ApplyBlockError> {\n\n\n\n // check operations count by validation_pass\n\n if (block_header.validation_pass() as usize) != operations.len() {\n\n return Err(ApplyBlockError::IncompleteOperations {\n\n expected: block_header.validation_pass() as usize,\n\n actual: operations.len(),\n\n });\n\n }\n\n\n\n let block_header = match block_header.as_bytes() {\n\n Err(e) => return Err(\n\n ApplyBlockError::InvalidBlockHeaderData {\n\n message: format!(\"Block header as_bytes failed: {:?}, block: {:?}\", e, block_header)\n\n }\n\n ),\n\n Ok(data) => data\n", "file_path": "tezos/client/src/client.rs", "rank": 68, "score": 75977.55725577715 }, { "content": "pub trait HasSingleValue {\n\n fn get_str(&self, key: &str) -> Option<&str>;\n\n\n\n fn get_u64(&self, key: &str) -> Option<u64> {\n\n self.get_str(key).and_then(|value| value.parse::<u64>().ok())\n\n }\n\n\n\n fn get_usize(&self, key: &str) -> Option<usize> {\n\n self.get_str(key).and_then(|value| 
value.parse::<usize>().ok())\n\n }\n\n\n\n fn contains_key(&self, key: &str) -> bool {\n\n self.get_str(key).is_some()\n\n }\n\n}\n\n\n\nimpl HasSingleValue for Params {\n\n fn get_str(&self, key: &str) -> Option<&str> {\n\n self.iter().find_map(|(k, v)| {\n\n if k == key {\n", "file_path": "rpc/src/server/handler.rs", "rank": 69, "score": 75977.55725577715 }, { "content": "pub trait ToBytes {\n\n fn to_byte_vec(&self) -> Vec<u8>;\n\n}\n\n\n\nimpl <E,T> ToBytes for BitVec<E,T>\n\n where\n\n E: Cursor,\n\n T: Bits\n\n{\n\n fn to_byte_vec(&self) -> Vec<u8> {\n\n let mut bytes = vec![];\n\n let mut byte = 0;\n\n let mut offset = 0;\n\n for (idx_bit, bit) in self.iter().rev().enumerate() {\n\n let idx_byte = (idx_bit % 8) as u8;\n\n byte.set(idx_byte, bit);\n\n if idx_byte == 7 {\n\n bytes.push(byte);\n\n byte = 0;\n\n }\n\n offset = idx_byte;\n\n }\n\n if offset != 7 {\n\n bytes.push(byte);\n\n }\n\n bytes.reverse();\n\n bytes\n\n }\n\n}\n", "file_path": "tezos/encoding/src/bit_utils.rs", "rank": 70, "score": 75977.55725577715 }, { "content": "/// Provides trait that must be implemented by a protocol runner.\n\npub trait ProtocolApi {\n\n\n\n /// Apply block\n\n fn apply_block(chain_id: &ChainId, block_header: &BlockHeader, operations: &Vec<Option<OperationsForBlocksMessage>>) -> Result<ApplyBlockResult, ApplyBlockError>;\n\n\n\n /// Change tezos runtime configuration\n\n fn change_runtime_configuration(settings: TezosRuntimeConfiguration) -> Result<(), TezosRuntimeConfigurationError>;\n\n\n\n /// Command tezos ocaml code to initialize storage.\n\n fn init_storage(storage_data_dir: String, tezos_environment: TezosEnvironment) -> Result<TezosStorageInitInfo, TezosStorageInitError>;\n\n\n\n /// Command tezos ocaml code to generate a new identity.\n\n fn generate_identity(expected_pow: f64) -> Result<Identity, TezosGenerateIdentityError>;\n\n}\n", "file_path": "tezos/wrapper/src/protocol.rs", "rank": 71, "score": 75977.55725577715 }, { "content": "/// This trait extends 
basic column family by introducing Codec types safety and enforcement\n\npub trait KeyValueSchema {\n\n type Key: Codec;\n\n type Value: Codec;\n\n\n\n fn descriptor() -> ColumnFamilyDescriptor {\n\n ColumnFamilyDescriptor::new(Self::name(), Options::default())\n\n }\n\n\n\n fn name() -> &'static str;\n\n}\n\n\n\npub struct CommitLogDescriptor {\n\n name: String,\n\n}\n\n\n\nimpl CommitLogDescriptor {\n\n pub fn name(&self) -> &str {\n\n self.name.as_str()\n\n }\n\n}\n\n\n", "file_path": "storage/src/persistent/schema.rs", "rank": 72, "score": 74838.8201094169 }, { "content": "pub trait CommitLogSchema {\n\n // TODO: split value to `ValueIn` and `ValueOut` - we will start to use references in `ValueIn` but that will introduce\n\n // lifetime bound which is not currently supported for associated types. Unless we want to all lifetime\n\n // to the `CommitLogSchema`.\n\n type Value: Codec;\n\n\n\n fn descriptor() -> CommitLogDescriptor {\n\n CommitLogDescriptor {\n\n name: Self::name().into()\n\n }\n\n }\n\n\n\n fn name() -> &'static str;\n\n}\n", "file_path": "storage/src/persistent/schema.rs", "rank": 73, "score": 74833.89970992552 }, { "content": "pub trait BitReverse {\n\n fn reverse(&self) -> Self;\n\n}\n\n\n\nimpl <E,T> BitReverse for BitVec<E,T>\n\n where\n\n E: Cursor,\n\n T: Bits\n\n{\n\n #[inline]\n\n fn reverse(&self) -> BitVec<E,T> {\n\n let mut reversed: BitVec<E,T> = BitVec::new();\n\n for bit in self.iter().rev() {\n\n reversed.push(bit)\n\n }\n\n reversed\n\n }\n\n}\n\n\n\n\n", "file_path": "tezos/encoding/src/bit_utils.rs", "rank": 74, "score": 74833.89970992552 }, { "content": "pub trait BitTrim {\n\n fn trim_left(&self) -> Self;\n\n}\n\n\n\nimpl <E,T> BitTrim for BitVec<E,T>\n\n where\n\n E: Cursor,\n\n T: Bits\n\n{\n\n fn trim_left(&self) -> BitVec<E,T> {\n\n let mut trimmed: BitVec<E,T> = BitVec::new();\n\n\n\n let mut notrim = false;\n\n for bit in self.iter() {\n\n if bit {\n\n trimmed.push(bit);\n\n notrim = true;\n\n } else if notrim {\n\n 
trimmed.push(bit);\n\n }\n\n }\n\n trimmed\n\n }\n\n}\n\n\n", "file_path": "tezos/encoding/src/bit_utils.rs", "rank": 75, "score": 74833.89970992552 }, { "content": "/// By default channel is disabled.\n\n///\n\n/// This is needed to prevent unit tests from overflowing the shared channel.\n\npub fn enable_context_channel() {\n\n CHANNEL_ENABLED.store(true, Ordering::Release)\n\n}\n\n\n", "file_path": "tezos/context/src/channel.rs", "rank": 76, "score": 74833.89970992552 }, { "content": "fn assert_eq_hash_and_header(expected_hash: &str, expected_header: &str, header_tuple: Tuple) {\n\n assert_eq!(2, header_tuple.len());\n\n assert_eq_hash(expected_hash, header_tuple.get(0).unwrap().into());\n\n assert_eq_bytes(expected_header, header_tuple.get(1).unwrap().into());\n\n}\n\n\n", "file_path": "tezos/interop/tests/test_bytes_roundtrips.rs", "rank": 77, "score": 74777.24902112265 }, { "content": "type RandomSeedState = Vec<u8>;\n\npub type TezosPRNGResult = Result<(i32, RandomSeedState), TezosPRNGError>;\n\n\n", "file_path": "rpc/src/helpers.rs", "rank": 78, "score": 74634.54019660247 }, { "content": "pub fn get_block_header(chain_id: RustBytes, block_header_hash: RustBytes) -> Result<Result<Option<RustBytes>, BlockHeaderError>, OcamlError> {\n\n runtime::execute(move || {\n\n let ocaml_function = ocaml::named_value(\"get_block_header\").expect(\"function 'get_block_header' is not registered\");\n\n match ocaml_function.call2_exn::<OcamlHash, OcamlHash>(chain_id.convert_to(), block_header_hash.convert_to()) {\n\n Ok(block_header) => {\n\n let block_header: OcamlBytes = block_header.into();\n\n if block_header.is_empty() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(block_header.convert_to()))\n\n }\n\n }\n\n Err(e) => {\n\n Err(BlockHeaderError::from(e))\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 79, "score": 74158.36829910368 }, { "content": "/// Decode value from binary format.\n\npub trait Decoder: Sized {\n\n /// Try to decode message 
from its binary format\n\n fn decode(bytes: &[u8]) -> Result<Self, SchemaError>;\n\n}\n\n\n", "file_path": "storage/src/persistent/codec.rs", "rank": 80, "score": 73919.21384378288 }, { "content": "/// Encode input value to binary format.\n\npub trait Encoder: Sized {\n\n /// Try to encode instance into its binary format\n\n fn encode(&self) -> Result<Vec<u8>, SchemaError>;\n\n}\n\n\n", "file_path": "storage/src/persistent/codec.rs", "rank": 81, "score": 73919.21384378288 }, { "content": "pub trait Interchange<T> {\n\n fn convert_to(&self) -> T;\n\n fn is_empty(&self) -> bool;\n\n}\n\n\n\nimpl Interchange<OcamlBytes> for RustBytes {\n\n fn convert_to(&self) -> OcamlBytes {\n\n Array1::from(self.as_slice())\n\n }\n\n\n\n fn is_empty(&self) -> bool {\n\n self.is_empty()\n\n }\n\n}\n\n\n\nimpl Interchange<RustBytes> for OcamlBytes {\n\n fn convert_to(&self) -> RustBytes {\n\n self.data().to_vec()\n\n }\n\n\n", "file_path": "tezos/interop/src/ffi.rs", "rank": 82, "score": 73919.21384378288 }, { "content": "pub trait SkipList {\n\n fn len(&self) -> usize;\n\n\n\n fn levels(&self) -> usize;\n\n\n\n fn contains(&self, index: usize) -> bool;\n\n}\n\n\n\nimpl SkipList for DatabaseBackedSkipList {\n\n /// Get number of elements stored in this node\n\n #[inline]\n\n fn len(&self) -> usize {\n\n self.state.len\n\n }\n\n\n\n #[inline]\n\n fn levels(&self) -> usize {\n\n self.state.levels\n\n }\n\n\n\n /// Check, that given index is stored in structure\n\n #[inline]\n\n fn contains(&self, index: usize) -> bool {\n\n self.state.len > index\n\n }\n\n}\n\n\n", "file_path": "storage/src/skip_list/skip_list.rs", "rank": 83, "score": 73751.64058134274 }, { "content": "#[test]\n\npub fn flat_list_restore_state() {\n\n let tmp_storage = TmpStorage::create(\"__flat_list:flat_list_restore_state\").expect(\"Storage error\");\n\n {\n\n let mut list: Box<dyn TypedSkipList<_, _, OrderedValue>> = Box::new(DatabaseBackedFlatList::new(8, tmp_storage.storage().kv()).expect(\"failed to create flat 
list\"));\n\n let mut map = HashMap::new();\n\n map.insert(0, 0);\n\n let expected = map.clone();\n\n list.push(OrderedValue::new(map)).expect(\"failed to store value into flat list\");\n\n let val = list.get(0).expect(\"error during storage operation\").expect(\"Expected value in storage\");\n\n assert_eq!(val, OrderedValue::new(expected));\n\n }\n\n // drop the list reference, and hope it will hydrate correctly\n\n {\n\n let mut list: Box<dyn TypedSkipList<_, _, OrderedValue>> = Box::new(DatabaseBackedFlatList::new(8, tmp_storage.storage().kv()).expect(\"failed to create flat list\"));\n\n let mut map = HashMap::new();\n\n map.insert(1, 1);\n\n let mut expected = map.clone();\n\n expected.insert(0, 0);\n\n list.push(OrderedValue::new(map)).expect(\"failed to store value into flat list\");\n\n let val = list.get(1).expect(\"error during storage operation\").expect(\"Expected value in storage\");\n\n assert_eq!(val, OrderedValue::new(expected));\n\n }\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 84, "score": 73751.64058134274 }, { "content": "/// Trait for json encoding to implement.\n\npub trait JsonMessage {\n\n\n\n /// Produce JSON from the struct.\n\n fn as_json(&self) -> Result<String, ser::Error>;\n\n}\n\n\n\nimpl<T> JsonMessage for T\n\n where T: HasEncoding + Serialize + Sized {\n\n\n\n #[inline]\n\n fn as_json(&self) -> Result<String, ser::Error> {\n\n let mut writer = JsonWriter::new();\n\n writer.write(self, &Self::encoding())\n\n }\n\n}\n\n\n\n/// Message hash error\n\n#[derive(Debug, Fail)]\n\npub enum MessageHashError {\n\n #[fail(display = \"Message serialization error\")]\n", "file_path": "tezos/messages/src/p2p/binary_message.rs", "rank": 85, "score": 73751.64058134274 }, { "content": "#[test]\n\npub fn skip_list_simulate_ledger() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:skip_list_simulate_ledger\").expect(\"Storage error\");\n\n let list: Box<dyn TypedSkipList<_, _, OrderedValue>> = 
Box::new(DatabaseBackedSkipList::new(8, tmp_storage.storage().kv()).expect(\"failed to create skip list\"));\n\n simulate_ledger(list);\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 86, "score": 73751.64058134274 }, { "content": " pub trait CachedData {\n\n fn cache_reader(&self) -> &dyn CacheReader;\n\n fn cache_writer(&mut self) -> Option<&mut dyn CacheWriter>;\n\n }\n\n\n\n #[derive(Clone, Default)]\n\n pub struct BinaryDataCache {\n\n data: Option<Vec<u8>>\n\n }\n\n\n\n impl CacheReader for BinaryDataCache {\n\n #[inline]\n\n fn get(&self) -> Option<Vec<u8>> {\n\n self.data.as_ref().cloned()\n\n }\n\n }\n\n\n\n impl CacheWriter for BinaryDataCache {\n\n #[inline]\n\n fn put(&mut self, body: &[u8]) {\n", "file_path": "tezos/messages/src/p2p/binary_message.rs", "rank": 87, "score": 73751.64058134274 }, { "content": "#[test]\n\npub fn list_check_get_key() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_check_get_key\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<_, _, OrderedValue>> = Box::new(DatabaseBackedSkipList::new(8, tmp_storage.storage().kv()).expect(\"failed to create skip list\"));\n\n for x in 0..=7 {\n\n let mut map = HashMap::new();\n\n map.insert(x, x);\n\n list.push(OrderedValue::new(map)).expect(\"failed to store value into skip list\");\n\n }\n\n assert_eq!(list.levels(), 2);\n\n let val = list.get_key(7, &7);\n\n assert_eq!(val.unwrap(), Some(7));\n\n let val = list.get_key(6, &7);\n\n assert_eq!(val.unwrap(), None);\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 88, "score": 73751.64058134274 }, { "content": " pub trait CacheWriter {\n\n fn put(&mut self, body: &[u8]);\n\n }\n\n\n", "file_path": "tezos/messages/src/p2p/binary_message.rs", "rank": 89, "score": 73751.64058134274 }, { "content": "#[test]\n\npub fn flat_list_simulate_ledger() {\n\n let tmp_storage = TmpStorage::create(\"__flat_list:flat_list_simulate_ledger\").expect(\"Storage error\");\n\n let list: Box<dyn 
TypedSkipList<_, _, OrderedValue>> = Box::new(DatabaseBackedFlatList::new(8, tmp_storage.storage().kv()).expect(\"failed to create flat list\"));\n\n simulate_ledger(list);\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 90, "score": 73751.64058134274 }, { "content": " pub trait CacheReader {\n\n fn get(&self) -> Option<Vec<u8>>;\n\n }\n\n\n", "file_path": "tezos/messages/src/p2p/binary_message.rs", "rank": 91, "score": 73751.64058134274 }, { "content": "#[test]\n\npub fn list_check_faster_lane() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_check_faster_lane\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<_, _, Value>> = Box::new(DatabaseBackedSkipList::new(6, tmp_storage.storage().kv()).expect(\"failed to create skip list\"));\n\n for index in 0..=7 {\n\n list.push(Value::new(vec![index])).expect(\"failed to push value to skip list\");\n\n }\n\n assert_eq!(list.levels(), 2);\n\n let val = list.get(7).expect(\"failed to get value from skip list\");\n\n assert_eq!(val.is_some(), list.contains(7), \"List `get` and `contains` return inconsistent answers\");\n\n assert!(val.is_some());\n\n assert_eq!(val.unwrap(), Value::new((0..=7).collect()));\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 92, "score": 73751.64058134274 }, { "content": "#[test]\n\npub fn list_check_lane_traversal() {\n\n let tmp_storage = TmpStorage::create(\"__skip_list:list_check_lane_traversal\").expect(\"Storage error\");\n\n let mut list: Box<dyn TypedSkipList<_, _, Value>> = Box::new(DatabaseBackedSkipList::new(7, tmp_storage.storage().kv()).expect(\"failed to create skip list\"));\n\n for index in 0..=63 {\n\n list.push(Value::new(vec![index])).expect(\"failed to push value to skip list\");\n\n }\n\n assert_eq!(list.levels(), 3);\n\n let val = list.get(63).expect(\"failed to get value from skip list\");\n\n assert_eq!(val.is_some(), list.contains(63), \"List `get` and `contains` return inconsistent answers\");\n\n 
assert!(val.is_some());\n\n assert_eq!(val.unwrap(), Value::new((0..=63).collect()));\n\n}\n\n\n", "file_path": "storage/tests/skip_list.rs", "rank": 93, "score": 73751.64058134274 }, { "content": "type OcamlBytes = Array1<u8>;\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 94, "score": 73545.7597350379 }, { "content": "type ContextValue = Vec<u8>;\n\n\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 95, "score": 73545.7597350379 }, { "content": "type ContextKey = Vec<String>;\n", "file_path": "tezos/interop_callback/src/callback.rs", "rank": 96, "score": 73545.7597350379 }, { "content": "/// Unwraps a block hash or provides alternative block hash.\n\n/// Alternatives are: genesis block or current head\n\nfn unwrap_block_hash(block_id: Option<&str>, state: &RpcCollectedStateRef, genesis_hash: &str) -> String {\n\n block_id.map(String::from).unwrap_or_else(|| {\n\n let state = state.read().unwrap();\n\n state.current_head().as_ref()\n\n .map(|current_head| HashType::BlockHash.bytes_to_string(&current_head.header().hash))\n\n .unwrap_or(genesis_hash.to_string())\n\n })\n\n}\n\n\n\n\n", "file_path": "rpc/src/server/handler.rs", "rank": 97, "score": 73488.05648877553 }, { "content": "#[test]\n\nfn can_deserialize_and_serialize_current_branch_message() {\n\n let data_list = [\n\n 
\"8eceda2f000000ce000306f80146a6aefde9243ae18b191a8d010b7237d5130b3530ce5d1f60457411b2fa632d000000005c73d01c04acecbfac449678f1d68b90c7b7a86c9280fd373d872e072f3fb1b395681e71490000001100000001000000000800000000005ba1ca934484026d24be9ad40c98341c20e51092dd62bbf470bb9ff85061fa981ebbd90000000000031b4f9aff00c6d9a5d1fbf5eda49a01e52017dc78ca1d7a45f3f4fe32840052f9845a61ccdd6cf20139cedef0ed52395a327ad13390d9e8c1e999339a24f8513fe513ed689a46a6aefde9243ae18b191a8d010b7237d5130b3530ce5d1f60457411b2fa632d9aeb8e663111c3e5d3406bbf263a2d5869475ea8552bf16b28ef26a3ffac590a58f26ddf689bdc4547de09bc2ddb8e1e7a7a0646e40a49873578525c798c42e4c89f1799339c0dc8daa87f370d3a9a9ab4299a5d9d9082e1cfd3cd0cf1986f3f7543a65cd9bb6c0a96cd881cfcfd720178d859de8bceb4254bae78f29f0202773aeddd330be233bde3b84900cddff0546c952c3e32c36b1d27f96179c339230bf76cb1d94f23b8ba8542122e7a8a19d1e4f683f7961daed8eaf67897991a1a4de78712518593773de4b3c20ff3892c0bad466374ee96f452d76b1fa5ddd776f534505c1a16e7eea2cc8d75c484d67296678401b21cdc1c18ab4be2354ac2d83f85c2cc6844fe52989734d425f57dea06151085db0c37f39030c4cfbefc8d8a045d3a8c29b88d91c15a47e51b8e793845c00dcaf7b199f4030c43d561e10b3a24bec9b94c48f24a7641cdcce20ba3bd2fd2626d45e939098bd6ec36e4b000aae3babad329a056ebd8793270212913874adaba0141b67ddddd65128318303ef7bff158a78591c2c52ca7b9a0c4fbf06631565c3a8f823248fd91ebdc873d5f1d884e66aef6b7866a94c4eded8e8b4ecd5352b15f5c97a59fda96a4964422e7fe2c3077c471b8da1fe32ca4d6741f58bd848e332e0e51653109d345edbeda5460f9e816dccaf0836d4c1176dbbba0a6b91445b3ccecee204b542b1bcb05cd06977d845910c90a00ec90188228ae47d0a16a1ab95b2d91b21c876f5c5ea179bb3f3410808fd5cf4aef34d38a2442819daa51d3c33dd30418502e686245fffacd6cede9a686c6b79fb6e17c83b48829c12f073049434a574e21a3f1776b68bf65a0366f32bdc144e86eb40feac4a48804b6e0cd5548f5edad790336fd29354b737b129d7fdc7b6fa4049e4c570961d1e23926c5acae7b763cdbc805f3f27f7726dc347573ca9b083f8268b148037bca6bbba46237e1083e07e8aee4621f2802c9ef50ba576a33e3c8673d75d9c0df662f7884ecf8d668fcfe61ed05077de4e624406a81a3c7f1bb9ef4aa9589620b48d4d3489f
2bf94b738024af3ac7ecff13b9067d47b4562ebb9e14579d0df81a74802856020c91d94ca50f21dc20b660d8d9121689b7e967a47a1712a10f334762211ba39cc84c0b93d909f4b762abdac509d9fc629e4fcbe252180fda831a535e10bef8f34dc999842c37c57dd995e16d09c1198c063de759b4179ea0a39fc4acf7fb9f7038606cc0dd1f69f4c7cf13fc7e2ecf9c41b49817bc6388a7f7bd02ac1f6d48e948133c4bcb1ac9291b7c3d3c6da243db457cefd3e9e858a6fb1ddd87bf17185cf2d8e80807767cb6d81923b700aa86747e00dea299d0d5fea6110468ddd369ce564175110d3f0c4c1992a945ecb5b6f80b43fab2b756dc62530e144e5037f879ecdad2b8fc1934577d1360bdffe8fc393e02ccd8af1b4e60302894ce6efb12266e04de3c67f0f820e847904a0a1d5648e4fad65f1ef8d9edb65bc7106580cd69b3253d1ee1a14deda1102b16ff5f191c69642d29873df9ff44a9b72f8ff8431fc2d09c6f5bc1bc06ad8a67e66b7cce84cfdb363ab2261d4a4029a4a619f0b41d6c60b9e5476ab42e007be918e46251984f6c6598ccbf8c168c6c826dbf39d6cc2135c5c1b121bc71ae49dccaf070d3e356348d0283e6922b1379dde5434cbfc470466593b36b1589fde46be2142bfa3ad77694af14d6d9ec37d6666a1abea506ad199155f1b76e7cf53c0634b44ae294581263482d52fe6d9190200c6437c7dc8256ccea74afb960f1d9525218fb5a6b22c6eb2e84a81483077b5ccebd78c5eedbcdc09a8b21fba0ec5033087632d66db9dc5a5028efa085c5006abe83cb01bc64782b7d35d7f464eb1f9c0ef8da0686367daf1185d0b7c44542b9648568bbc9fd1e25a7cacf1b11aa75dc5e030a495ba32973f9ae9a4dc7df3ac5816e5b5ed86ec64dbea3c11455dd725cede53d76aa1dae91af6add713e2f8592d82035e3ea2735427f199186b977bcf895455b1b3187d1835bea62586560697ff8200fa8f6d9cd4b19b72c0af8608b9f59279914b13316c08f8b86ea6ae45333e578beb9e935340d833b32e67a44cf11b0502ec30cb8de665d23277cb1d84e60ecd3220161e050b4af34cd04bece5d62b96d947ca2d46b93a88e1bb8829ced792de8615749f5a1ff47ecea00847cfac403c61276d5ece498b5b5e317c2f9b04c8a77855c198b74b8fe2230cdadf9d46d4667beb5de17e99835e90e49188077c7cf235b3a7da3d25da02b64d53170458bf1850fd9188e62bfb42b62020631cf26541cec29b450bb6512c0e0a02dfc9b51621c45709328ce4b730217926038c01202b8bad2b090a7b96f772ad65f8f96d8cf6a0d2cc86654defff2e19598bb4d12f91035915248bec4b3ab96fd698b588092b8c817fa6343397ac951ef3b21c0d9fbddd3de37a71a7c384d8c2aab
928a0f5c4150c196213d1e9d2c503eb0fd509d80131676f72a0181286dd7920b28d140bab35802205be190d10887dc9db6a263c4ac8d9687e04c583efcc18c43e389c3996706468377cb4433cebecc70e3abe168311a0e4968130fa8e85931629741a914e0728d03730e48cd72f8eb1aa1141b3d5abfdca5ee6aaded702e2475e3f3e7277038d4a515b18c35b20adafb9765e6414f95c38d2cbf6e8a1b5710aeb66e0f99ebcf6ba9bf0f95c023444c98ee48b1a289d6c3352e355dd06fc1cdb898ed37edf78e01f58ad0fd14a535d325c307be4f1177ce72ff1d70cd6fbcf635727a968c78a1ccad0d762c7d15364b152290b0cdea403283ef60520477172f6db1d2180bdd32ca0a194085d61bbc3cb50a6a3c905fab7daaaccc92ca0a3525c23592e0861337df759fe8eb93df114b28b94d5a4c26e9635fbe9f9b91985c522dcc886b9c582589fa1781437b6991b63821f3aaa2a2f3d94df40e21c20b42cd30393f639b065273cd33fe56419165f63a89b23c6189c426fe4c451e1d6afff82bdb8842b42314d99372a7cc3962f0efe77921301c92f4084bb8207c1c96d241416883e276ad4bc8d6b1051e6a11f0827458368fef27cda6760933b321c372b00255c31333997a96cb78c7fbd82a1b905c7e87ae4aa7066c13a4c21cbf09a0a5c345433373b81ab818bc6f6d22964883d4adc3f16d61cd1514baf9a8301add991c83c0cf10c7ba641a11ccc2789680d37cac29ebb9c07ad31567f733f2df978710d5fd768b60276ec2d5129e72813f0cb9efc569aa73e19b57aa623063bab01ddd98c53a13c85c7909eb626ff3ffa37ce8a7b10f235f99f0ec7b533b7f537be7ded4c08b30976bbac292a4e8f4bb85a83edb53ceb978c7f615cbc1101df39b74697dadd90fa7cc8fc5102fc483026bbf3c66f0749a90c16fe3622558bc6999ebde5ba64ca890f71430b402b8c9ba012ad793a1c70b141b48a07fbbb525965d949c992725740c07d1415ef9b26fe63d50a67c6d4979d68f8dad3e32cb03e26e4e4e64d50f94ab17286173825374a9f6e96ff50466c2b699a38fc69e5a79e7e319a62693ec85e2a8b2ef77a24de96bfab7d7a76d343a569c8b2e572d77757589b9d2a11e8ea8ff56e22ebae7c883053c8db992684ba05f0a6574f8162e480a32b8882d489a7a8d4313caadbcf44418d400983acf65f952ece41d21fee18c67b12a26949294111bbed41d88aa5e26a78bdbdb509e664f431f817c33f8a22b0d9a2110d16159fcdcaf000a70e51c6cdc3008549a48c47091aa2f8320ba4b8e060b71591da10abc7f5e080c92c2d7537a29804755fd50c02cbad30687b4cb66b2d0eaa9b82dc75daf8f685ad3f8cdcdae9c02d60f4218f008777a4bf505015bfdeb7647f1869b45095c298a
e4f16cf11518a778716d6f7972e954aeb3c6774550e41534f1c8fae506bba6cd233efd13c8ab72be51b345f6132fbf0e38d88457254d877da235e168d8f1d97e5edac77fad58ae4189da88534ec437b619cab43302519c7d654edd6d42a0bbfb891593fb9ad3526bb8dc7a38c8ecc3fe591bfa3e0750ec23751475c88678fb1109483e1f7661695a727ce0397a1ef0e7856a6ed253df9e97a7cd1c5dc14534fb296f0cd58b93fa142d771d1db1df1c3a9188bd1a3a27ba08ffe1b340fa70dfb4fbc3bf47acbda083c110f07b3c479717d738271a30ec44e550572024b0fa23a48165542ac931606e9716fa6a8a7d5b70982b533b649f3624b0221a96c69263e5bd844b04724e1b68242b01ef8daa8bd5bf02e293779af56807c40184c1192fe1c9c1ebf0da4906f3c319f84afe57890bacf65947fada4b70a03323e955e529ae9127a2b2bff2d6f7afd14301035b2656ccf6d0e44683bac4760c370c7339513ea55ef7a0b24e939338215a82dfb7fbc8af11b8b207148955330628f19a77a4b7061106dbfc6c0598ab111c598126dc61c1fc1f34e8fe046731f05ad52a614cd91e8672c9dc6889a37d6b198d757272dcb8c4c9b024d3ac6962eded524f9e281780a3e149cb406bce50f6de5988f9bdf29f3c1c2f1deb4b13407b63a3900148d48e26ed33093a1f99394f1fafea588b79c7d516ea9e0d5955f44e07dca183a6dbc5f4c562697b0b3ec37d8d63493624774b283b9aabf22aac52e5200acd3c89fddbd16a23cdc1e1f081d08c3c9277d43b3bf2ce488a563350e07b1d89cf3753ff777e272344684200d4a3d5b3afc6f8ddd2be6f9c0ad32c3922733d6461cf0446c7bfd99f2f32a3189cd8882ffe6aabf39d08b43eb37dd6de92a92e9484ec8b6b6c823502fa6b6780a3924b5ea0e93bc07a7261e78d72440f2a16a614d3f29cf0951b561e76e6bacc51030257370b813ef356d76fa61e99b82af73b36365017f4b03cd537ea6aceca48c3cc2b34f163beb21203f0498fd7ec42e309463ea343cdddd326234517d3705b05dc3e8d66132d039fe6a7461e09196b6859606ed18391809b1d5691793beb77ded910997e667a1eec275446ddae8cd6015c632acf5d9f92f014b5da5497e23a07c0d4413e426a57005d8a79eb2dc11b99410c1858db28c55769d7724027665984c98b6dacc79a8aafde11d50bd0b87253c267821651c302e3d993bfb0e52656b5278b96c00c3474d7e2632a9936551371578840a5a999999863fda5ef6e8d04b0ddd807d4905c16c3449580622fc0fa5288f8039cd0cf7ca0f591acc6eb4fbada88c7fdd273b736b27b5305ee25c079cc18a5c1956793302cf8d679b26d22593f9f7858c5ff95f03a8e738652a892b89ec667e87bb35dbc552e3a6123325c
94308dd4580fc91111a64698b8a18e36b48f8d0c770c2c1374a4fee29693cec76a3dc724894691916cb10d06dba3207d6c67d1ae49233a25a685bd23b549e1d756904e925a42db2b00fb56c8e4f94ff9b4af7d65b8d9fc46108878c823aa94d76b8b55a4c8d0a8379d74b2eff1a4252a57150f2233037af553c9404f3bef48e7e4b34db072ec28c5d160bb1b7967d00ea088117b6d34fb3a67e41e16f6f9c09b45760786168cc741e43bb4b73f095257503ca15ffc84097754f42633388e8959b01e2135edcf43c455f5ebb395b1c2dcd9c99ed8856415681c16f43b71caeae745a0e60933bdaa98d0ca720fd52861eefde238d5f63e49a2ef8b936472ac00c430edb8e4298da4df3bc18fb156d9495127db36c6240883c6858c25eaa2178443aca5d1b4c3dfea078773d8833fbb6b649df8136245b6372fab1e45ce78031349df0e3a4f259768d4a948aea689485f8717cf126a836cbabcb14cdd850645e37aad3fe735588e4311dfbc2587ff9ef1c4c23c6b0f3f0c44570e9654e2d77eaf2e87558ef06d9570930d5ade7198a4f4725b354266aa699aaf18fb241c5daa2fce132ff4b5217aa8c977bfcb7e8ded6207a88919559e681b1e9ffc745958f504074740ddedb7c3bc162290ee73fa0563f03648c8975ed43a2f97b2c001bea83484fc7396192de64b90e855ce3f0c193c93416c7eb0b5821f16d99a046687e18a6f6ba0e35725412714d15b354ab8f3de8a1c462b82070568d617e203415b414050feea9442f310d461814930cd28dd9d3eda8cdf4258c40df5ec8f3d8eb9a033b3a8d00b18b9ed04552eedf5efea93f6adbf2e6c117a6904478b0dab56d49ee382507aba19bf48ee1685f29d2e9f0636dd24d88a28dba43bc035720d1ba70b2186b160d386bb08037dfa7130f19d369a9d94ebfa8796d5f64f15bf3d894e7a882f14124a40b5e2898f454e4fbd2a3fd3ece11641dad2d0da8fcf233671b6a04fcf679d2a381c2544ea6c1ea29ba6157776ed8424affa610d\",\n\n ];\n\n\n\n let branches: Vec<CurrentBranchMessage> = data_list.iter()\n\n .map(|d| hex::decode(d).unwrap())\n\n .map(|bytes| CurrentBranchMessage::from_bytes(bytes).unwrap())\n\n .collect();\n\n\n\n for (idx, branch) in branches.iter().enumerate() {\n\n let expected = data_list[idx];\n\n let actual = hex::encode(branch.as_bytes().unwrap());\n\n assert_eq!(expected, actual);\n\n }\n\n}\n\n\n", "file_path": "tezos/messages/tests/encoding_current_branch.rs", "rank": 98, "score": 73218.63660641688 }, { "content": "/// Crate new 
randomly named unix domain socket file in temp directory.\n\npub fn temp_sock() -> PathBuf {\n\n let mut rng = thread_rng();\n\n let temp_dir = env::temp_dir();\n\n let chars = iter::repeat(())\n\n .map(|()| rng.sample(Alphanumeric))\n\n .take(7)\n\n .collect::<String>();\n\n\n\n temp_dir.join(chars + \".sock\")\n\n}\n\n\n", "file_path": "ipc/src/lib.rs", "rank": 99, "score": 72775.55629793127 } ]
Rust
miniz_oxide/src/inflate/mod.rs
MichaelMcDonnell/miniz_oxide
b6ca295deaecd549c504873481ceb4e2a65a1933
use crate::alloc::boxed::Box; use crate::alloc::vec; use crate::alloc::vec::Vec; use ::core::cmp::min; use ::core::usize; pub mod core; mod output_buffer; pub mod stream; use self::core::*; const TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS: i32 = -4; const TINFL_STATUS_BAD_PARAM: i32 = -3; const TINFL_STATUS_ADLER32_MISMATCH: i32 = -2; const TINFL_STATUS_FAILED: i32 = -1; const TINFL_STATUS_DONE: i32 = 0; const TINFL_STATUS_NEEDS_MORE_INPUT: i32 = 1; const TINFL_STATUS_HAS_MORE_OUTPUT: i32 = 2; #[repr(i8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum TINFLStatus { FailedCannotMakeProgress = TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS as i8, BadParam = TINFL_STATUS_BAD_PARAM as i8, Adler32Mismatch = TINFL_STATUS_ADLER32_MISMATCH as i8, Failed = TINFL_STATUS_FAILED as i8, Done = TINFL_STATUS_DONE as i8, NeedsMoreInput = TINFL_STATUS_NEEDS_MORE_INPUT as i8, HasMoreOutput = TINFL_STATUS_HAS_MORE_OUTPUT as i8, } impl TINFLStatus { pub fn from_i32(value: i32) -> Option<TINFLStatus> { use self::TINFLStatus::*; match value { TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS => Some(FailedCannotMakeProgress), TINFL_STATUS_BAD_PARAM => Some(BadParam), TINFL_STATUS_ADLER32_MISMATCH => Some(Adler32Mismatch), TINFL_STATUS_FAILED => Some(Failed), TINFL_STATUS_DONE => Some(Done), TINFL_STATUS_NEEDS_MORE_INPUT => Some(NeedsMoreInput), TINFL_STATUS_HAS_MORE_OUTPUT => Some(HasMoreOutput), _ => None, } } } #[inline] pub fn decompress_to_vec(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> { decompress_to_vec_inner(input, 0, usize::max_value()) } #[inline] pub fn decompress_to_vec_zlib(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> { decompress_to_vec_inner( input, inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER, usize::max_value(), ) } #[inline] pub fn decompress_to_vec_with_limit(input: &[u8], max_size: usize) -> Result<Vec<u8>, TINFLStatus> { decompress_to_vec_inner(input, 0, max_size) } #[inline] pub fn decompress_to_vec_zlib_with_limit( input: &[u8], max_size: usize, ) -> Result<Vec<u8>, 
TINFLStatus> { decompress_to_vec_inner(input, inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER, max_size) } fn decompress_to_vec_inner( input: &[u8], flags: u32, max_output_size: usize, ) -> Result<Vec<u8>, TINFLStatus> { let flags = flags | inflate_flags::TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF; let mut ret: Vec<u8> = vec![0; min(input.len().saturating_mul(2), max_output_size)]; let mut decomp = Box::<DecompressorOxide>::default(); let mut in_pos = 0; let mut out_pos = 0; loop { let (status, in_consumed, out_consumed) = decompress(&mut decomp, &input[in_pos..], &mut ret, out_pos, flags); in_pos += in_consumed; out_pos += out_consumed; match status { TINFLStatus::Done => { ret.truncate(out_pos); return Ok(ret); } TINFLStatus::HasMoreOutput => { let new_len = ret .len() .checked_add(out_pos) .ok_or(TINFLStatus::HasMoreOutput)?; if new_len > max_output_size { return Err(TINFLStatus::HasMoreOutput); }; ret.resize(new_len, 0); } _ => return Err(status), } } } pub fn decompress_slice_iter_to_slice<'out, 'inp>( out: &'out mut [u8], it: impl Iterator<Item = &'inp [u8]>, zlib_header: bool, ignore_adler32: bool, ) -> Result<usize, TINFLStatus> { use self::core::inflate_flags::*; let mut it = it.peekable(); let r = &mut DecompressorOxide::new(); let mut out_pos = 0; while let Some(in_buf) = it.next() { let has_more = it.peek().is_some(); let flags = { let mut f = TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF; if zlib_header { f |= TINFL_FLAG_PARSE_ZLIB_HEADER; } if ignore_adler32 { f |= TINFL_FLAG_IGNORE_ADLER32; } if has_more { f |= TINFL_FLAG_HAS_MORE_INPUT; } f }; let (status, _input_read, bytes_written) = decompress(r, in_buf, out, out_pos, flags); out_pos += bytes_written; match status { TINFLStatus::NeedsMoreInput => continue, TINFLStatus::Done => return Ok(out_pos), e => return Err(e), } } Err(TINFLStatus::FailedCannotMakeProgress) } #[cfg(test)] mod test { use super::{ decompress_slice_iter_to_slice, decompress_to_vec_zlib, decompress_to_vec_zlib_with_limit, TINFLStatus, }; 
const encoded: [u8; 20] = [ 120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19, ]; #[test] fn decompress_vec() { let res = decompress_to_vec_zlib(&encoded[..]).unwrap(); assert_eq!(res.as_slice(), &b"Hello, zlib!"[..]); } #[test] fn decompress_vec_with_high_limit() { let res = decompress_to_vec_zlib_with_limit(&encoded[..], 100_000).unwrap(); assert_eq!(res.as_slice(), &b"Hello, zlib!"[..]); } #[test] fn fail_to_decompress_with_limit() { let res = decompress_to_vec_zlib_with_limit(&encoded[..], 8); match res { Err(TINFLStatus::HasMoreOutput) => (), _ => panic!("Decompression output size limit was not enforced"), } } #[test] fn test_decompress_slice_iter_to_slice() { let mut out = [0_u8; 12_usize]; let r = decompress_slice_iter_to_slice(&mut out, Some(&encoded[..]).into_iter(), true, false); assert_eq!(r, Ok(12)); assert_eq!(&out[..12], &b"Hello, zlib!"[..]); for chunk_size in 1..13 { let mut out = [0_u8; 12_usize + 1]; let r = decompress_slice_iter_to_slice(&mut out, encoded.chunks(chunk_size), true, false); assert_eq!(r, Ok(12)); assert_eq!(&out[..12], &b"Hello, zlib!"[..]); } let mut out = [0_u8; 3_usize]; let r = decompress_slice_iter_to_slice(&mut out, encoded.chunks(7), true, false); assert!(r.is_err()); } }
use crate::alloc::boxed::Box; use crate::alloc::vec; use crate::alloc::vec::Vec; use ::core::cmp::min; use ::core::usize; pub mod core; mod output_buffer; pub mod stream; use self::core::*; const TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS: i32 = -4; const TINFL_STATUS_BAD_PARAM: i32 = -3; const TINFL_STATUS_ADLER32_MISMATCH: i32 = -2; const TINFL_STATUS_FAILED: i32 = -1; const TINFL_STATUS_DONE: i32 = 0; const TINFL_STATUS_NEEDS_MORE_INPUT: i32 = 1; const TINFL_STATUS_HAS_MORE_OUTPUT: i32 = 2; #[repr(i8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum TINFLStatus { FailedCannotMakeProgress = TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS as i8, BadParam = TINFL_STATUS_BAD_PARAM as i8, Adler32Mismatch = TINFL_STATUS_ADLER32_MISMATCH as i8, Failed = TINFL_STATUS_FAILED as i8, Done = TINFL_STATUS_DONE as i8, NeedsMoreInput = TINFL_STATUS_NEEDS_MORE_INPUT as i8, HasMoreOutput = TINFL_STATUS_HAS_MORE_OUTPUT as i8, } impl TINFLStatus { pub fn from_i32(value: i32) -> Option<TINFLStatus> { use self::TINFLStatus::*; match value { TINFL_STATUS_FAILED_CANNOT_MAKE_PROGRESS => Some(FailedCannotMakeProgress), TINFL_STATUS_BAD_PARAM => Some(BadParam), TINFL_STATUS_ADLER32_MISMATCH => Some(Adler32Mismatch), TINFL_STATUS_FAILED => Some(Failed), TINFL_STATUS_DONE => Some(Done), TINFL_STATUS_NEEDS_MORE_INPUT => Some(NeedsMoreInput), TINFL_STATUS_HAS_MORE_OUTPUT => Some(HasMoreOutput), _ => None, } } } #[inline] pub fn decompress_to_vec(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> { decompress_to_vec_inner(input, 0, usize::max_value()) } #[inline] pub fn decompress_to_vec_zlib(input: &[u8]) -> Result<Vec<u8>, TINFLStatus> { decompress_to_vec_inner( input, inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER, usize::max_value(), ) } #[inline] pub fn decompress_to_vec_with_limit(input: &[u8], max_size: usize) -> Result<Vec<u8>, TINFLStatus> { decompress_to_vec_inner(input, 0, max_size) } #[inline] pub fn decompress_to_vec_zlib_with_limit( input: &[u8], max_size: usize, ) -> Result<Vec<u8>, 
TINFLStatus> { decompress_to_vec_inner(input, inflate_flags::TINFL_FLAG_PARSE_ZLIB_HEADER, max_size) }
pub fn decompress_slice_iter_to_slice<'out, 'inp>( out: &'out mut [u8], it: impl Iterator<Item = &'inp [u8]>, zlib_header: bool, ignore_adler32: bool, ) -> Result<usize, TINFLStatus> { use self::core::inflate_flags::*; let mut it = it.peekable(); let r = &mut DecompressorOxide::new(); let mut out_pos = 0; while let Some(in_buf) = it.next() { let has_more = it.peek().is_some(); let flags = { let mut f = TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF; if zlib_header { f |= TINFL_FLAG_PARSE_ZLIB_HEADER; } if ignore_adler32 { f |= TINFL_FLAG_IGNORE_ADLER32; } if has_more { f |= TINFL_FLAG_HAS_MORE_INPUT; } f }; let (status, _input_read, bytes_written) = decompress(r, in_buf, out, out_pos, flags); out_pos += bytes_written; match status { TINFLStatus::NeedsMoreInput => continue, TINFLStatus::Done => return Ok(out_pos), e => return Err(e), } } Err(TINFLStatus::FailedCannotMakeProgress) } #[cfg(test)] mod test { use super::{ decompress_slice_iter_to_slice, decompress_to_vec_zlib, decompress_to_vec_zlib_with_limit, TINFLStatus, }; const encoded: [u8; 20] = [ 120, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4, 19, ]; #[test] fn decompress_vec() { let res = decompress_to_vec_zlib(&encoded[..]).unwrap(); assert_eq!(res.as_slice(), &b"Hello, zlib!"[..]); } #[test] fn decompress_vec_with_high_limit() { let res = decompress_to_vec_zlib_with_limit(&encoded[..], 100_000).unwrap(); assert_eq!(res.as_slice(), &b"Hello, zlib!"[..]); } #[test] fn fail_to_decompress_with_limit() { let res = decompress_to_vec_zlib_with_limit(&encoded[..], 8); match res { Err(TINFLStatus::HasMoreOutput) => (), _ => panic!("Decompression output size limit was not enforced"), } } #[test] fn test_decompress_slice_iter_to_slice() { let mut out = [0_u8; 12_usize]; let r = decompress_slice_iter_to_slice(&mut out, Some(&encoded[..]).into_iter(), true, false); assert_eq!(r, Ok(12)); assert_eq!(&out[..12], &b"Hello, zlib!"[..]); for chunk_size in 1..13 { let mut out = [0_u8; 12_usize + 
1]; let r = decompress_slice_iter_to_slice(&mut out, encoded.chunks(chunk_size), true, false); assert_eq!(r, Ok(12)); assert_eq!(&out[..12], &b"Hello, zlib!"[..]); } let mut out = [0_u8; 3_usize]; let r = decompress_slice_iter_to_slice(&mut out, encoded.chunks(7), true, false); assert!(r.is_err()); } }
fn decompress_to_vec_inner( input: &[u8], flags: u32, max_output_size: usize, ) -> Result<Vec<u8>, TINFLStatus> { let flags = flags | inflate_flags::TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF; let mut ret: Vec<u8> = vec![0; min(input.len().saturating_mul(2), max_output_size)]; let mut decomp = Box::<DecompressorOxide>::default(); let mut in_pos = 0; let mut out_pos = 0; loop { let (status, in_consumed, out_consumed) = decompress(&mut decomp, &input[in_pos..], &mut ret, out_pos, flags); in_pos += in_consumed; out_pos += out_consumed; match status { TINFLStatus::Done => { ret.truncate(out_pos); return Ok(ret); } TINFLStatus::HasMoreOutput => { let new_len = ret .len() .checked_add(out_pos) .ok_or(TINFLStatus::HasMoreOutput)?; if new_len > max_output_size { return Err(TINFLStatus::HasMoreOutput); }; ret.resize(new_len, 0); } _ => return Err(status), } } }
function_block-full_function
[ { "content": "/// Compress the input data to a vector, using the specified compression level (0-10).\n\npub fn compress_to_vec(input: &[u8], level: u8) -> Vec<u8> {\n\n compress_to_vec_inner(input, level, 0, 0)\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 0, "score": 212970.98006308556 }, { "content": "/// Compress the input data to a vector, using the specified compression level (0-10), and with a\n\n/// zlib wrapper.\n\npub fn compress_to_vec_zlib(input: &[u8], level: u8) -> Vec<u8> {\n\n compress_to_vec_inner(input, level, 1, 0)\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 1, "score": 209111.21697096687 }, { "content": "/// Simple function to compress data to a vec.\n\nfn compress_to_vec_inner(input: &[u8], level: u8, window_bits: i32, strategy: i32) -> Vec<u8> {\n\n // The comp flags function sets the zlib flag if the window_bits parameter is > 0.\n\n let flags = create_comp_flags_from_zip_params(level.into(), window_bits, strategy);\n\n let mut compressor = CompressorOxide::new(flags);\n\n let mut output = vec![0; ::core::cmp::max(input.len() / 2, 2)];\n\n\n\n let mut in_pos = 0;\n\n let mut out_pos = 0;\n\n loop {\n\n let (status, bytes_in, bytes_out) = compress(\n\n &mut compressor,\n\n &input[in_pos..],\n\n &mut output[out_pos..],\n\n TDEFLFlush::Finish,\n\n );\n\n\n\n out_pos += bytes_out;\n\n in_pos += bytes_in;\n\n\n\n match status {\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 2, "score": 203206.26429949314 }, { "content": "#[inline]\n\npub fn update_hash(current_hash: u16, byte: u8) -> u16 {\n\n ((current_hash << LZ_HASH_SHIFT) ^ u16::from(byte)) & (LZ_HASH_SIZE as u16 - 1)\n\n}\n\n\n\npub struct HashBuffers {\n\n pub dict: [u8; LZ_DICT_FULL_SIZE],\n\n pub next: [u16; LZ_DICT_SIZE],\n\n pub hash: [u16; LZ_DICT_SIZE],\n\n}\n\n\n\nimpl HashBuffers {\n\n #[inline]\n\n pub fn reset(&mut self) {\n\n *self = HashBuffers::default();\n\n }\n\n}\n\n\n\nimpl Default for HashBuffers {\n\n fn default() 
-> HashBuffers {\n\n HashBuffers {\n", "file_path": "miniz_oxide/src/deflate/buffer.rs", "rank": 7, "score": 157017.49726611836 }, { "content": "/// Create a set of compression flags using parameters used by zlib and other compressors.\n\n/// Mainly intented for use with transition from c libraries as it deals with raw integers.\n\n///\n\n/// # Parameters\n\n/// `level` determines compression level. Clamped to maximum of 10. Negative values result in\n\n/// `Compressionlevel::DefaultLevel`.\n\n/// `window_bits`: Above 0, wraps the stream in a zlib wrapper, 0 or negative for a raw deflate\n\n/// stream.\n\n/// `strategy`: Sets the strategy if this conforms to any of the values in `CompressionStrategy`.\n\n///\n\n/// # Notes\n\n/// This function may be removed or moved to the `miniz_oxide_c_api` in the future.\n\npub fn create_comp_flags_from_zip_params(level: i32, window_bits: i32, strategy: i32) -> u32 {\n\n let num_probes = (if level >= 0 {\n\n cmp::min(10, level)\n\n } else {\n\n CompressionLevel::DefaultLevel as i32\n\n }) as usize;\n\n let greedy = if level <= 3 {\n\n TDEFL_GREEDY_PARSING_FLAG\n\n } else {\n\n 0\n\n };\n\n let mut comp_flags = NUM_PROBES[num_probes] | greedy;\n\n\n\n if window_bits > 0 {\n\n comp_flags |= TDEFL_WRITE_ZLIB_HEADER;\n\n }\n\n\n\n if level == 0 {\n\n comp_flags |= TDEFL_FORCE_ALL_RAW_BLOCKS;\n\n } else if strategy == CompressionStrategy::Filtered as i32 {\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 8, "score": 156164.37415255103 }, { "content": "/// Try to decompress from `input` to `output` with the given [`InflateState`]\n\n///\n\n/// # `flush`\n\n///\n\n/// Generally, the various [`MZFlush`] flags have meaning only on the compression side. They can be\n\n/// supplied here, but the only one that has any semantic meaning is [`MZFlush::Finish`], which is a\n\n/// signal that the stream is expected to finish, and failing to do so is an error. 
It isn't\n\n/// necessary to specify it when the stream ends; you'll still get returned a\n\n/// [`MZStatus::StreamEnd`] anyway. Other values either have no effect or cause errors. It's\n\n/// likely that you'll almost always just want to use [`MZFlush::None`].\n\n///\n\n/// # Errors\n\n///\n\n/// Returns [`MZError::Buf`] if the size of the `output` slice is empty or no progress was made due\n\n/// to lack of expected input data, or if called with [`MZFlush::Finish`] and input wasn't all\n\n/// consumed.\n\n///\n\n/// Returns [`MZError::Data`] if this or a a previous call failed with an error return from\n\n/// [`TINFLStatus`]; probably indicates corrupted data.\n\n///\n\n/// Returns [`MZError::Stream`] when called with [`MZFlush::Full`] (meaningless on\n\n/// decompression), or when called without [`MZFlush::Finish`] after an earlier call with\n\n/// [`MZFlush::Finish`] has been made.\n\npub fn inflate(\n\n state: &mut InflateState,\n\n input: &[u8],\n\n output: &mut [u8],\n\n flush: MZFlush,\n\n) -> StreamResult {\n\n let mut bytes_consumed = 0;\n\n let mut bytes_written = 0;\n\n let mut next_in = input;\n\n let mut next_out = output;\n\n\n\n if flush == MZFlush::Full {\n\n return StreamResult::error(MZError::Stream);\n\n }\n\n\n\n let mut decomp_flags = if state.data_format == DataFormat::Zlib {\n\n inflate_flags::TINFL_FLAG_COMPUTE_ADLER32\n\n } else {\n\n inflate_flags::TINFL_FLAG_IGNORE_ADLER32\n\n };\n", "file_path": "miniz_oxide/src/inflate/stream.rs", "rank": 9, "score": 147477.5146385042 }, { "content": "/// Try to compress from input to output with the given [`CompressorOxide`].\n\n///\n\n/// # Errors\n\n///\n\n/// Returns [`MZError::Buf`] If the size of the `output` slice is empty or no progress was made due\n\n/// to lack of expected input data, or if called without [`MZFlush::Finish`] after the compression\n\n/// was already finished.\n\n///\n\n/// Returns [`MZError::Param`] if the compressor parameters are set wrong.\n\n///\n\n/// Returns 
[`MZError::Stream`] when lower-level decompressor returns a\n\n/// [`TDEFLStatus::PutBufFailed`]; may not actually be possible.\n\npub fn deflate(\n\n compressor: &mut CompressorOxide,\n\n input: &[u8],\n\n output: &mut [u8],\n\n flush: MZFlush,\n\n) -> StreamResult {\n\n if output.is_empty() {\n\n return StreamResult::error(MZError::Buf);\n\n }\n\n\n\n if compressor.prev_return_status() == TDEFLStatus::Done {\n\n return if flush == MZFlush::Finish {\n\n StreamResult {\n\n bytes_written: 0,\n\n bytes_consumed: 0,\n\n status: Ok(MZStatus::StreamEnd),\n\n }\n\n } else {\n\n StreamResult::error(MZError::Buf)\n\n };\n", "file_path": "miniz_oxide/src/deflate/stream.rs", "rank": 10, "score": 147472.37859060057 }, { "content": "/// Main compression function. Tries to compress as much as possible from `in_buf` and\n\n/// puts compressed output into `out_buf`.\n\n///\n\n/// The value of `flush` determines if the compressor should attempt to flush all output\n\n/// and alternatively try to finish the stream.\n\n///\n\n/// Use [`TDEFLFlush::Finish`] on the final call to signal that the stream is finishing.\n\n///\n\n/// Note that this function does not keep track of whether a flush marker has been output, so\n\n/// if called using [`TDEFLFlush::Sync`], the caller needs to ensure there is enough space in the\n\n/// output buffer if they want to avoid repeated flush markers.\n\n/// See #105 for details.\n\n///\n\n/// # Returns\n\n/// Returns a tuple containing the current status of the compressor, the current position\n\n/// in the input buffer and the current position in the output buffer.\n\npub fn compress(\n\n d: &mut CompressorOxide,\n\n in_buf: &[u8],\n\n out_buf: &mut [u8],\n\n flush: TDEFLFlush,\n\n) -> (TDEFLStatus, usize, usize) {\n\n compress_inner(\n\n d,\n\n &mut CallbackOxide::new_callback_buf(in_buf, out_buf),\n\n flush,\n\n )\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 11, "score": 146096.09709228677 }, { "content": "/// Main 
decompression function. Keeps decompressing data from `in_buf` until the `in_buf` is\n\n/// empty, `out` is full, the end of the deflate stream is hit, or there is an error in the\n\n/// deflate stream.\n\n///\n\n/// # Arguments\n\n///\n\n/// `r` is a [`DecompressorOxide`] struct with the state of this stream.\n\n///\n\n/// `in_buf` is a reference to the compressed data that is to be decompressed. The decompressor will\n\n/// start at the first byte of this buffer.\n\n///\n\n/// `out` is a reference to the buffer that will store the decompressed data, and that\n\n/// stores previously decompressed data if any.\n\n///\n\n/// * The offset given by `out_pos` indicates where in the output buffer slice writing should start.\n\n/// * If [`TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF`] is not set, the output buffer is used in a\n\n/// wrapping manner, and it's size is required to be a power of 2.\n\n/// * The decompression function normally needs access to 32KiB of the previously decompressed data\n\n///(or to the beginning of the decompressed data if less than 32KiB has been decompressed.)\n\n/// - If this data is not available, decompression may fail.\n\n/// - Some deflate compressors allow specifying a window size which limits match distances to\n\n/// less than this, or alternatively an RLE mode where matches will only refer to the previous byte\n\n/// and thus allows a smaller output buffer. 
The window size can be specified in the zlib\n\n/// header structure, however, the header data should not be relied on to be correct.\n\n///\n\n/// `flags` indicates settings and status to the decompression function.\n\n/// * The [`TINFL_FLAG_HAS_MORE_INPUT`] has to be specified if more compressed data is to be provided\n\n/// in a subsequent call to this function.\n\n/// * See the the [`inflate_flags`] module for details on other flags.\n\n///\n\n/// # Returns\n\n///\n\n/// Returns a tuple containing the status of the compressor, the number of input bytes read, and the\n\n/// number of bytes output to `out`.\n\n///\n\n/// This function shouldn't panic pending any bugs.\n\npub fn decompress(\n\n r: &mut DecompressorOxide,\n\n in_buf: &[u8],\n\n out: &mut [u8],\n\n out_pos: usize,\n\n flags: u32,\n\n) -> (TINFLStatus, usize, usize) {\n\n let out_buf_size_mask = if flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF != 0 {\n\n usize::max_value()\n\n } else {\n\n // In the case of zero len, any attempt to write would produce HasMoreOutput,\n\n // so to gracefully process the case of there really being no output,\n\n // set the mask to all zeros.\n\n out.len().saturating_sub(1)\n\n };\n\n\n\n // Ensure the output buffer's size is a power of 2, unless the output buffer\n\n // is large enough to hold the entire output file (in which case it doesn't\n\n // matter).\n\n // Also make sure that the output buffer position is not past the end of the output buffer.\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 12, "score": 146095.559701258 }, { "content": "/// Main compression function. 
Callbacks output.\n\n///\n\n/// # Returns\n\n/// Returns a tuple containing the current status of the compressor, the current position\n\n/// in the input buffer.\n\n///\n\n/// The caller is responsible for ensuring the `CallbackFunc` struct will not cause undefined\n\n/// behaviour.\n\npub fn compress_to_output(\n\n d: &mut CompressorOxide,\n\n in_buf: &[u8],\n\n flush: TDEFLFlush,\n\n mut callback_func: impl FnMut(&[u8]) -> bool,\n\n) -> (TDEFLStatus, usize) {\n\n let res = compress_inner(\n\n d,\n\n &mut CallbackOxide::new_callback_func(\n\n in_buf,\n\n CallbackFunc {\n\n put_buf_func: &mut callback_func,\n\n },\n\n ),\n\n flush,\n\n );\n\n\n\n (res.0, res.1)\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 13, "score": 142397.00973653322 }, { "content": "pub fn mz_deflate_oxide(stream_oxide: &mut StreamOxide<Compressor>, flush: i32) -> MZResult {\n\n let state: &mut Compressor = {\n\n let enum_ref = stream_oxide.state.as_mut().ok_or(MZError::Stream)?;\n\n StateType::from_enum(enum_ref)\n\n }\n\n .ok_or(MZError::Stream)?;\n\n let next_in = stream_oxide.next_in.as_mut().ok_or(MZError::Stream)?;\n\n let next_out = stream_oxide.next_out.as_mut().ok_or(MZError::Stream)?;\n\n\n\n let flush = MZFlush::new(flush)?;\n\n\n\n let ret = if let Some(compressor) = state.inner.as_mut() {\n\n deflate(compressor, next_in, next_out, flush)\n\n } else {\n\n return Err(MZError::Param);\n\n };\n\n\n\n *next_in = &next_in[ret.bytes_consumed as usize..];\n\n *next_out = &mut mem::replace(next_out, &mut [])[ret.bytes_written as usize..];\n\n // Wrapping add to emulate miniz_behaviour, will wrap around >4 GiB on 32-bit.\n\n stream_oxide.total_in = stream_oxide\n\n .total_in\n\n .wrapping_add(ret.bytes_consumed as c_ulong);\n\n stream_oxide.total_out = stream_oxide\n\n .total_out\n\n .wrapping_add(ret.bytes_written as c_ulong);\n\n stream_oxide.adler = state.adler32();\n\n ret.into()\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 14, "score": 142069.6248497818 
}, { "content": "/// Initialize the wrapped compressor with the requested level (0-10) and default settings.\n\n///\n\n/// The compression level will be set to 6 (default) if the requested level is not available.\n\npub fn mz_deflate_init_oxide(stream_oxide: &mut StreamOxide<Compressor>, level: i32) -> MZResult {\n\n mz_deflate_init2_oxide(\n\n stream_oxide,\n\n level,\n\n MZ_DEFLATED,\n\n MZ_DEFAULT_WINDOW_BITS,\n\n 9,\n\n CompressionStrategy::Default as i32,\n\n )\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 15, "score": 139391.10885447724 }, { "content": "pub fn mz_inflate_oxide(stream_oxide: &mut StreamOxide<InflateState>, flush: i32) -> MZResult {\n\n let state: &mut InflateState = {\n\n let enum_ref = stream_oxide.state.as_mut().ok_or(MZError::Stream)?;\n\n StateType::from_enum(enum_ref)\n\n }\n\n .ok_or(MZError::Stream)?;\n\n\n\n let next_in = stream_oxide.next_in.as_mut().ok_or(MZError::Stream)?;\n\n let next_out = stream_oxide.next_out.as_mut().ok_or(MZError::Stream)?;\n\n\n\n let flush = MZFlush::new(flush)?;\n\n let ret = inflate(state, next_in, next_out, flush);\n\n *next_in = &next_in[ret.bytes_consumed as usize..];\n\n *next_out = &mut mem::replace(next_out, &mut [])[ret.bytes_written as usize..];\n\n // Wrapping add to emulate miniz_behaviour, will wrap around >4 GiB on 32-bit.\n\n stream_oxide.total_in = stream_oxide\n\n .total_in\n\n .wrapping_add(ret.bytes_consumed as c_ulong);\n\n stream_oxide.total_out = stream_oxide\n\n .total_out\n\n .wrapping_add(ret.bytes_written as c_ulong);\n\n stream_oxide.adler = state.decompressor().adler32().unwrap_or(0);\n\n ret.into()\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 16, "score": 139391.10885447724 }, { "content": "fn write(src: &[u8], dst: &mut [u8], dst_pos: &mut usize) -> Result<()> {\n\n match dst.get_mut(*dst_pos..*dst_pos + src.len()) {\n\n Some(s) => s.copy_from_slice(src),\n\n None => return Err(Error {}),\n\n }\n\n *dst_pos += src.len();\n\n Ok(())\n\n}\n\n\n\nimpl Default for 
HuffmanOxide {\n\n fn default() -> Self {\n\n HuffmanOxide {\n\n count: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],\n\n codes: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],\n\n code_sizes: [[0; MAX_HUFF_SYMBOLS]; MAX_HUFF_TABLES],\n\n }\n\n }\n\n}\n\n\n\nimpl HuffmanOxide {\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 17, "score": 138046.51244830852 }, { "content": "pub fn mz_crc32_oxide(crc32: c_uint, data: &[u8]) -> c_uint {\n\n let mut digest = crc32fast::Hasher::new_with_initial(crc32);\n\n digest.update(data);\n\n digest.finalize()\n\n}\n\n\n\n/// Signature of function used to allocate the compressor/decompressor structs.\n\n#[allow(bad_style)]\n\npub type mz_alloc_func = unsafe extern \"C\" fn(*mut c_void, size_t, size_t) -> *mut c_void;\n\n/// Signature of function used to free the compressor/decompressor structs.\n\n#[allow(bad_style)]\n\npub type mz_free_func = unsafe extern \"C\" fn(*mut c_void, *mut c_void);\n\n\n\n#[allow(bad_style)]\n\npub type mz_realloc_func =\n\n unsafe extern \"C\" fn(*mut c_void, *mut c_void, size_t, size_t) -> *mut c_void;\n\n\n\n#[allow(bad_style)]\n\npub type mz_alloc_callback =\n\n Option<unsafe extern \"C\" fn(*mut c_void, size_t, size_t) -> *mut c_void>;\n", "file_path": "src/c_export.rs", "rank": 18, "score": 127022.00797204008 }, { "content": "#[inline]\n\nfn apply_match(\n\n out_slice: &mut [u8],\n\n out_pos: usize,\n\n dist: usize,\n\n match_len: usize,\n\n out_buf_size_mask: usize,\n\n) {\n\n debug_assert!(out_pos + match_len <= out_slice.len());\n\n\n\n let source_pos = out_pos.wrapping_sub(dist) & out_buf_size_mask;\n\n\n\n if match_len == 3 {\n\n // Fast path for match len 3.\n\n out_slice[out_pos] = out_slice[source_pos];\n\n out_slice[out_pos + 1] = out_slice[(source_pos + 1) & out_buf_size_mask];\n\n out_slice[out_pos + 2] = out_slice[(source_pos + 2) & out_buf_size_mask];\n\n return;\n\n }\n\n\n\n if cfg!(not(any(target_arch = \"x86\", target_arch = \"x86_64\"))) {\n", "file_path": 
"miniz_oxide/src/inflate/core.rs", "rank": 19, "score": 126310.26857234858 }, { "content": "#[cfg(test)]\n\n#[inline]\n\nfn write_u16_le(val: u16, slice: &mut [u8], pos: usize) {\n\n slice[pos] = val as u8;\n\n slice[pos + 1] = (val >> 8) as u8;\n\n}\n\n\n\n// Read the two bytes starting at pos and interpret them as an u16.\n\n#[inline]\n\nconst fn read_u16_le(slice: &[u8], pos: usize) -> u16 {\n\n // The compiler is smart enough to optimize this into an unaligned load.\n\n slice[pos] as u16 | ((slice[pos + 1] as u16) << 8)\n\n}\n\n\n\n/// Main compression struct.\n\npub struct CompressorOxide {\n\n lz: LZOxide,\n\n params: ParamsOxide,\n\n huff: Box<HuffmanOxide>,\n\n dict: DictOxide,\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 20, "score": 125507.12786514689 }, { "content": "#[doc(hidden)]\n\n#[cfg(feature = \"simd\")]\n\npub fn update_adler32(adler: u32, data: &[u8]) -> u32 {\n\n let mut hash = simd_adler32::Adler32::from_checksum(adler);\n\n hash.write(data);\n\n hash.finish()\n\n}\n", "file_path": "miniz_oxide/src/shared.rs", "rank": 21, "score": 124203.38775470303 }, { "content": "fn push_dict_out(state: &mut InflateState, next_out: &mut &mut [u8]) -> usize {\n\n let n = cmp::min(state.dict_avail as usize, next_out.len());\n\n (next_out[..n]).copy_from_slice(&state.dict[state.dict_ofs..state.dict_ofs + n]);\n\n *next_out = &mut mem::replace(next_out, &mut [])[n..];\n\n state.dict_avail -= n;\n\n state.dict_ofs = (state.dict_ofs + (n)) & (TINFL_LZ_DICT_SIZE - 1);\n\n n\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{inflate, InflateState};\n\n use crate::{DataFormat, MZFlush, MZStatus};\n\n use alloc::vec;\n\n\n\n #[test]\n\n fn test_state() {\n\n let encoded = [\n\n 120u8, 156, 243, 72, 205, 201, 201, 215, 81, 168, 202, 201, 76, 82, 4, 0, 27, 101, 4,\n\n 19,\n", "file_path": "miniz_oxide/src/inflate/stream.rs", "rank": 23, "score": 121784.92694873064 }, { "content": "/// Try to fully decompress the data provided in the stream 
struct, with the specified\n\n/// level.\n\n///\n\n/// Returns MZResult::Ok on success.\n\npub fn mz_compress2_oxide(\n\n stream_oxide: &mut StreamOxide<Compressor>,\n\n level: i32,\n\n dest_len: &mut c_ulong,\n\n) -> MZResult {\n\n mz_deflate_init_oxide(stream_oxide, level)?;\n\n let status = mz_deflate_oxide(stream_oxide, MZFlush::Finish as i32);\n\n mz_deflate_end_oxide(stream_oxide)?;\n\n\n\n match status {\n\n Ok(MZStatus::StreamEnd) => {\n\n *dest_len = stream_oxide.total_out;\n\n Ok(MZStatus::Ok)\n\n }\n\n Ok(MZStatus::Ok) => Err(MZError::Buf),\n\n _ => status,\n\n }\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 24, "score": 114029.87105396141 }, { "content": "pub fn mz_uncompress2_oxide(\n\n stream_oxide: &mut StreamOxide<InflateState>,\n\n dest_len: &mut c_ulong,\n\n) -> MZResult {\n\n mz_inflate_init_oxide(stream_oxide)?;\n\n let status = mz_inflate_oxide(stream_oxide, MZFlush::Finish as i32);\n\n mz_inflate_end_oxide(stream_oxide)?;\n\n\n\n let empty_in = stream_oxide\n\n .next_in\n\n .map_or(true, |next_in| next_in.is_empty());\n\n match (status, empty_in) {\n\n (Ok(MZStatus::StreamEnd), _) => {\n\n *dest_len = stream_oxide.total_out;\n\n Ok(MZStatus::Ok)\n\n }\n\n (Err(MZError::Buf), true) => Err(MZError::Data),\n\n (status, _) => status,\n\n }\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 25, "score": 114026.07300479506 }, { "content": "/// Reset the compressor, so it can be used to compress a new set of data.\n\n///\n\n/// Returns `MZError::Stream` if the inner stream is missing, otherwise `MZStatus::Ok`.\n\n// TODO: probably not covered by tests\n\npub fn mz_deflate_reset_oxide(stream_oxide: &mut StreamOxide<Compressor>) -> MZResult {\n\n stream_oxide.total_in = 0;\n\n stream_oxide.total_out = 0;\n\n stream_oxide.adler = 0;\n\n stream_oxide.next_in = None;\n\n stream_oxide.next_out = None;\n\n let state = stream_oxide.state().ok_or(MZError::Stream)?;\n\n state.reset();\n\n Ok(MZStatus::Ok)\n\n}\n\n\n", "file_path": 
"src/lib_oxide.rs", "rank": 26, "score": 112231.24003921975 }, { "content": "/// Free the inner compression state.\n\n///\n\n/// Currently always returns `MZStatus::Ok`.\n\npub fn mz_deflate_end_oxide(stream_oxide: &mut StreamOxide<Compressor>) -> MZResult {\n\n stream_oxide.state = None;\n\n Ok(MZStatus::Ok)\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 27, "score": 112223.62129456436 }, { "content": "/// Initialize the compressor with the requested parameters.\n\n///\n\n/// # Params\n\n/// stream_oxide: The stream to be initialized.\n\n/// level: Compression level (0-10).\n\n/// method: Compression method. Only `MZ_DEFLATED` is accepted.\n\n/// window_bits: Number of bits used to represent the compression sliding window.\n\n/// Only `MZ_DEFAULT_WINDOW_BITS` is currently supported.\n\n/// A negative value, i.e `-MZ_DEFAULT_WINDOW_BITS` indicates that the stream\n\n/// should be not be wrapped in a zlib wrapper.\n\n/// mem_level: Currently unused. Only values from 1 to and including 9 are accepted.\n\n/// strategy: Compression strategy. 
See `deflate::CompressionStrategy` for accepted options.\n\n/// The default, which is used in most cases, is 0.\n\npub fn mz_deflate_init2_oxide(\n\n stream_oxide: &mut StreamOxide<Compressor>,\n\n level: i32,\n\n method: i32,\n\n window_bits: i32,\n\n mem_level: i32,\n\n strategy: i32,\n\n) -> MZResult {\n\n let comp_flags = deflate_flags::TDEFL_COMPUTE_ADLER32\n\n | create_comp_flags_from_zip_params(level, window_bits, strategy);\n\n\n\n let invalid_level = !(1..=9).contains(&mem_level);\n\n if (method != MZ_DEFLATED) || invalid_level || invalid_window_bits(window_bits) {\n\n return Err(MZError::Param);\n\n }\n\n\n\n stream_oxide.adler = MZ_ADLER32_INIT;\n\n stream_oxide.total_in = 0;\n\n stream_oxide.total_out = 0;\n\n\n\n let mut compr: Box<Compressor> = Box::default();\n\n compr.inner = Some(CompressorOxide::new(comp_flags));\n\n stream_oxide.state = Some(Box::new(InternalState::Deflate(compr)));\n\n\n\n Ok(MZStatus::Ok)\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 28, "score": 111545.1033117469 }, { "content": "pub fn mz_inflate_init2_oxide(\n\n stream_oxide: &mut StreamOxide<InflateState>,\n\n window_bits: i32,\n\n) -> MZResult {\n\n if invalid_window_bits(window_bits) {\n\n return Err(MZError::Param);\n\n }\n\n\n\n stream_oxide.adler = 0;\n\n stream_oxide.total_in = 0;\n\n stream_oxide.total_out = 0;\n\n\n\n stream_oxide.state = Some(Box::new(InternalState::Inflate(\n\n InflateState::new_boxed_with_window_bits(window_bits),\n\n )));\n\n\n\n Ok(MZStatus::Ok)\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 29, "score": 111534.83750298235 }, { "content": "pub fn mz_inflate_init_oxide(stream_oxide: &mut StreamOxide<InflateState>) -> MZResult {\n\n mz_inflate_init2_oxide(stream_oxide, MZ_DEFAULT_WINDOW_BITS)\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 30, "score": 110099.06409622158 }, { "content": "pub fn mz_inflate_end_oxide(stream_oxide: &mut StreamOxide<InflateState>) -> MZResult {\n\n stream_oxide.state = None;\n\n 
Ok(MZStatus::Ok)\n\n}\n\n\n\n/*\n", "file_path": "src/lib_oxide.rs", "rank": 31, "score": 110099.06409622158 }, { "content": "fn get_test_data() -> Vec<u8> {\n\n use std::env;\n\n let path = env::var(\"TEST_FILE\").unwrap_or_else(|_| \"miniz/miniz.c\".to_string());\n\n get_test_file_data(&path)\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 32, "score": 107738.0968952658 }, { "content": "fn roundtrip(level: u8) {\n\n let data = get_test_data();\n\n let enc = compress_to_vec(&data.as_slice()[..], level);\n\n println!(\n\n \"Input len: {}, compressed len: {}, level: {}\",\n\n data.len(),\n\n enc.len(),\n\n level\n\n );\n\n let dec = decompress_to_vec(enc.as_slice()).unwrap();\n\n assert!(data == dec);\n\n}\n\n\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 33, "score": 107738.0968952658 }, { "content": "#[inline]\n\nfn end_of_input(flags: u32) -> Action {\n\n Action::End(if flags & TINFL_FLAG_HAS_MORE_INPUT != 0 {\n\n TINFLStatus::NeedsMoreInput\n\n } else {\n\n TINFLStatus::FailedCannotMakeProgress\n\n })\n\n}\n\n\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 34, "score": 106200.0146317714 }, { "content": "#[inline]\n\nfn read_u16_le(iter: &mut slice::Iter<u8>) -> u16 {\n\n let ret = {\n\n let two_bytes = iter.as_ref()[..2].try_into().unwrap();\n\n u16::from_le_bytes(two_bytes)\n\n };\n\n iter.nth(1);\n\n ret\n\n}\n\n\n\n/// Read an le u32 value from the slice iterator.\n\n///\n\n/// # Panics\n\n/// Panics if there are less than four bytes left.\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 35, "score": 104334.3152587089 }, { "content": "#[inline(always)]\n\n#[cfg(target_pointer_width = \"64\")]\n\nfn read_u32_le(iter: &mut slice::Iter<u8>) -> u32 {\n\n let ret = {\n\n let four_bytes: [u8; 4] = iter.as_ref()[..4].try_into().unwrap();\n\n u32::from_le_bytes(four_bytes)\n\n };\n\n iter.nth(3);\n\n ret\n\n}\n\n\n\n/// Ensure that there is data in the bit buffer.\n\n///\n\n/// On 64-bit platform, we use a 64-bit value so this 
will\n\n/// result in there being at least 32 bits in the bit buffer.\n\n/// This function assumes that there is at least 4 bytes left in the input buffer.\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 36, "score": 104333.89337439346 }, { "content": "fn get_test_data() -> Vec<u8> {\n\n use std::env;\n\n let path = env::var(\"TEST_FILE\").unwrap_or_else(|_| \"../miniz/miniz.c\".to_string());\n\n get_test_file_data(&path)\n\n}\n\n\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 37, "score": 102933.77108962937 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Debug)]\n\nenum State {\n\n Start = 0,\n\n ReadZlibCmf,\n\n ReadZlibFlg,\n\n ReadBlockHeader,\n\n BlockTypeNoCompression,\n\n RawHeader,\n\n RawMemcpy1,\n\n RawMemcpy2,\n\n ReadTableSizes,\n\n ReadHufflenTableCodeSize,\n\n ReadLitlenDistTablesCodeSize,\n\n ReadExtraBitsCodeSize,\n\n DecodeLitlen,\n\n WriteSymbol,\n\n ReadExtraBitsLitlen,\n\n DecodeDistance,\n\n ReadExtraBitsDistance,\n\n RawReadFirstByte,\n\n RawStoreFirstByte,\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 38, "score": 97326.76079151583 }, { "content": "enum Action {\n\n None,\n\n Jump(State),\n\n End(TINFLStatus),\n\n}\n\n\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 39, "score": 97312.94695324497 }, { "content": "/// Returns true if the window_bits parameter is valid.\n\nfn invalid_window_bits(window_bits: i32) -> bool {\n\n (window_bits != MZ_DEFAULT_WINDOW_BITS) && (-window_bits != MZ_DEFAULT_WINDOW_BITS)\n\n}\n\n\n", "file_path": "src/lib_oxide.rs", "rank": 40, "score": 96206.42089724014 }, { "content": "#[inline(always)]\n\n#[cfg(not(target_pointer_width = \"64\"))]\n\nfn fill_bit_buffer(l: &mut LocalVars, in_iter: &mut slice::Iter<u8>) {\n\n // If the buffer is 32-bit wide, read 2 bytes instead.\n\n if l.num_bits < 15 {\n\n l.bit_buf |= BitBuffer::from(read_u16_le(in_iter)) << l.num_bits;\n\n l.num_bits += 16;\n\n }\n\n}\n\n\n\n/// Check that the zlib header is correct and that there is 
enough space in the buffer\n\n/// for the window size specified in the header.\n\n///\n\n/// See https://tools.ietf.org/html/rfc1950\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 41, "score": 95883.0105286662 }, { "content": "fn record_literal(h: &mut HuffmanOxide, lz: &mut LZOxide, lit: u8) {\n\n lz.total_bytes += 1;\n\n lz.write_code(lit);\n\n\n\n *lz.get_flag() >>= 1;\n\n lz.consume_flag();\n\n\n\n h.count[0][lit as usize] += 1;\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 42, "score": 95878.6189053497 }, { "content": "#[inline]\n\nfn transfer(\n\n out_slice: &mut [u8],\n\n mut source_pos: usize,\n\n mut out_pos: usize,\n\n match_len: usize,\n\n out_buf_size_mask: usize,\n\n) {\n\n for _ in 0..match_len >> 2 {\n\n out_slice[out_pos] = out_slice[source_pos & out_buf_size_mask];\n\n out_slice[out_pos + 1] = out_slice[(source_pos + 1) & out_buf_size_mask];\n\n out_slice[out_pos + 2] = out_slice[(source_pos + 2) & out_buf_size_mask];\n\n out_slice[out_pos + 3] = out_slice[(source_pos + 3) & out_buf_size_mask];\n\n source_pos += 4;\n\n out_pos += 4;\n\n }\n\n\n\n match match_len & 3 {\n\n 0 => (),\n\n 1 => out_slice[out_pos] = out_slice[source_pos & out_buf_size_mask],\n\n 2 => {\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 43, "score": 95337.17086944667 }, { "content": "fn tdefl_init(\n\n d: Option<&mut CompressorOxide>,\n\n put_buf_func: PutBufFuncPtr,\n\n put_buf_user: *mut c_void,\n\n flags: c_int,\n\n) -> TDEFLStatus {\n\n if let Some(d) = d {\n\n *d = CompressorOxide::new(\n\n put_buf_func.map(|func|\n\n CallbackFunc { put_buf_func: func, put_buf_user: put_buf_user }\n\n ),\n\n flags as u32,\n\n );\n\n TDEFLStatus::Okay\n\n } else {\n\n TDEFLStatus::BadParam\n\n }\n\n}*/\n\n\n\n// Missing safe rust analogue (though maybe best served by flate2 front-end instead)\n\n/*\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 44, "score": 94098.82172598297 }, { "content": "fn tdefl_compress(\n\n d: Option<&mut 
CompressorOxide>,\n\n in_buf: *const c_void,\n\n in_size: Option<&mut usize>,\n\n out_buf: *mut c_void,\n\n out_size: Option<&mut usize>,\n\n flush: TDEFLFlush,\n\n) -> TDEFLStatus {\n\n let res = match d {\n\n None => {\n\n in_size.map(|size| *size = 0);\n\n out_size.map(|size| *size = 0);\n\n (TDEFLStatus::BadParam, 0, 0)\n\n },\n\n Some(compressor) => {\n\n let callback_res = CallbackOxide::new(\n\n compressor.callback_func.clone(),\n\n in_buf,\n\n in_size,\n\n out_buf,\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 45, "score": 94098.82172598297 }, { "content": "fn inflate_loop(\n\n state: &mut InflateState,\n\n next_in: &mut &[u8],\n\n next_out: &mut &mut [u8],\n\n total_in: &mut usize,\n\n total_out: &mut usize,\n\n decomp_flags: u32,\n\n flush: MZFlush,\n\n) -> MZResult {\n\n let orig_in_len = next_in.len();\n\n loop {\n\n let status = decompress(\n\n &mut state.decomp,\n\n *next_in,\n\n &mut state.dict,\n\n state.dict_ofs,\n\n decomp_flags,\n\n );\n\n\n\n let in_bytes = status.1;\n", "file_path": "miniz_oxide/src/inflate/stream.rs", "rank": 46, "score": 94074.03839354188 }, { "content": "enum CallbackOut<'a> {\n\n Func(CallbackFunc<'a>),\n\n Buf(CallbackBuf<'a>),\n\n}\n\n\n\nimpl<'a> CallbackOut<'a> {\n\n fn new_output_buffer<'b>(\n\n &'b mut self,\n\n local_buf: &'b mut [u8],\n\n out_buf_ofs: usize,\n\n ) -> OutputBufferOxide<'b> {\n\n let is_local;\n\n let buf_len = OUT_BUF_SIZE - 16;\n\n let chosen_buffer = match *self {\n\n CallbackOut::Buf(ref mut cb) if cb.out_buf.len() - out_buf_ofs >= OUT_BUF_SIZE => {\n\n is_local = false;\n\n &mut cb.out_buf[out_buf_ofs..out_buf_ofs + buf_len]\n\n }\n\n _ => {\n\n is_local = true;\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 47, "score": 92842.94984943603 }, { "content": "/// Fast inner decompression loop which is run while there is at least\n\n/// 259 bytes left in the output buffer, and at least 6 bytes left in the input buffer\n\n/// (The maximum one match would need + 
1).\n\n///\n\n/// This was inspired by a similar optimization in zlib, which uses this info to do\n\n/// faster unchecked copies of multiple bytes at a time.\n\n/// Currently we don't do this here, but this function does avoid having to jump through the\n\n/// big match loop on each state change(as rust does not have fallthrough or gotos at the moment),\n\n/// and already improves decompression speed a fair bit.\n\nfn decompress_fast(\n\n r: &mut DecompressorOxide,\n\n mut in_iter: &mut slice::Iter<u8>,\n\n out_buf: &mut OutputBuffer,\n\n flags: u32,\n\n local_vars: &mut LocalVars,\n\n out_buf_size_mask: usize,\n\n) -> (TINFLStatus, State) {\n\n // Make a local copy of the most used variables, to avoid having to update and read from values\n\n // in a random memory location and to encourage more register use.\n\n let mut l = *local_vars;\n\n let mut state;\n\n\n\n let status: TINFLStatus = 'o: loop {\n\n state = State::DecodeLitlen;\n\n loop {\n\n // This function assumes that there is at least 259 bytes left in the output buffer,\n\n // and that there is at least 14 bytes left in the input buffer. 
14 input bytes:\n\n // 15 (prev lit) + 15 (length) + 5 (length extra) + 15 (dist)\n\n // + 29 + 32 (left in bit buf, including last 13 dist extra) = 111 bits < 14 bytes\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 48, "score": 92706.65976996168 }, { "content": "fn flush_block(\n\n d: &mut CompressorOxide,\n\n callback: &mut CallbackOxide,\n\n flush: TDEFLFlush,\n\n) -> Result<i32> {\n\n let mut saved_buffer;\n\n {\n\n let mut output = callback\n\n .out\n\n .new_output_buffer(&mut d.params.local_buf.b, d.params.out_buf_ofs);\n\n output.bit_buffer = d.params.saved_bit_buffer;\n\n output.bits_in = d.params.saved_bits_in;\n\n\n\n let use_raw_block = (d.params.flags & TDEFL_FORCE_ALL_RAW_BLOCKS != 0)\n\n && (d.dict.lookahead_pos - d.dict.code_buf_dict_pos) <= d.dict.size;\n\n\n\n assert!(d.params.flush_remaining == 0);\n\n d.params.flush_ofs = 0;\n\n d.params.flush_remaining = 0;\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 49, "score": 92695.89965094432 }, { "content": "fn compress_block(\n\n huff: &mut HuffmanOxide,\n\n output: &mut OutputBufferOxide,\n\n lz: &LZOxide,\n\n static_block: bool,\n\n) -> Result<bool> {\n\n if static_block {\n\n huff.start_static_block(output);\n\n } else {\n\n huff.start_dynamic_block(output)?;\n\n }\n\n\n\n compress_lz_codes(huff, output, &lz.codes[..lz.code_position])\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 50, "score": 92695.89965094432 }, { "content": "fn compress_inner(\n\n d: &mut CompressorOxide,\n\n callback: &mut CallbackOxide,\n\n flush: TDEFLFlush,\n\n) -> (TDEFLStatus, usize, usize) {\n\n d.params.out_buf_ofs = 0;\n\n d.params.src_pos = 0;\n\n\n\n let prev_ok = d.params.prev_return_status == TDEFLStatus::Okay;\n\n let flush_finish_once = d.params.flush != TDEFLFlush::Finish || flush == TDEFLFlush::Finish;\n\n\n\n d.params.flush = flush;\n\n if !prev_ok || !flush_finish_once {\n\n d.params.prev_return_status = TDEFLStatus::BadParam;\n\n return 
(d.params.prev_return_status, 0, 0);\n\n }\n\n\n\n if d.params.flush_remaining != 0 || d.params.finished {\n\n let res = flush_output_buffer(callback, &mut d.params);\n\n d.params.prev_return_status = res.0;\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 51, "score": 92695.89965094432 }, { "content": "#[inline]\n\nfn memset<T: Copy>(slice: &mut [T], val: T) {\n\n for x in slice {\n\n *x = val\n\n }\n\n}\n\n\n\n/// Read an le u16 value from the slice iterator.\n\n///\n\n/// # Panics\n\n/// Panics if there are less than two bytes left.\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 52, "score": 92159.22571669708 }, { "content": "fn memset<T: Copy>(slice: &mut [T], val: T) {\n\n for x in slice {\n\n *x = val\n\n }\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 53, "score": 92154.41220906514 }, { "content": "fn get_test_file_data(name: &str) -> Vec<u8> {\n\n use std::fs::File;\n\n let mut input = Vec::new();\n\n let mut f = File::open(name).unwrap();\n\n\n\n f.read_to_end(&mut input).unwrap();\n\n input\n\n}\n\n\n\nmacro_rules! 
decompress_bench {\n\n ($bench_name:ident, $decompress_func:ident, $level:expr, $path_to_data:expr) => {\n\n #[bench]\n\n fn $bench_name(b: &mut Bencher) {\n\n let input = get_test_file_data($path_to_data);\n\n let compressed = compress_to_vec(input.as_slice(), $level);\n\n\n\n let mut out_len: usize = 0;\n\n b.iter(|| unsafe {\n\n w($decompress_func(\n\n compressed.as_ptr() as *mut c_void,\n", "file_path": "benches/bench.rs", "rank": 54, "score": 92087.89044411092 }, { "content": "fn get_test_file_data(name: &str) -> Vec<u8> {\n\n use std::fs::File;\n\n let mut input = Vec::new();\n\n let mut f = File::open(name).unwrap();\n\n\n\n f.read_to_end(&mut input).unwrap();\n\n input\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 55, "score": 92087.89044411092 }, { "content": "#[test]\n\nfn need_more_input_has_more_output_at_same_time() {\n\n use miniz_oxide::inflate::core;\n\n\n\n let input = get_test_file_data(\"tests/test_data/numbers.deflate\");\n\n let data = get_test_file_data(\"tests/test_data/numbers.txt\");\n\n\n\n let decomp = |input: &[u8]| {\n\n let mut decomp = core::DecompressorOxide::new();\n\n decomp.init();\n\n\n\n let mut output = [0; core::TINFL_LZ_DICT_SIZE];\n\n let flags = core::inflate_flags::TINFL_FLAG_HAS_MORE_INPUT;\n\n\n\n let (status, in_consumed, out_consumed) =\n\n core::decompress(&mut decomp, input, &mut output, 0, flags);\n\n\n\n let input_empty = in_consumed == input.len();\n\n let output_full = out_consumed == output.len();\n\n\n\n eprintln!(\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 56, "score": 91719.19923262458 }, { "content": "fn flush_output_buffer(c: &mut CallbackOxide, p: &mut ParamsOxide) -> (TDEFLStatus, usize, usize) {\n\n let mut res = (TDEFLStatus::Okay, p.src_pos, 0);\n\n if let CallbackOut::Buf(ref mut cb) = c.out {\n\n let n = cmp::min(cb.out_buf.len() - p.out_buf_ofs, p.flush_remaining as usize);\n\n if n != 0 {\n\n (&mut cb.out_buf[p.out_buf_ofs..p.out_buf_ofs + n])\n\n 
.copy_from_slice(&p.local_buf.b[p.flush_ofs as usize..p.flush_ofs as usize + n]);\n\n }\n\n p.flush_ofs += n as u32;\n\n p.flush_remaining -= n as u32;\n\n p.out_buf_ofs += n;\n\n res.2 = p.out_buf_ofs;\n\n }\n\n\n\n if p.finished && p.flush_remaining == 0 {\n\n res.0 = TDEFLStatus::Done\n\n }\n\n res\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 58, "score": 90887.39819516521 }, { "content": "fn compress_lz_codes(\n\n huff: &HuffmanOxide,\n\n output: &mut OutputBufferOxide,\n\n lz_code_buf: &[u8],\n\n) -> Result<bool> {\n\n let mut flags = 1;\n\n let mut bb = BitBuffer {\n\n bit_buffer: u64::from(output.bit_buffer),\n\n bits_in: output.bits_in,\n\n };\n\n\n\n let mut i: usize = 0;\n\n while i < lz_code_buf.len() {\n\n if flags == 1 {\n\n flags = u32::from(lz_code_buf[i]) | 0x100;\n\n i += 1;\n\n }\n\n\n\n // The lz code was a length code\n\n if flags & 1 == 1 {\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 59, "score": 90256.99335041668 }, { "content": "#[inline]\n\nfn read_byte<F>(in_iter: &mut slice::Iter<u8>, flags: u32, f: F) -> Action\n\nwhere\n\n F: FnOnce(u8) -> Action,\n\n{\n\n match in_iter.next() {\n\n None => end_of_input(flags),\n\n Some(&byte) => f(byte),\n\n }\n\n}\n\n\n\n// TODO: `l: &mut LocalVars` may be slow similar to decompress_fast (even with inline(always))\n\n/// Try to read `amount` number of bits from `in_iter` and call the function `f` with the bits as an\n\n/// an argument after reading, returning the result of that function, or `Action::End` if there are\n\n/// not enough bytes left.\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 60, "score": 89674.35691776096 }, { "content": "#[test]\n\nfn issue_75_empty_input_infinite_loop() {\n\n // Make sure compression works with empty input,\n\n // a bug resulted in this causing an infinite loop in\n\n // compress_to_vec_inner.\n\n let c = miniz_oxide::deflate::compress_to_vec(&[], 6);\n\n let d = 
miniz_oxide::inflate::decompress_to_vec(&c).expect(\"decompression failed!\");\n\n assert_eq!(d.len(), 0);\n\n let c = miniz_oxide::deflate::compress_to_vec(&[0], 6);\n\n let d = miniz_oxide::inflate::decompress_to_vec(&c).expect(\"decompression failed!\");\n\n assert!(&d == &[0]);\n\n}\n\n\n\n/*\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 61, "score": 89403.63851579138 }, { "content": "fn tdefl_compress_mem_to_mem(\n\n out_buf: *mut c_void,\n\n out_buf_len: usize,\n\n src_buf: *const c_void,\n\n src_buf_len: usize,\n\n flags: c_int,\n\n) -> usize*/\n\n\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 62, "score": 89295.8327738484 }, { "content": "fn tdefl_compress_mem_to_output(\n\n buf: *const c_void,\n\n buf_len: usize,\n\n put_buf_func: PutBufFuncPtr,\n\n put_buf_user: *mut c_void,\n\n flags: c_int,\n\n) -> bool*/\n\n\n\n// Missing safe Rust analogue\n\n/*\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 63, "score": 89295.8327738484 }, { "content": "fn record_match(h: &mut HuffmanOxide, lz: &mut LZOxide, mut match_len: u32, mut match_dist: u32) {\n\n assert!(match_len >= MIN_MATCH_LEN.into());\n\n assert!(match_dist >= 1);\n\n assert!(match_dist as usize <= LZ_DICT_SIZE);\n\n\n\n lz.total_bytes += match_len;\n\n match_dist -= 1;\n\n match_len -= u32::from(MIN_MATCH_LEN);\n\n lz.write_code(match_len as u8);\n\n lz.write_code(match_dist as u8);\n\n lz.write_code((match_dist >> 8) as u8);\n\n\n\n *lz.get_flag() >>= 1;\n\n *lz.get_flag() |= 0x80;\n\n lz.consume_flag();\n\n\n\n let symbol = if match_dist < 512 {\n\n SMALL_DIST_SYM[match_dist as usize]\n\n } else {\n\n LARGE_DIST_SYM[((match_dist >> 8) & 127) as usize]\n\n } as usize;\n\n h.count[1][symbol] += 1;\n\n h.count[0][LEN_SYM[match_len as usize] as usize] += 1;\n\n}\n\n\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 64, "score": 88784.03074575761 }, { "content": "/// Tag that determines reset policy of [InflateState](struct.InflateState.html)\n\npub trait 
ResetPolicy {\n\n /// Performs reset\n\n fn reset(&self, state: &mut InflateState);\n\n}\n\n\n\n/// Resets state, without performing expensive ops (e.g. zeroing buffer)\n\n///\n\n/// Note that not zeroing buffer can lead to security issues when dealing with untrusted input.\n\npub struct MinReset;\n\n\n\nimpl ResetPolicy for MinReset {\n\n fn reset(&self, state: &mut InflateState) {\n\n state.decompressor().init();\n\n state.dict_ofs = 0;\n\n state.dict_avail = 0;\n\n state.first_call = true;\n\n state.has_flushed = false;\n\n state.last_status = TINFLStatus::NeedsMoreInput;\n\n }\n\n}\n", "file_path": "miniz_oxide/src/inflate/stream.rs", "rank": 65, "score": 88580.4444753513 }, { "content": "fn get_test_file_data(name: &str) -> Vec<u8> {\n\n use std::fs::File;\n\n let mut input = Vec::new();\n\n let mut f = File::open(name).unwrap();\n\n\n\n f.read_to_end(&mut input).unwrap();\n\n input\n\n}\n\n\n\n/// Fuzzed file that caused issues for the inflate library.\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 66, "score": 88206.41755402792 }, { "content": "#[inline]\n\n#[allow(clippy::while_immutable_condition)]\n\nfn read_bits<F>(\n\n l: &mut LocalVars,\n\n amount: u32,\n\n in_iter: &mut slice::Iter<u8>,\n\n flags: u32,\n\n f: F,\n\n) -> Action\n\nwhere\n\n F: FnOnce(&mut LocalVars, BitBuffer) -> Action,\n\n{\n\n // Clippy gives a false positive warning here due to the closure.\n\n // Read enough bytes from the input iterator to cover the number of bits we want.\n\n while l.num_bits < amount {\n\n match read_byte(in_iter, flags, |byte| {\n\n l.bit_buf |= BitBuffer::from(byte) << l.num_bits;\n\n l.num_bits += 8;\n\n Action::None\n\n }) {\n\n Action::None => (),\n\n // If there are not enough bytes in the input iterator, return and signal that we need\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 67, "score": 86003.26554662378 }, { "content": "/// Try to decode the next huffman code, and puts it in the counter field of the decompressor\n\n/// if 
successful.\n\n///\n\n/// # Returns\n\n/// The specified action returned from `f` on success,\n\n/// `Action::End` if there are not enough data left to decode a symbol.\n\nfn decode_huffman_code<F>(\n\n r: &mut DecompressorOxide,\n\n l: &mut LocalVars,\n\n table: usize,\n\n flags: u32,\n\n in_iter: &mut slice::Iter<u8>,\n\n f: F,\n\n) -> Action\n\nwhere\n\n F: FnOnce(&mut DecompressorOxide, &mut LocalVars, i32) -> Action,\n\n{\n\n // As the huffman codes can be up to 15 bits long we need at least 15 bits\n\n // ready in the bit buffer to start decoding the next huffman code.\n\n if l.num_bits < 15 {\n\n // First, make sure there is enough data in the bit buffer to decode a huffman code.\n\n if in_iter.len() < 2 {\n\n // If there is less than 2 bytes left in the input buffer, we try to look up\n\n // the huffman code with what's available, and return if that doesn't succeed.\n\n // Original explanation in miniz:\n\n // /* TINFL_HUFF_BITBUF_FILL() is only used rarely, when the number of bytes\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 68, "score": 83736.03854130422 }, { "content": "#[inline]\n\nfn validate_zlib_header(cmf: u32, flg: u32, flags: u32, mask: usize) -> Action {\n\n let mut failed =\n\n // cmf + flg should be divisible by 31.\n\n (((cmf * 256) + flg) % 31 != 0) ||\n\n // If this flag is set, a dictionary was used for this zlib compressed data.\n\n // This is currently not supported by miniz or miniz-oxide\n\n ((flg & 0b0010_0000) != 0) ||\n\n // Compression method. 
Only 8(DEFLATE) is defined by the standard.\n\n ((cmf & 15) != 8);\n\n\n\n let window_size = 1 << ((cmf >> 4) + 8);\n\n if (flags & TINFL_FLAG_USING_NON_WRAPPING_OUTPUT_BUF) == 0 {\n\n // Bail if the buffer is wrapping and the window size is larger than the buffer.\n\n failed |= (mask + 1) < window_size;\n\n }\n\n\n\n // Zlib doesn't allow window sizes above 32 * 1024.\n\n failed |= window_size > 32_768;\n\n\n\n if failed {\n\n Action::Jump(BadZlibHeader)\n\n } else {\n\n Action::Jump(ReadBlockHeader)\n\n }\n\n}\n\n\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 69, "score": 81497.93837473242 }, { "content": "#[inline]\n\nfn pad_to_bytes<F>(l: &mut LocalVars, in_iter: &mut slice::Iter<u8>, flags: u32, f: F) -> Action\n\nwhere\n\n F: FnOnce(&mut LocalVars) -> Action,\n\n{\n\n let num_bits = l.num_bits & 7;\n\n read_bits(l, num_bits, in_iter, flags, |l, _| f(l))\n\n}\n\n\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 70, "score": 81291.13045596165 }, { "content": "fn start_static_table(r: &mut DecompressorOxide) {\n\n r.table_sizes[LITLEN_TABLE] = 288;\n\n r.table_sizes[DIST_TABLE] = 32;\n\n memset(&mut r.tables[LITLEN_TABLE].code_size[0..144], 8);\n\n memset(&mut r.tables[LITLEN_TABLE].code_size[144..256], 9);\n\n memset(&mut r.tables[LITLEN_TABLE].code_size[256..280], 7);\n\n memset(&mut r.tables[LITLEN_TABLE].code_size[280..288], 8);\n\n memset(&mut r.tables[DIST_TABLE].code_size[0..32], 5);\n\n}\n\n\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 71, "score": 74163.83853113493 }, { "content": "#[cfg(feature = \"build_orig_miniz\")]\n\nfn main() {\n\n cc::Build::new().files(&[\"miniz/miniz.c\"]).compile(\"miniz\");\n\n}\n", "file_path": "src/build.rs", "rank": 72, "score": 66728.12270287759 }, { "content": "#[test]\n\nfn c_api() {\n\n use miniz_oxide::{MZError, MZStatus};\n\n use miniz_oxide_c_api::{\n\n mz_deflate, mz_deflateEnd, mz_deflateInit, mz_deflateReset, mz_inflate, mz_inflateEnd,\n\n mz_inflateInit, mz_stream,\n\n 
};\n\n let mut data = get_test_data();\n\n let mut compressed = vec![0; data.len() + 10];\n\n let compressed_size;\n\n let decompressed_size;\n\n unsafe {\n\n let mut stream = mz_stream {\n\n next_in: data.as_mut_ptr(),\n\n avail_in: data.len() as u32,\n\n next_out: compressed.as_mut_ptr(),\n\n avail_out: compressed.len() as u32,\n\n ..Default::default()\n\n };\n\n\n\n assert_eq!(mz_deflateInit(&mut stream, 1), MZStatus::Ok as i32);\n", "file_path": "tests/test.rs", "rank": 73, "score": 66728.12270287759 }, { "content": "#[cfg(not(any(feature = \"build_stub_miniz\", feature = \"build_orig_miniz\")))]\n\nfn main() {}\n\n\n", "file_path": "src/build.rs", "rank": 74, "score": 66728.12270287759 }, { "content": "#[test]\n\nfn roundtrip() {\n\n let level = 9;\n\n let data = get_test_data();\n\n let enc = compress_to_vec(&data.as_slice()[..], level);\n\n println!(\n\n \"Input len: {}, compressed len: {}, level: {}\",\n\n data.len(),\n\n enc.len(),\n\n level\n\n );\n\n let dec = decompress_to_vec(enc.as_slice()).unwrap();\n\n assert!(data == dec);\n\n}\n\n\n", "file_path": "tests/test.rs", "rank": 75, "score": 66728.12270287759 }, { "content": "#[inline]\n\nfn undo_bytes(l: &mut LocalVars, max: u32) -> u32 {\n\n let res = cmp::min(l.num_bits >> 3, max);\n\n l.num_bits -= res << 3;\n\n res\n\n}\n\n\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 76, "score": 65953.37486308695 }, { "content": "#[test]\n\nfn roundtrip_level_1() {\n\n let level = 1;\n\n let data = get_test_data();\n\n let enc = compress_to_vec(&data.as_slice()[..], level);\n\n println!(\n\n \"Input len: {}, compressed len: {}, level: {}\",\n\n data.len(),\n\n enc.len(),\n\n level\n\n );\n\n let dec = decompress_to_vec(enc.as_slice()).unwrap();\n\n assert!(data == dec);\n\n}\n\n\n\n/// Roundtrip test using the C API.\n", "file_path": "tests/test.rs", "rank": 77, "score": 65003.68010823811 }, { "content": "#[test]\n\nfn roundtrip_oxide() {\n\n use miniz_oxide_c_api::{mz_inflate_oxide, 
mz_inflateInit_oxide,\n\n StreamOxide};\n\n let mut stream = StreamOxide::default();\n\n let data = get_test_data();\n\n\n\n}*/\n", "file_path": "src/lib_oxide.rs", "rank": 78, "score": 63424.81317817996 }, { "content": "fn init_tree(r: &mut DecompressorOxide, l: &mut LocalVars) -> Action {\n\n loop {\n\n let table = &mut r.tables[r.block_type as usize];\n\n let table_size = r.table_sizes[r.block_type as usize] as usize;\n\n let mut total_symbols = [0u32; 16];\n\n let mut next_code = [0u32; 17];\n\n memset(&mut table.look_up[..], 0);\n\n memset(&mut table.tree[..], 0);\n\n\n\n for &code_size in &table.code_size[..table_size] {\n\n total_symbols[code_size as usize] += 1;\n\n }\n\n\n\n let mut used_symbols = 0;\n\n let mut total = 0;\n\n for i in 1..16 {\n\n used_symbols += total_symbols[i];\n\n total += total_symbols[i];\n\n total <<= 1;\n\n next_code[i + 1] = total;\n", "file_path": "miniz_oxide/src/inflate/core.rs", "rank": 79, "score": 63278.64917918551 }, { "content": "#[test]\n\nfn roundtrip_lvl_9() {\n\n roundtrip(9);\n\n}\n\n\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 80, "score": 61973.83446945652 }, { "content": "#[test]\n\nfn inf_issue_14() {\n\n let data = get_test_file_data(\"tests/test_data/issue_14.zlib\");\n\n let result = decompress_to_vec_zlib(data.as_slice());\n\n assert!(result.is_err());\n\n let error = result.unwrap_err();\n\n assert_eq!(error, TINFLStatus::Failed);\n\n}\n\n\n\n/// Fuzzed file that causes panics (subtract-with-overflow in debug, out-of-bounds in release)\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 81, "score": 61973.83446945652 }, { "content": "#[test]\n\nfn roundtrip_lvl_0() {\n\n roundtrip(0);\n\n}\n\n\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 82, "score": 61973.83446945652 }, { "content": "#[test]\n\nfn roundtrip_lvl_1() {\n\n roundtrip(1);\n\n}\n\n\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 83, "score": 61973.83446945652 }, { "content": "#[test]\n\nfn large_file() {\n\n let data = 
get_test_file_data(\"large_file/lf\");\n\n let enc = compress_to_vec(&data.as_slice()[..], 3);\n\n\n\n let dec = decompress_to_vec(enc.as_slice()).unwrap();\n\n assert!(data == dec);\n\n}\n\n\n\n*/\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 84, "score": 61973.83446945652 }, { "content": "#[test]\n\nfn inf_issue_19() {\n\n let data = get_test_file_data(\"tests/test_data/issue_19.deflate\");\n\n let _ = decompress_to_vec(data.as_slice());\n\n}\n\n\n\n/// Fuzzed (invalid )file that resulted in an infinite loop as inflate read a code as having 0\n\n/// length.\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 85, "score": 61973.83446945652 }, { "content": "fn compress_normal(d: &mut CompressorOxide, callback: &mut CallbackOxide) -> bool {\n\n let mut src_pos = d.params.src_pos;\n\n let in_buf = match callback.in_buf {\n\n None => return true,\n\n Some(in_buf) => in_buf,\n\n };\n\n\n\n let mut lookahead_size = d.dict.lookahead_size;\n\n let mut lookahead_pos = d.dict.lookahead_pos;\n\n let mut saved_lit = d.params.saved_lit;\n\n let mut saved_match_dist = d.params.saved_match_dist;\n\n let mut saved_match_len = d.params.saved_match_len;\n\n\n\n while src_pos < in_buf.len() || (d.params.flush != TDEFLFlush::None && lookahead_size != 0) {\n\n let src_buf_left = in_buf.len() - src_pos;\n\n let num_bytes_to_process = cmp::min(src_buf_left, MAX_MATCH_LEN - lookahead_size as usize);\n\n\n\n if lookahead_size + d.dict.size >= usize::from(MIN_MATCH_LEN) - 1\n\n && num_bytes_to_process > 0\n\n {\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 86, "score": 61760.19612645289 }, { "content": "fn compress_fast(d: &mut CompressorOxide, callback: &mut CallbackOxide) -> bool {\n\n let mut src_pos = d.params.src_pos;\n\n let mut lookahead_size = d.dict.lookahead_size;\n\n let mut lookahead_pos = d.dict.lookahead_pos;\n\n\n\n let mut cur_pos = lookahead_pos & LZ_DICT_SIZE_MASK;\n\n let in_buf = match callback.in_buf {\n\n None => return true,\n\n Some(in_buf) 
=> in_buf,\n\n };\n\n\n\n debug_assert!(d.lz.code_position < LZ_CODE_BUF_SIZE - 2);\n\n\n\n while src_pos < in_buf.len() || (d.params.flush != TDEFLFlush::None && lookahead_size > 0) {\n\n let mut dst_pos = ((lookahead_pos + lookahead_size) & LZ_DICT_SIZE_MASK) as usize;\n\n let mut num_bytes_to_process = cmp::min(\n\n in_buf.len() - src_pos,\n\n (COMP_FAST_LOOKAHEAD_SIZE - lookahead_size) as usize,\n\n );\n\n lookahead_size += num_bytes_to_process;\n", "file_path": "miniz_oxide/src/deflate/core.rs", "rank": 87, "score": 61760.19612645289 }, { "content": "#[test]\n\nfn zlib_header_level() {\n\n let level = 6;\n\n let data = [1, 2, 3];\n\n let enc = compress_to_vec_zlib(&data, level);\n\n let header_level = (enc[1] & 0b11000000) >> 6;\n\n assert_eq!(header_level, 2);\n\n let enc = compress_to_vec_zlib(&data, 10);\n\n let header_level = (enc[1] & 0b11000000) >> 6;\n\n assert_eq!(header_level, 3);\n\n}\n\n\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 88, "score": 60635.810374290595 }, { "content": "#[test]\n\nfn decompress_zero_code_len_2() {\n\n let data = get_test_file_data(\"tests/test_data/invalid_code_len_oom\");\n\n let _ = decompress_to_vec(data.as_slice());\n\n}\n\n\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 89, "score": 59398.045253878096 }, { "content": "/// Trait used for states that can be carried by BoxedState.\n\npub trait StateType {\n\n const STATE_TYPE: StateTypeEnum;\n\n fn from_enum(value: &mut InternalState) -> Option<&mut Self>;\n\n}\n\n\n\nimpl StateType for InflateState {\n\n const STATE_TYPE: StateTypeEnum = StateTypeEnum::InflateType;\n\n fn from_enum(value: &mut InternalState) -> Option<&mut Self> {\n\n if let InternalState::Inflate(state) = value {\n\n Some(state.as_mut())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl StateType for Compressor {\n\n const STATE_TYPE: StateTypeEnum = StateTypeEnum::DeflateType;\n\n fn from_enum(value: &mut InternalState) -> Option<&mut Self> {\n\n if let InternalState::Deflate(state) = 
value {\n", "file_path": "src/lib_oxide.rs", "rank": 90, "score": 59007.61824979707 }, { "content": "#[test]\n\nfn decompress_zero_code_len_oom() {\n\n let data = get_test_file_data(\"tests/test_data/invalid_code_len_oom\");\n\n let _ = decompress_to_vec(data.as_slice());\n\n}\n\n\n\n/// Same problem as previous test but in the end of input huffman decode part of\n\n/// `decode_huffman_code`\n", "file_path": "miniz_oxide/tests/test.rs", "rank": 91, "score": 58249.677346798984 }, { "content": "#[bench]\n\nfn create_compressor(b: &mut Bencher) {\n\n let flags = create_comp_flags_from_zip_params(6, true as i32, 0);\n\n b.iter(|| CompressorOxide::new(flags));\n\n}\n", "file_path": "benches/bench.rs", "rank": 92, "score": 52211.6950490081 }, { "content": "fn as_c_return_code(r: MZResult) -> c_int {\n\n match r {\n\n Err(status) => status as c_int,\n\n Ok(status) => status as c_int,\n\n }\n\n}\n\n\n\nmacro_rules! oxidize {\n\n ($mz_func:ident, $mz_func_oxide:ident; $($arg_name:ident: $type_name:ident),*) => {\n\n unmangle!(\n\n pub unsafe extern \"C\" fn $mz_func(stream: *mut mz_stream, $($arg_name: $type_name),*)\n\n -> c_int {\n\n match stream.as_mut() {\n\n None => MZError::Stream as c_int,\n\n Some(stream) => {\n\n // Make sure we catch a potential panic, as\n\n // this is called from C.\n\n match catch_unwind(AssertUnwindSafe(|| {\n\n // Do some checks to see if the stream object has the right type.\n\n match StreamOxide::try_new(stream) {\n", "file_path": "src/lib.rs", "rank": 93, "score": 50873.67095384218 }, { "content": "/// Wrap pointer in a buffer that frees the memory on exit.\n\nfn w(buf: *mut c_void) -> HeapBuf {\n\n HeapBuf { buf: buf }\n\n}\n\n\n", "file_path": "benches/bench.rs", "rank": 94, "score": 48992.46789722975 }, { "content": "// Currently not bubbled up outside this module, so can fill in with more\n\n// context eventually if needed.\n\ntype Result<T, E = Error> = core::result::Result<T, E>;\n", "file_path": "miniz_oxide/src/deflate/core.rs", 
"rank": 95, "score": 47971.142333561445 }, { "content": "#[cfg(target_bit_width = \"64\")]\n\n#[inline]\n\nfn buffer_too_large(source_len: c_ulong, dest_len: c_ulong) -> bool {\n\n (source_len | dest_len) > 0xFFFFFFFF\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 96, "score": 43023.89455869837 }, { "content": "//! This module contains functionality for compression.\n\n\n\nuse crate::alloc::vec;\n\nuse crate::alloc::vec::Vec;\n\n\n\nmod buffer;\n\npub mod core;\n\npub mod stream;\n\nuse self::core::*;\n\n\n\n/// How much processing the compressor should do to compress the data.\n\n/// `NoCompression` and `Bestspeed` have special meanings, the other levels determine the number\n\n/// of checks for matches in the hash chains and whether to use lazy or greedy parsing.\n\n#[repr(i32)]\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum CompressionLevel {\n\n /// Don't do any compression, only output uncompressed blocks.\n\n NoCompression = 0,\n\n /// Fast compression. Uses a special compression routine that is optimized for speed.\n\n BestSpeed = 1,\n", "file_path": "miniz_oxide/src/deflate/mod.rs", "rank": 98, "score": 31.15360404228364 } ]
Rust
lib/src/config.rs
phylum-dev/cli
724395d036bc56c69acd69c0f59ea1308350a2c1
use chrono::{DateTime, Local}; use reqwest::Url; use serde::{Deserialize, Serialize}; use std::env; use std::error::Error; use std::fs; use std::path::PathBuf; use crate::types::*; #[derive(Debug, Serialize, Deserialize)] pub struct ConnectionInfo { pub uri: String, } #[derive(Debug, Serialize, Deserialize)] pub struct AuthInfo { pub oidc_discovery_url: Url, pub offline_access: Option<RefreshToken>, } pub type Packages = Vec<PackageDescriptor>; #[derive(Debug, Serialize, Deserialize)] pub struct Config { pub connection: ConnectionInfo, pub auth_info: AuthInfo, pub request_type: PackageType, pub packages: Option<Packages>, pub last_update: Option<usize>, pub ignore_certs: Option<bool>, } #[derive(Debug, Serialize, Deserialize)] pub struct ProjectConfig { pub id: ProjectId, pub name: String, pub created_at: DateTime<Local>, } pub fn save_config<T>(path: &str, config: &T) -> Result<(), Box<dyn Error + Send + Sync + 'static>> where T: Serialize, { let yaml = serde_yaml::to_string(config)?; fs::write(shellexpand::env(path)?.as_ref(), yaml)?; Ok(()) } pub fn parse_config<T>(path: &str) -> Result<T, Box<dyn Error + Send + Sync + 'static>> where T: serde::de::DeserializeOwned, { let contents = fs::read_to_string(shellexpand::env(path)?.as_ref())?; let config: T = serde_yaml::from_str(&contents)?; Ok(config) } pub fn read_configuration(path: &str) -> Result<Config, Box<dyn Error + Send + Sync + 'static>> { let mut config: Config = parse_config(path)?; if let Ok(key) = env::var("PHYLUM_API_KEY") { config.auth_info.offline_access = Some(RefreshToken::new(key)); } Ok(config) } pub fn find_project_conf(starting_directory: &str) -> Option<String> { let mut path: PathBuf = starting_directory.into(); let mut attempts = 0; const MAX_DEPTH: u8 = 32; loop { let search_path = path.join(PROJ_CONF_FILE); if search_path.is_file() { return Some(search_path.to_string_lossy().to_string()); } if attempts > MAX_DEPTH { return None; } path.push(".."); attempts += 1; } } pub fn 
get_current_project() -> Option<ProjectConfig> { find_project_conf(".").and_then(|s| { log::info!("Found project configuration file at {}", s); parse_config(&s).ok() }) } #[cfg(test)] mod tests { use super::*; use std::env::temp_dir; fn write_test_config() { let con = ConnectionInfo { uri: "http://127.0.0.1".into(), }; let auth = AuthInfo { oidc_discovery_url: Url::parse("http://example.com").unwrap(), offline_access: Some(RefreshToken::new("FAKE TOKEN")), }; let packages = vec![ PackageDescriptor { name: "foo".into(), version: "1.2.3".into(), r#type: PackageType::Npm, }, PackageDescriptor { name: "bar".into(), version: "3.4.5".into(), r#type: PackageType::Npm, }, PackageDescriptor { name: "baz".into(), version: "2020.2.12".into(), r#type: PackageType::Npm, }, ]; let config = Config { connection: con, auth_info: auth, request_type: PackageType::Npm, packages: Some(packages), last_update: None, ignore_certs: None, }; let temp_dir = temp_dir(); let test_config_file = temp_dir.as_path().join("test_config"); save_config(test_config_file.to_str().unwrap(), &config).unwrap(); } #[test] fn test_save_config() { write_test_config(); } #[test] fn test_parse_config() { write_test_config(); let temp_dir = temp_dir(); let test_config_file = temp_dir.as_path().join("test_config"); let config: Config = parse_config(test_config_file.to_str().unwrap()).unwrap(); assert_eq!(config.request_type, PackageType::Npm); } }
use chrono::{DateTime, Local}; use reqwest::Url; use serde::{Deserialize, Serialize}; use std::env; use std::error::Error; use std::fs; use std::path::PathBuf; use crate::types::*; #[derive(Debug, Serialize, Deserialize)] pub struct ConnectionInfo { pub uri: String, } #[derive(Debug, Serialize, Deserialize)] pub struct AuthInfo { pub oidc_discovery_url: Url, pub offline_access: Option<RefreshToken>, } pub type Packages = Vec<PackageDescriptor>; #[derive(Debug, Serialize, Deserialize)] pub struct Config { pub connection: ConnectionInfo, pub auth_info: AuthInfo, pub request_type: PackageType, pub packages: Option<Packages>, pub last_update: Option<usize>, pub ignore_certs: Option<bool>, } #[derive(Debug, Serialize, Deserialize)] pub struct ProjectConfig { pub id: ProjectId, pub name: String, pub created_at: DateTime<Local>, } pub fn save_config<T>(path: &str, config: &T) -> Result<(), Box<dyn Error + Send + Sync + 'static>> where T: Serialize, { let yaml = serde_yaml::to_string(config)?; fs::write(shellexpand::env(path)?.as_ref(), yaml)?; Ok(()) } pub fn parse_config<T>(path: &str) -> Result<T, Box<dyn Error + Send + Sync + 'static>> where T: serde::de::DeserializeOwned, { let contents = fs::read_to_string(shellexpand::env(path)?.as_ref())?; let config: T = serde_yaml::from_str(&contents)?; Ok(config) } pub fn read_configuration(path: &str) -> Result<Config, Box<dyn Error + Send + Sync + 'static>> { let mut config: Config = parse_config(path)?; if let Ok(key) = env::var("PHYLUM_API_KEY") { config.auth_info.offline_access = Some(RefreshToken::new(key)); } Ok(config) } pub fn find_project_conf(starting_directory: &str) -> Option<String> { let mut path: PathBuf = starting_directory.into(); let mut atte
pub fn get_current_project() -> Option<ProjectConfig> { find_project_conf(".").and_then(|s| { log::info!("Found project configuration file at {}", s); parse_config(&s).ok() }) } #[cfg(test)] mod tests { use super::*; use std::env::temp_dir; fn write_test_config() { let con = ConnectionInfo { uri: "http://127.0.0.1".into(), }; let auth = AuthInfo { oidc_discovery_url: Url::parse("http://example.com").unwrap(), offline_access: Some(RefreshToken::new("FAKE TOKEN")), }; let packages = vec![ PackageDescriptor { name: "foo".into(), version: "1.2.3".into(), r#type: PackageType::Npm, }, PackageDescriptor { name: "bar".into(), version: "3.4.5".into(), r#type: PackageType::Npm, }, PackageDescriptor { name: "baz".into(), version: "2020.2.12".into(), r#type: PackageType::Npm, }, ]; let config = Config { connection: con, auth_info: auth, request_type: PackageType::Npm, packages: Some(packages), last_update: None, ignore_certs: None, }; let temp_dir = temp_dir(); let test_config_file = temp_dir.as_path().join("test_config"); save_config(test_config_file.to_str().unwrap(), &config).unwrap(); } #[test] fn test_save_config() { write_test_config(); } #[test] fn test_parse_config() { write_test_config(); let temp_dir = temp_dir(); let test_config_file = temp_dir.as_path().join("test_config"); let config: Config = parse_config(test_config_file.to_str().unwrap()).unwrap(); assert_eq!(config.request_type, PackageType::Npm); } }
mpts = 0; const MAX_DEPTH: u8 = 32; loop { let search_path = path.join(PROJ_CONF_FILE); if search_path.is_file() { return Some(search_path.to_string_lossy().to_string()); } if attempts > MAX_DEPTH { return None; } path.push(".."); attempts += 1; } }
function_block-function_prefixed
[ { "content": "/// Produces the path to a temporary file on disk.\n\nfn tmp_path(filename: &str) -> Option<String> {\n\n let tmp_loc = env::temp_dir();\n\n let path = Path::new(&tmp_loc);\n\n let tmp_path = path.join(filename);\n\n match tmp_path.into_os_string().into_string() {\n\n Ok(x) => Some(x),\n\n Err(_) => None,\n\n }\n\n}\n\n\n\n/// Utility for handling updating the Phylum installation in place, along with\n\n/// facilities for validating the binary signature before installation.\n\nimpl ApplicationUpdater {\n\n /// Build a instance for use in tests\n\n #[cfg(test)]\n\n fn build_test_instance(mock_server: MockServer) -> Self {\n\n let pubkey = PublicKey::from_base64(PUBKEY).expect(\"Unable to decode the public key\");\n\n ApplicationUpdater {\n\n pubkey,\n\n github_uri: mock_server.uri(),\n", "file_path": "lib/src/update.rs", "rank": 4, "score": 168299.8628757466 }, { "content": "/// Check if an address is routable beyond the local network segment.\n\npub fn check_if_routable(hostname: impl AsRef<str>) -> Result<bool> {\n\n let is_routable = hostname\n\n .as_ref()\n\n .to_socket_addrs()?\n\n .map(|socket_addr| socket_addr.ip().is_routable())\n\n .reduce(|a, b| a | b)\n\n .unwrap_or(false);\n\n Ok(is_routable)\n\n}\n\n\n\npub async fn fetch_oidc_server_settings(auth_info: &AuthInfo) -> Result<OidcServerSettings> {\n\n let client = reqwest::Client::new();\n\n let response = client\n\n .get(auth_info.oidc_discovery_url.clone())\n\n .header(\"Accept\", \"application/json\")\n\n .timeout(Duration::from_secs(5))\n\n .send()\n\n .await?\n\n .json::<OidcServerSettings>()\n\n .await?;\n\n Ok(response)\n\n}\n\n\n", "file_path": "lib/src/auth/oidc.rs", "rank": 5, "score": 159222.53720829193 }, { "content": "fn take_till_blank_line(input: &str) -> Result<&str, &str> {\n\n recognize(alt((take_until(\"\\n\\n\"), take_until(\"\\r\\n\\r\\n\"))))(input)\n\n}\n\n\n", "file_path": "lib/src/lockfiles/parsers.rs", "rank": 6, "score": 151585.18809298443 }, { "content": "fn 
take_till_line_end(input: &str) -> Result<&str, &str> {\n\n recognize(tuple((\n\n alt((take_until(\"\\n\"), take_until(\"\\r\\n\"))),\n\n take(1usize),\n\n )))(input)\n\n}\n\n\n", "file_path": "lib/src/lockfiles/parsers.rs", "rank": 7, "score": 151585.18809298443 }, { "content": "/// Using config information, build the url for the keycloak login page.\n\npub fn build_auth_url(\n\n action: &AuthAction,\n\n oidc_settings: &OidcServerSettings,\n\n callback_url: &Url,\n\n code_challenge: &ChallengeCode,\n\n state: impl AsRef<str>,\n\n) -> Result<Url> {\n\n let mut auth_url = match *action {\n\n // Login uses the oidc defined /auth endpoint as is\n\n AuthAction::Login => oidc_settings.authorization_endpoint.to_owned(),\n\n // Register uses the non-standard /registrations endpoint\n\n AuthAction::Register => {\n\n let mut auth_url = oidc_settings.authorization_endpoint.to_owned();\n\n auth_url\n\n .path_segments_mut()\n\n .map_err(|_| anyhow!(\"Can not be base url\"))?\n\n .pop()\n\n .push(\"registrations\");\n\n auth_url\n\n }\n", "file_path": "lib/src/auth/oidc.rs", "rank": 9, "score": 133393.79097125374 }, { "content": "/// Convert the given threshold float value into a string. If no value is\n\n/// returned, i.e. 
a value of 0, returns a placehold to indicate that this\n\n/// value is unset.\n\nfn threshold_to_str(n: f32) -> String {\n\n let threshold = (n * 100.0) as u32;\n\n\n\n if threshold == 0 {\n\n return String::from(\"Not Set\");\n\n }\n\n\n\n format!(\"{}\", threshold)\n\n}\n\n\n\nimpl Renderable for ProjectGetDetailsRequest {\n\n fn render(&self) -> String {\n\n let title_score = format!(\"{}\", Blue.paint(\"Score\"));\n\n let title_passfail = format!(\"{}\", Blue.paint(\"P/F\"));\n\n let title_label = format!(\"{}\", Blue.paint(\"Label\"));\n\n let title_job_id = format!(\"{}\", Blue.paint(\"Job ID\"));\n\n let title_datetime = format!(\"{}\", Blue.paint(\"Datetime\"));\n\n\n\n let threshold_total = threshold_to_str(self.thresholds.total);\n\n let threshold_malicious = threshold_to_str(self.thresholds.malicious);\n", "file_path": "lib/src/render.rs", "rank": 10, "score": 130084.98179422387 }, { "content": "type Result<T, U> = IResult<T, U, VerboseError<T>>;\n\n\n\npub mod yarn {\n\n use super::*;\n\n\n\n pub fn parse(input: &str) -> Result<&str, Vec<PackageDescriptor>> {\n\n let (i, _) = yarn_lock_header(input)?;\n\n let (i, mut entries) = many1(entry)(i)?;\n\n\n\n // Attempt to parse one final entry not followed by a newline\n\n let res = entry_final(i);\n\n if let Ok((i, final_entry)) = res {\n\n entries.push(final_entry);\n\n return Ok((i, entries));\n\n }\n\n\n\n Ok((i, entries))\n\n }\n\n\n\n fn yarn_lock_header(input: &str) -> Result<&str, &str> {\n", "file_path": "lib/src/lockfiles/parsers.rs", "rank": 11, "score": 116716.21763527732 }, { "content": "#[derive(Clone)]\n\nstruct OAuth2CallbackState(Arc<String>);\n\n\n", "file_path": "lib/src/auth/server.rs", "rank": 12, "score": 96969.94016228389 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct InvalidResource {}\n\n\n\nimpl RestPath<()> for InvalidResource {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(String::from(\"not_found\"))\n\n }\n\n}\n\n\n\nimpl RestPath<bool> for 
InvalidResource {\n\n fn get_path(param: bool) -> Result<String, Error> {\n\n if param {\n\n return Ok(String::from(\"path\"));\n\n }\n\n Err(Error::UrlError)\n\n }\n\n}\n\n\n", "file_path": "lib/tests/error.rs", "rank": 13, "score": 90565.0955026289 }, { "content": "#[test]\n\nfn invalid_baseurl() {\n\n match RestClient::new(\"1234\") {\n\n Err(Error::UrlError) => (),\n\n _ => panic!(\"Expected url error\"),\n\n };\n\n}\n\n\n\n#[tokio::test]\n\nasync fn invalid_get() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n\n if client.get::<(), InvalidResource>(()).await.is_ok() {\n\n panic!(\"expected error\");\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn invalid_post() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n", "file_path": "lib/tests/error.rs", "rank": 14, "score": 89699.58457533708 }, { "content": "#[derive(Clone)]\n\nstruct AuthCodeState(Arc<Mutex<Option<String>>>);\n\n\n\n//State to store the oauth2 state parameter so it can be set and checked in the callback\n", "file_path": "lib/src/auth/server.rs", "rank": 15, "score": 89162.51986983756 }, { "content": "#[derive(Deserialize)]\n\nstruct HttpRelativePath {\n\n url: String,\n\n}\n\n\n", "file_path": "lib/tests/get.rs", "rank": 16, "score": 88383.50531415292 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct HttpBinBase64 {}\n\n\n\nimpl RestPath<String> for HttpBinBase64 {\n\n fn get_path(data: String) -> Result<String, Error> {\n\n Ok(format!(\"base64/{}\", data))\n\n }\n\n}\n\n\n", "file_path": "lib/tests/error.rs", "rank": 17, "score": 88270.70608048205 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct HttpBinDelay {}\n\n\n\nimpl RestPath<u16> for HttpBinDelay {\n\n fn get_path(delay: u16) -> Result<String, Error> {\n\n Ok(format!(\"delay/{}\", delay))\n\n }\n\n}\n\n\n", "file_path": "lib/tests/error.rs", "rank": 18, "score": 88270.70608048205 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct HttpBinStatus 
{}\n\n\n\nimpl RestPath<u16> for HttpBinStatus {\n\n fn get_path(code: u16) -> Result<String, Error> {\n\n Ok(format!(\"status/{}\", code))\n\n }\n\n}\n\n\n", "file_path": "lib/tests/error.rs", "rank": 19, "score": 88270.70608048205 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"phylum-online\"), ignore)]\n\nfn get_package_status() {\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd.arg(\"status\").assert();\n\n\n\n let resp = String::from_utf8_lossy(&assert.get_output().stdout);\n\n let jobs: Vec<JobDescriptor> = serde_json::from_str(&resp).unwrap();\n\n\n\n let name = jobs[0].packages[0].name.to_string();\n\n let version = jobs[0].packages[0].version.to_string();\n\n\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd.args(&[\"status\", \"-n\", &name, \"-v\", &version]).assert();\n\n\n\n let output = &assert.get_output().stderr;\n\n assert!(is_sub(output, b\"success\"));\n\n}\n\n\n", "file_path": "lib/tests/cmd.rs", "rank": 20, "score": 87442.45056955608 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"phylum-online\"), ignore)]\n\nfn get_package_status_detailed() {\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd.arg(\"status\").assert();\n\n\n\n let resp = String::from_utf8_lossy(&assert.get_output().stdout);\n\n let jobs: Vec<JobDescriptor> = serde_json::from_str(&resp).unwrap();\n\n\n\n let name = jobs[0].packages[0].name.to_string();\n\n let version = jobs[0].packages[0].version.to_string();\n\n\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd\n\n .args(&[\"status\", \"-n\", &name, \"-v\", &version])\n\n .arg(\"-V\")\n\n .assert();\n\n\n\n let output = &assert.get_output().stderr;\n\n assert!(is_sub(output, b\"success\"));\n\n}\n", "file_path": "lib/tests/cmd.rs", "rank": 21, "score": 85302.28228786304 }, { "content": "/// Rest path builder trait for type.\n\n///\n\n/// Provides implementation for `rest_path` function that 
builds\n\n/// type (and REST endpoint) specific API path from given parameter(s).\n\n/// The built REST path is appended to the base URL given to `RestClient`.\n\n/// If `Err` is returned, it is propagated directly to API caller.\n\npub trait RestPath<T> {\n\n /// Construct type specific REST API path from given parameters\n\n /// (e.g. \"api/devices/1234\").\n\n fn get_path(par: T) -> Result<String, Error>;\n\n}\n\n\n\nimpl RestClient {\n\n /// Construct new client with default configuration to make HTTP requests.\n\n ///\n\n /// Use `Builder` to configure the client.\n\n pub fn new(url: &str) -> Result<RestClient, Error> {\n\n RestClient::with_builder(url, RestClient::builder())\n\n }\n\n\n\n fn with_builder(url: &str, builder: Builder) -> Result<RestClient, Error> {\n\n let client = match builder.client {\n\n Some(client) => client,\n\n None => {\n\n if builder.ignore_certs {\n\n Client::builder().build(HttpsConnector::without_cert_validation())\n", "file_path": "lib/src/restson.rs", "rank": 22, "score": 82713.10431777657 }, { "content": "fn build_grant_type_auth_code_post_body(\n\n redirect_url: &Url,\n\n authorization_code: &AuthorizationCode,\n\n code_verfier: &CodeVerifier,\n\n) -> Result<HashMap<String, String>> {\n\n let body = hashmap! {\n\n \"client_id\".to_owned() => OIDC_CLIENT_ID.to_owned(),\n\n \"code\".to_owned() => authorization_code.into(),\n\n \"code_verifier\".to_owned() => code_verfier.into(),\n\n \"grant_type\".to_owned() => \"authorization_code\".to_owned(),\n\n // Must match previous request to /authorize but not redirected to by server\n\n \"redirect_uri\".to_owned() => redirect_url.to_string(),\n\n \"scopes\".to_owned() => OIDC_SCOPES.join(\" \")\n\n };\n\n Ok(body)\n\n}\n\n\n", "file_path": "lib/src/auth/oidc.rs", "rank": 23, "score": 77622.31179355594 }, { "content": "fn build_grant_type_refresh_token_post_body(\n\n refresh_token: &RefreshToken,\n\n) -> Result<HashMap<String, String>> {\n\n let body = hashmap! 
{\n\n \"client_id\".to_owned() => OIDC_CLIENT_ID.to_owned(),\n\n \"grant_type\".to_owned() => \"refresh_token\".to_owned(),\n\n \"refresh_token\".to_owned() => refresh_token.into(),\n\n \"scopes\".to_owned() => OIDC_SCOPES.join(\" \")\n\n };\n\n Ok(body)\n\n}\n\n\n\n/// Acquire tokens with the auth code\n\npub async fn acquire_tokens(\n\n oidc_settings: &OidcServerSettings,\n\n redirect_url: &Url,\n\n authorization_code: &AuthorizationCode,\n\n code_verifier: &CodeVerifier,\n\n) -> Result<TokenResponse> {\n\n let token_url = oidc_settings.token_endpoint.clone();\n", "file_path": "lib/src/auth/oidc.rs", "rank": 24, "score": 77622.31179355594 }, { "content": "pub fn table_format(left_pad: usize, right_pad: usize) -> format::TableFormat {\n\n format::FormatBuilder::new()\n\n .column_separator(' ')\n\n .borders(' ')\n\n .separators(\n\n &[format::LinePosition::Top, format::LinePosition::Bottom],\n\n format::LineSeparator::new(' ', ' ', ' ', ' '),\n\n )\n\n .padding(left_pad, right_pad)\n\n .build()\n\n}\n", "file_path": "lib/src/utils.rs", "rank": 25, "score": 75865.74794572781 }, { "content": "}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct Vulnerability {\n\n pub cve: Vec<String>,\n\n #[serde(rename = \"severity\")]\n\n pub base_severity: f32,\n\n pub risk_level: RiskLevel,\n\n pub title: String,\n\n pub description: String,\n\n pub remediation: String,\n\n}\n\n\n\nimpl FromStr for PackageType {\n\n type Err = ();\n\n\n\n fn from_str(input: &str) -> Result<Self, Self::Err> {\n\n match input.to_lowercase().as_str() {\n\n \"npm\" => Ok(Self::Npm),\n\n \"python\" => Ok(Self::Python),\n", "file_path": "lib/src/types/package.rs", "rank": 26, "score": 69292.1788863727 }, { "content": " PackageType::Java => \"Java\",\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct PackageDescriptor {\n\n pub name: String,\n\n pub version: String,\n\n pub r#type: PackageType,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub 
struct PackageStatus {\n\n pub name: String,\n\n pub version: String,\n\n pub status: Status,\n\n pub last_updated: u64,\n\n pub license: Option<String>,\n\n pub package_score: Option<f64>,\n", "file_path": "lib/src/types/package.rs", "rank": 27, "score": 69291.25370809081 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse super::common::*;\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum PackageType {\n\n Npm,\n\n Python,\n\n Java,\n\n Ruby,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, PartialOrd, Ord, Serialize)]\n\npub enum RiskLevel {\n\n #[serde(rename = \"info\")]\n", "file_path": "lib/src/types/package.rs", "rank": 28, "score": 69287.8114885306 }, { "content": " \"java\" => Ok(Self::Java),\n\n \"ruby\" => Ok(Self::Ruby),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for PackageType {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let package_type = format!(\"{:?}\", self);\n\n write!(f, \"{}\", package_type.to_lowercase())\n\n }\n\n}\n\n\n\nimpl PackageType {\n\n pub fn language(&self) -> &str {\n\n match self {\n\n PackageType::Npm => \"Javascript\",\n\n PackageType::Ruby => \"Ruby\",\n\n PackageType::Python => \"Python\",\n", "file_path": "lib/src/types/package.rs", "rank": 29, "score": 69287.20443795843 }, { "content": " pub num_dependencies: u32,\n\n pub num_vulnerabilities: u32,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct PackageStatusExtended {\n\n #[serde(flatten)]\n\n pub basic_status: PackageStatus,\n\n pub r#type: PackageType,\n\n #[serde(rename = \"riskVectors\")]\n\n pub risk_vectors: HashMap<String, f64>,\n\n pub dependencies: Vec<PackageDescriptor>,\n\n pub vulnerabilities: Vec<Vulnerability>,\n\n pub heuristics: HashMap<String, HeuristicResult>,\n\n pub issues: Vec<Issue>,\n\n}\n", "file_path": 
"lib/src/types/package.rs", "rank": 30, "score": 69285.60745657515 }, { "content": " };\n\n write!(f, \"{}\", risk_domain)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct Issue {\n\n pub title: String,\n\n pub description: String,\n\n #[serde(alias = \"severity\")]\n\n pub risk_level: RiskLevel,\n\n #[serde(alias = \"domain\")]\n\n pub risk_domain: RiskDomain,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct HeuristicResult {\n\n pub domain: RiskDomain,\n\n pub score: f64,\n\n pub risk_level: RiskLevel,\n", "file_path": "lib/src/types/package.rs", "rank": 31, "score": 69281.13741110533 }, { "content": " Info,\n\n #[serde(rename = \"low\")]\n\n Low,\n\n #[serde(rename = \"medium\")]\n\n Med,\n\n #[serde(rename = \"high\")]\n\n High,\n\n #[serde(rename = \"critical\")]\n\n Crit,\n\n}\n\n\n\nimpl fmt::Display for RiskLevel {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let risk_level = format!(\"{:?}\", self);\n\n write!(f, \"{}\", risk_level.to_lowercase())\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]\n\npub enum RiskDomain {\n", "file_path": "lib/src/types/package.rs", "rank": 32, "score": 69276.6196601358 }, { "content": " #[serde(rename = \"malicious_code\")]\n\n MaliciousCode,\n\n #[serde(rename = \"vulnerability\")]\n\n Vulnerabilities,\n\n #[serde(rename = \"engineering\")]\n\n EngineeringRisk,\n\n #[serde(rename = \"author\")]\n\n AuthorRisk,\n\n #[serde(rename = \"license\")]\n\n LicenseRisk,\n\n}\n\n\n\nimpl fmt::Display for RiskDomain {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let risk_domain = match self {\n\n RiskDomain::MaliciousCode => \"MAL\",\n\n RiskDomain::Vulnerabilities => \"VLN\",\n\n RiskDomain::EngineeringRisk => \"ENG\",\n\n RiskDomain::AuthorRisk => \"AUT\",\n\n RiskDomain::LicenseRisk => \"LIC\",\n", "file_path": "lib/src/types/package.rs", "rank": 33, "score": 69268.06996105988 }, { "content": 
"#[pymodule]\n\nfn cli_python(_py: Python, m: &PyModule) -> PyResult<()> {\n\n m.add_class::<PhylumApi>()?;\n\n Ok(())\n\n}\n", "file_path": "bindings/python/src/lib.rs", "rank": 34, "score": 68331.9882809014 }, { "content": "fn main() {\n\n println!(\"Running build\");\n\n let yml = load_yaml!(\"src/bin/.conf/cli.yaml\");\n\n let mut app = App::from(yml);\n\n\n\n // Create tab completions files for some popular shells\n\n generate_to(\n\n Bash, &mut app, // We need to specify what generator to use\n\n BIN_NAME, // We need to specify the bin name manually\n\n OUT_DIR, // We need to specify where to write to\n\n )\n\n .unwrap();\n\n\n\n generate_to(\n\n Zsh, &mut app, // We need to specify what generator to use\n\n BIN_NAME, // We need to specify the bin name manually\n\n OUT_DIR, // We need to specify where to write to\n\n )\n\n .unwrap();\n\n\n\n generate_to(\n\n Fish, &mut app, // We need to specify what generator to use\n\n BIN_NAME, // We need to specify the bin name manually\n\n OUT_DIR, // We need to specify where to write to\n\n )\n\n .unwrap();\n\n}\n", "file_path": "lib/build.rs", "rank": 35, "score": 58648.165639650935 }, { "content": "#[derive(Deserialize)]\n\nstruct TestHeaders {\n\n #[serde(default)]\n\n #[serde(rename = \"User-Agent\")]\n\n user_agent: String,\n\n\n\n #[serde(default)]\n\n #[serde(rename = \"X-Test\")]\n\n test: String,\n\n}\n\n\n\nimpl RestPath<()> for HttpBinAnything {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(String::from(\"anything\"))\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn headers() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n", "file_path": "lib/tests/headers.rs", "rank": 36, "score": 56966.9781017389 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"phylum-online\"), ignore)]\n\nfn ping_system() {\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd.arg(\"ping\").assert();\n\n assert.success().stdout(\"\\\"Alive\\\"\\n\");\n\n}\n\n\n", 
"file_path": "lib/tests/cmd.rs", "rank": 37, "score": 56105.513930682195 }, { "content": "fn vuln_to_rows(\n\n vuln: &Vulnerability,\n\n pkg_name: Option<&str>,\n\n pkg_version: Option<&str>,\n\n) -> Vec<Row> {\n\n let mut rows = Vec::new();\n\n\n\n let cve_s = if !vuln.cve.is_empty() {\n\n vuln.cve.join(\"/\")\n\n } else {\n\n \"[No CVE listed]\".to_string()\n\n };\n\n\n\n let pkg_descriptor = if pkg_name.is_some() && pkg_version.is_some() {\n\n format!(\"{}@{}\", pkg_name.unwrap(), pkg_version.unwrap())\n\n } else {\n\n \"\".to_string()\n\n };\n\n\n\n rows.push(Row::new(vec![\n", "file_path": "lib/src/summarize.rs", "rank": 38, "score": 56105.513930682195 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct HttpBinPost {\n\n data: String,\n\n}\n\n\n", "file_path": "lib/tests/post.rs", "rank": 39, "score": 55826.546575067754 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct HttpBinDelete {\n\n data: String,\n\n}\n\n\n\nimpl RestPath<()> for HttpBinDelete {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(String::from(\"delete\"))\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn basic_delete() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n\n let req = HttpBinDelete {\n\n data: String::from(\"test data\"),\n\n };\n\n\n\n client.delete((), &req).await.unwrap();\n\n}\n", "file_path": "lib/tests/delete.rs", "rank": 40, "score": 55826.546575067754 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct HttpBinPut {\n\n data: String,\n\n}\n\n\n", "file_path": "lib/tests/put.rs", "rank": 41, "score": 55826.546575067754 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct HttpBinPatch {\n\n data: String,\n\n}\n\n\n\nimpl RestPath<()> for HttpBinPatch {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(String::from(\"patch\"))\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn basic_patch() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n\n let data = HttpBinPatch {\n\n data: 
String::from(\"test data\"),\n\n };\n\n client.patch((), &data).await.unwrap();\n\n}\n\n\n", "file_path": "lib/tests/patch.rs", "rank": 42, "score": 55826.546575067754 }, { "content": "#[derive(Deserialize)]\n\nstruct HttpBinAnything {\n\n headers: TestHeaders,\n\n}\n\n\n", "file_path": "lib/tests/headers.rs", "rank": 43, "score": 55822.49981883265 }, { "content": "#[derive(Deserialize)]\n\nstruct HttpBinAnything {\n\n url: String,\n\n args: HttpBinAnythingArgs,\n\n}\n\n\n", "file_path": "lib/tests/get.rs", "rank": 44, "score": 55822.49981883265 }, { "content": "#[pyclass]\n\n#[text_signature = \"(base_url, timeout=None)\"]\n\nstruct PhylumApi {\n\n api: RustPhylumApi,\n\n}\n\n\n\n#[pymethods]\n\nimpl PhylumApi {\n\n #[new]\n\n #[args(base_url = \"\\\"https://api.phylum.io\\\"\", timeout = \"None\")]\n\n pub fn new(base_url: &str, timeout: Option<u64>) -> PyResult<Self> {\n\n RustPhylumApi::new(base_url, timeout)\n\n .map(|api| PhylumApi { api })\n\n .map_err(|e| {\n\n PyRuntimeError::new_err(format!(\"Failed to create new api instance: {:?}\", e))\n\n })\n\n }\n\n\n\n /// Register a new user in the system\n\n ///\n\n /// email\n\n /// account username\n", "file_path": "bindings/python/src/lib.rs", "rank": 45, "score": 55822.308195447906 }, { "content": "#[pyclass]\n\nstruct ApiToken {\n\n #[pyo3(get)]\n\n active: bool,\n\n #[pyo3(get)]\n\n key: String,\n\n #[pyo3(get)]\n\n user_id: String,\n\n}\n\n\n\n/// Create a new instance of the Phylum API\n\n/// \n\n/// base_url\n\n/// The base url for the api to connect to.\n\n/// timeout\n\n/// The timeout (in seconds) for requests to the api (default: 30s).\n", "file_path": "bindings/python/src/lib.rs", "rank": 46, "score": 55818.3760612693 }, { "content": "#[pyclass]\n\nstruct JwtToken {\n\n #[pyo3(get)]\n\n access: String,\n\n #[pyo3(get)]\n\n refresh: Option<String>,\n\n}\n\n\n", "file_path": "bindings/python/src/lib.rs", "rank": 47, "score": 55818.3760612693 }, { "content": "pub trait Scored {\n\n fn score(&self) -> 
f64;\n\n}\n\n\n\nimpl Scored for PackageStatus {\n\n fn score(&self) -> f64 {\n\n self.package_score.unwrap_or(1.0)\n\n }\n\n}\n\n\n\nimpl Scored for PackageStatusExtended {\n\n fn score(&self) -> f64 {\n\n self.basic_status.package_score.unwrap_or(1.0)\n\n }\n\n}\n\n\n", "file_path": "lib/src/summarize.rs", "rank": 48, "score": 55427.51230755226 }, { "content": "pub trait Renderable {\n\n fn render(&self) -> String;\n\n}\n\n\n\nimpl Renderable for () {\n\n fn render(&self) -> String {\n\n \"\".to_string()\n\n }\n\n}\n\n\n\nimpl<T> Renderable for Vec<T>\n\nwhere\n\n T: Renderable,\n\n{\n\n fn render(&self) -> String {\n\n self.iter()\n\n .map(|t| t.render())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n }\n", "file_path": "lib/src/render.rs", "rank": 49, "score": 55427.51230755226 }, { "content": "pub trait Parseable {\n\n fn new(filename: &Path) -> Result<Self, io::Error>\n\n where\n\n Self: Sized;\n\n fn parse(&self) -> ParseResult;\n\n}\n\n\n\nimpl Parseable for PackageLock {\n\n fn new(filename: &Path) -> Result<Self, io::Error>\n\n where\n\n Self: Sized,\n\n {\n\n Ok(PackageLock(std::fs::read_to_string(filename)?))\n\n }\n\n\n\n /// Parses `package-lock.json` files into a vec of packages\n\n fn parse(&self) -> ParseResult {\n\n let parsed: Value = serde_json::from_str(&self.0)?;\n\n\n\n parsed[\"dependencies\"]\n", "file_path": "lib/src/lockfiles.rs", "rank": 50, "score": 55427.51230755226 }, { "content": "class AssertsAccessorType(type):\n\n dummy = DummyTestCase()\n\n\n\n def __getattr__(cls, key):\n", "file_path": "lib/tests/static_asserts.py", "rank": 51, "score": 55121.227481090806 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"phylum-online\"), ignore)]\n\nfn get_job_status() {\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd.arg(\"status\").assert();\n\n\n\n let resp = String::from_utf8_lossy(&assert.get_output().stdout);\n\n let obj: Vec<JobDescriptor> = serde_json::from_str(&resp).unwrap();\n\n\n\n let job_id = 
obj[0].job_id;\n\n\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd.args(&[\"status\", \"-i\", &job_id.to_string()]).assert();\n\n\n\n let output = &assert.get_output().stderr;\n\n assert!(is_sub(output, b\"success\"));\n\n}\n\n\n", "file_path": "lib/tests/cmd.rs", "rank": 52, "score": 54964.73895023208 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"phylum-online\"), ignore)]\n\nfn get_basic_status() {\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd.arg(\"status\").assert();\n\n\n\n let output = &assert.get_output().stderr;\n\n assert!(is_sub(output, b\"success\"));\n\n}\n\n\n", "file_path": "lib/tests/cmd.rs", "rank": 53, "score": 54964.73895023208 }, { "content": "#[derive(Deserialize)]\n\nstruct HttpBinBasicAuth {}\n\n\n\nimpl<'a> RestPath<(&'a str, &'a str)> for HttpBinBasicAuth {\n\n fn get_path(auth: (&str, &str)) -> Result<String, Error> {\n\n let (user, pass) = auth;\n\n Ok(format!(\"basic-auth/{}/{}\", user, pass))\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn basic_auth() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n\n client.set_auth(\"username\", \"passwd\");\n\n client\n\n .get::<_, HttpBinBasicAuth>((\"username\", \"passwd\"))\n\n .await\n\n .unwrap();\n\n}\n\n\n", "file_path": "lib/tests/auth.rs", "rank": 54, "score": 54753.77844452244 }, { "content": "#[derive(Deserialize)]\n\nstruct HttpBinAnythingArgs {\n\n #[serde(default)]\n\n a: String,\n\n #[serde(default)]\n\n b: String,\n\n}\n\n\n\nimpl RestPath<()> for HttpBinAnything {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(String::from(\"anything\"))\n\n }\n\n}\n\n\n\nimpl RestPath<u32> for HttpBinAnything {\n\n fn get_path(param: u32) -> Result<String, Error> {\n\n Ok(format!(\"anything/{}\", param))\n\n }\n\n}\n\n\n\nimpl<'a> RestPath<(u32, &'a str)> for HttpBinAnything {\n", "file_path": "lib/tests/get.rs", "rank": 55, "score": 54753.77844452244 }, { "content": 
"#[derive(Deserialize)]\n\nstruct HttpBinPutResp {\n\n json: HttpBinPut,\n\n url: String,\n\n}\n\n\n\nimpl RestPath<()> for HttpBinPut {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(String::from(\"put\"))\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn basic_put() {\n\n let mut client = RestClient::new(\"https://httpbin.org\").unwrap();\n\n\n\n let data = HttpBinPut {\n\n data: String::from(\"test data\"),\n\n };\n\n client.put((), &data).await.unwrap();\n\n}\n", "file_path": "lib/tests/put.rs", "rank": 56, "score": 54753.77844452244 }, { "content": "#[derive(Deserialize)]\n\nstruct HttpBinPostResp {\n\n json: HttpBinPost,\n\n url: String,\n\n}\n\n\n\nimpl RestPath<()> for HttpBinPost {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(String::from(\"post\"))\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn basic_post() {\n\n let mut client = RestClient::new(\"https://httpbin.org\").unwrap();\n\n\n\n let data = HttpBinPost {\n\n data: String::from(\"test data\"),\n\n };\n\n client.post((), &data).await.unwrap();\n\n}\n", "file_path": "lib/tests/post.rs", "rank": 57, "score": 54753.77844452244 }, { "content": " pub trait MockResponderExt {\n\n fn respond_with_fn<F>(self, function: F) -> Mock\n\n where\n\n F: Fn(&Request) -> ResponseTemplate + Send + Sync + 'static;\n\n }\n\n\n\n impl MockResponderExt for MockBuilder {\n\n fn respond_with_fn<F>(self, function: F) -> Mock\n\n where\n\n F: Fn(&Request) -> ResponseTemplate + Send + Sync + 'static,\n\n {\n\n self.respond_with(ResponderFn(function))\n\n }\n\n }\n\n\n\n pub fn build_oidc_server_settings_mock_response(base_uri: &str) -> OidcServerSettings {\n\n let base_url = Url::from_str(base_uri).expect(\"Failed to parse base url\");\n\n OidcServerSettings {\n\n issuer: base_url.clone(),\n\n authorization_endpoint: base_url.join(AUTH_URI).unwrap(),\n", "file_path": "lib/src/test.rs", "rank": 58, "score": 53206.11817536162 }, { "content": "pub trait Summarize: Renderable {\n\n fn summarize(&self, _filter: 
Option<Filter>) {\n\n println!(\"{}\", self.render());\n\n }\n\n}\n\n\n", "file_path": "lib/src/summarize.rs", "rank": 59, "score": 52291.04232854099 }, { "content": "#[test]\n\n#[cfg_attr(not(feature = \"phylum-online\"), ignore)]\n\nfn get_job_status_non_existent_job() {\n\n let mut cmd = Command::cargo_bin(\"phylum\").unwrap();\n\n let assert = cmd\n\n .args(&[\"status\", \"-i\", \"ffffffff-ffff-ffff-ffff-ffffffffffff\"])\n\n .assert();\n\n\n\n let output = assert.get_output();\n\n assert_eq!(output.stdout, b\"\");\n\n assert!(is_sub(&output.stderr, b\"404 Not Found\"));\n\n}\n\n\n", "file_path": "lib/tests/cmd.rs", "rank": 60, "score": 51967.35628160656 }, { "content": "pub trait IpAddrExt {\n\n /// Determine if an address is possibly routable beyond the local network\n\n /// segment. This method considers ANY ip address that is not software scope\n\n /// (0.0.0.0 / ::), loopback, or link_local to be potentially routable\n\n fn is_routable(&self) -> bool;\n\n}\n\n\n\nimpl IpAddrExt for IpAddr {\n\n fn is_routable(&self) -> bool {\n\n match self {\n\n Self::V4(ipv4) => ipv4.is_routable(),\n\n Self::V6(ipv6) => ipv6.is_routable(),\n\n }\n\n }\n\n}\n\n\n\nimpl IpAddrExt for Ipv4Addr {\n\n fn is_routable(&self) -> bool {\n\n ipv4::is_routable(self)\n\n }\n", "file_path": "lib/src/auth/ip_addr_ext.rs", "rank": 61, "score": 50375.62897699217 }, { "content": "// State to store the shutdown hook state\n\nstruct ShutdownHookState(Mutex<Option<Sender<()>>>);\n\n\n\n/// Handler to be used as the GET endpoint that keycloak redirects to.\n\n///\n\n/// This handler tries to parse the request and extract the code.\n\n///\n\n/// If a code is present, it updates the internal state and stores the code in it\n\nasync fn keycloak_callback_handler(request: Request<Body>) -> Result<Response<Body>> {\n\n log::debug!(\"Callback handler triggered!\");\n\n\n\n let shutdown_hook = request\n\n .data::<ShutdownHookState>()\n\n .expect(\"Shutdown hook not set as hyper state\");\n\n\n\n let 
auth_code: &AuthCodeState = request\n\n .data::<AuthCodeState>()\n\n .expect(\"State for holding auth code not set\");\n\n\n\n let saved_state: &OAuth2CallbackState = request\n\n .data::<OAuth2CallbackState>()\n", "file_path": "lib/src/auth/server.rs", "rank": 62, "score": 46048.48770364767 }, { "content": "fn check_filter_issue(filter: &Filter, issue: &Issue) -> bool {\n\n let mut include = true;\n\n if let Some(ref level) = filter.level {\n\n if issue.risk_level < *level {\n\n include = false;\n\n }\n\n }\n\n if let Some(ref domains) = filter.domains {\n\n if !domains.contains(&issue.risk_domain) {\n\n include = false;\n\n }\n\n }\n\n include\n\n}\n\n\n\nimpl Summarize for RequestStatusResponse<PackageStatusExtended> {\n\n fn summarize(&self, filter: Option<Filter>) {\n\n let table_1: Table = response_to_table(self);\n\n\n\n let mut table_2 = Table::new();\n", "file_path": "lib/src/summarize.rs", "rank": 63, "score": 42126.891065666685 }, { "content": "fn check_filter_vuln(filter: &Filter, vuln: &Vulnerability) -> bool {\n\n let mut include = true;\n\n if let Some(domains) = &filter.domains {\n\n if !domains.contains(&RiskDomain::Vulnerabilities) {\n\n include = false;\n\n }\n\n }\n\n if let Some(level) = &filter.level {\n\n if vuln.risk_level < *level {\n\n include = false;\n\n }\n\n }\n\n include\n\n}\n\n\n", "file_path": "lib/src/summarize.rs", "rank": 64, "score": 42126.891065666685 }, { "content": "fn response_to_table<T>(resp: &RequestStatusResponse<T>) -> Table\n\nwhere\n\n T: Scored,\n\n{\n\n let ecosystem = PackageType::from_str(&resp.ecosystem).unwrap_or(PackageType::Npm);\n\n\n\n let date_time = NaiveDateTime::from_timestamp(resp.created_at / 1000, 0);\n\n\n\n let details = [\n\n (\n\n \"Project\",\n\n resp.project_name.to_string(),\n\n \"Label\",\n\n resp.label.as_ref().unwrap_or(&\"\".to_string()).to_owned(),\n\n ),\n\n (\n\n \"Proj Score\",\n\n (100.0 * resp.score).round().to_string(),\n\n \"Date\",\n\n format!(\"{} UTC\", date_time),\n", 
"file_path": "lib/src/summarize.rs", "rank": 65, "score": 41346.45192534777 }, { "content": "fn is_sub<T: PartialEq>(haystack: &[T], needle: &[T]) -> bool {\n\n haystack.windows(needle.len()).any(|c| c == needle)\n\n}\n\n\n", "file_path": "lib/tests/cmd.rs", "rank": 66, "score": 39516.11313587107 }, { "content": " panic!(\"expected timeout error\");\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn deserialize_error() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n\n // Service returns decoded base64 in body which should be string 'test'.\n\n // This fails JSON deserialization and is returned in the Error\n\n if let Err(Error::DeserializeParseError(_, data)) = client\n\n .get::<String, HttpBinBase64>(\"dGVzdA==\".to_string())\n\n .await\n\n {\n\n assert!(data == \"test\");\n\n } else {\n\n panic!(\"expected serialized error\");\n\n }\n\n}\n", "file_path": "lib/tests/error.rs", "rank": 71, "score": 36174.39322697866 }, { "content": "extern crate phylum_cli;\n\n\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\nuse phylum_cli::restson::{Error, RestClient, RestPath};\n\nuse std::time::{Duration, Instant};\n\n\n\nmod logging;\n\n\n\n#[derive(Serialize, Deserialize)]\n", "file_path": "lib/tests/error.rs", "rank": 72, "score": 36173.765126047554 }, { "content": " let data = InvalidResource {};\n\n\n\n if client.post((), &data).await.is_ok() {\n\n panic!(\"expected error\");\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn path_error() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n\n if let Err(Error::UrlError) = client.get::<bool, InvalidResource>(false).await {\n\n } else {\n\n panic!(\"expected url error\");\n\n }\n\n}\n\n\n\n#[tokio::test]\n\nasync fn http_error() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n", "file_path": "lib/tests/error.rs", "rank": 73, "score": 36173.41351939116 }, { "content": "\n\n match client.get::<_, HttpBinStatus>(418).await {\n\n Err(Error::HttpError(s, 
body)) => {\n\n assert_eq!(s, 418);\n\n assert!(!body.is_empty());\n\n }\n\n _ => panic!(\"Expected 418 error status with response body\"),\n\n };\n\n}\n\n\n\n#[tokio::test]\n\nasync fn request_timeout() {\n\n let mut client = RestClient::new(\"http://httpbin.org\").unwrap();\n\n\n\n client.set_timeout(Duration::from_secs(1));\n\n\n\n let start = Instant::now();\n\n if let Err(Error::TimeoutError) = client.get::<u16, HttpBinDelay>(3).await {\n\n assert!(start.elapsed().as_secs() == 1);\n\n } else {\n", "file_path": "lib/tests/error.rs", "rank": 74, "score": 36163.67043019099 }, { "content": "use std::str::FromStr;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nmod auth;\n\nmod common;\n\nmod job;\n\nmod package;\n\nmod project;\n\nmod user_settings;\n\n\n\npub use auth::*;\n\npub use common::*;\n\npub use job::*;\n\npub use package::*;\n\npub use project::*;\n\npub use user_settings::*;\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n", "file_path": "lib/src/types.rs", "rank": 75, "score": 35731.17676518545 }, { "content": "pub struct GithubRelease {\n\n pub name: String,\n\n pub assets: Vec<GithubReleaseAsset>,\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize)]\n\npub struct GithubReleaseAsset {\n\n pub browser_download_url: String,\n\n pub name: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_risk_level_ordering() {\n\n assert!(\n\n RiskLevel::Info < RiskLevel::Low\n\n && RiskLevel::Low < RiskLevel::Med\n\n && RiskLevel::Med < RiskLevel::High\n\n && RiskLevel::High < RiskLevel::Crit,\n\n \"Ordering of risk levels is invalid\"\n\n );\n\n }\n\n}\n", "file_path": "lib/src/types.rs", "rank": 76, "score": 35729.20754671697 }, { "content": "pub enum Role {\n\n Admin,\n\n User,\n\n Observer,\n\n}\n\n\n\nimpl FromStr for Role {\n\n type Err = ();\n\n\n\n fn from_str(input: &str) -> Result<Self, Self::Err> {\n\n match input {\n\n \"administrator\" => 
Ok(Self::Admin),\n\n \"observer\" => Ok(Self::Observer),\n\n \"user\" => Ok(Self::User),\n\n _ => Err(()),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize)]\n", "file_path": "lib/src/types.rs", "rank": 77, "score": 35728.231948409375 }, { "content": " pub ecosystem: String,\n\n #[serde(default)]\n\n pub num_incomplete: u32,\n\n}\n\n\n\n/// PUT /job\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct PackageRequest {\n\n pub r#type: PackageType,\n\n pub packages: Vec<PackageDescriptor>,\n\n pub is_user: bool,\n\n pub project: ProjectId,\n\n pub label: String,\n\n}\n\n\n\nimpl RestPath<()> for PackageRequest {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(format!(\"{}/job\", API_PATH))\n\n }\n\n}\n", "file_path": "lib/src/types/job.rs", "rank": 78, "score": 34433.936757179945 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\npub struct ProjectCreateRequest {\n\n pub name: String,\n\n}\n\n\n\nimpl RestPath<()> for ProjectCreateRequest {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(format!(\"{}/job/projects\", API_PATH))\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct ProjectCreateResponse {\n\n pub id: ProjectId,\n\n}\n", "file_path": "lib/src/types/project.rs", "rank": 79, "score": 34433.32474287159 }, { "content": " pub project_name: String,\n\n pub label: Option<String>,\n\n pub thresholds: ProjectThresholds,\n\n pub packages: Vec<T>,\n\n}\n\n\n\n/// DELETE /request/packages/<job_id>\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct CancelRequestResponse {\n\n pub msg: String,\n\n}\n\n\n\n/// GET /job/<job_id>\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct StatusRequest {\n\n job_id: JobId,\n\n}\n\n\n\nimpl<'a> RestPath<JobId> for RequestStatusResponse<PackageStatus> {\n\n fn get_path(job_id: JobId) -> Result<String, Error> {\n", "file_path": "lib/src/types/job.rs", "rank": 80, "score": 34431.219512623 }, { "content": "\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub 
struct PackageSubmissionResponse {\n\n pub job_id: JobId,\n\n}\n\n\n\n/// GET /job/auth_status\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct AuthStatusResponse {\n\n pub authenticated: bool,\n\n}\n\n\n\nimpl RestPath<()> for AuthStatusResponse {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(format!(\"{}/job/auth_status\", API_PATH))\n\n }\n\n}\n\n\n\n/// GET /job/heartbeat\n\n#[derive(Debug, Serialize, Deserialize)]\n", "file_path": "lib/src/types/job.rs", "rank": 81, "score": 34430.97862941163 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse crate::restson::{Error, RestPath};\n\nuse crate::types::API_PATH;\n\n\n\nuse super::common::ProjectId;\n\nuse super::job::*;\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct Projecct {\n\n pub score: u32,\n\n pub passing: bool,\n\n pub name: String,\n\n pub id: ProjectId,\n\n pub last_updated: u64,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct ProjectThresholds {\n\n pub author: f32,\n", "file_path": "lib/src/types/project.rs", "rank": 82, "score": 34430.51960895111 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse crate::restson::{Error, RestPath};\n\nuse crate::types::API_PATH;\n\n\n\nuse super::common::*;\n\nuse super::package::*;\n\nuse super::project::*;\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct JobDescriptor {\n\n pub job_id: JobId,\n\n pub project: String,\n\n pub label: String,\n\n pub num_dependencies: u32,\n\n pub score: f64,\n\n pub packages: Vec<PackageDescriptor>,\n\n pub pass: bool,\n\n pub msg: String,\n\n pub date: String,\n", "file_path": "lib/src/types/job.rs", "rank": 83, "score": 34430.30788370518 }, { "content": " Ok(format!(\"{}/job/{}\", API_PATH, job_id))\n\n }\n\n}\n\n\n\nimpl<'a> RestPath<JobId> for RequestStatusResponse<PackageStatusExtended> {\n\n fn get_path(job_id: JobId) -> Result<String, Error> {\n\n Ok(format!(\"{}/job/{}?verbose=True\", API_PATH, job_id))\n\n 
}\n\n}\n\n\n\nimpl<'a> RestPath<JobId> for CancelRequestResponse {\n\n fn get_path(job_id: JobId) -> Result<String, Error> {\n\n Ok(format!(\"{}/job/{}\", API_PATH, job_id))\n\n }\n\n}\n\n\n\n/// GET /job/packages/<type>/<name>/<version>\n\nimpl<'a> RestPath<PackageDescriptor> for PackageStatusExtended {\n\n fn get_path(pkg: PackageDescriptor) -> Result<String, Error> {\n\n let name_escaped = pkg.name.replace(\"/\", \"~\");\n", "file_path": "lib/src/types/job.rs", "rank": 84, "score": 34429.353353384024 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse uuid::Uuid;\n\n\n\npub const API_PATH: &str = \"api/v0\";\n\npub const PROJ_CONF_FILE: &str = \".phylum_project\";\n\n\n\npub type ProjectId = Uuid;\n\npub type JobId = Uuid;\n\npub type UserId = Uuid;\n\npub type Key = Uuid;\n\npub type PackageId = String;\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Status {\n\n Complete,\n\n Incomplete,\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum Action {\n\n None,\n\n Warn,\n\n Break,\n\n}\n", "file_path": "lib/src/types/common.rs", "rank": 85, "score": 34428.98993521613 }, { "content": " let endpoint = format!(\"{}/{}/{}\", pkg.r#type, name_escaped, pkg.version);\n\n Ok(format!(\"{}/job/packages/{}\", API_PATH, endpoint))\n\n }\n\n}\n\n\n\nimpl RestPath<&str> for ProjectGetDetailsRequest {\n\n fn get_path(pkg_id: &str) -> Result<String, Error> {\n\n Ok(format!(\"{}/job/projects/name/{}\", API_PATH, pkg_id))\n\n }\n\n}\n", "file_path": "lib/src/types/job.rs", "rank": 86, "score": 34426.67672471823 }, { "content": " Ok(format!(\"{}/job/projects/overview\", API_PATH))\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct ProjectGetResponse {\n\n pub id: ProjectId,\n\n}\n\n\n\n/// GET /projects/<project-id>\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct ProjectGetDetailsRequest {\n\n pub name: String,\n\n pub id: 
String,\n\n pub ecosystem: String,\n\n pub thresholds: ProjectThresholds,\n\n pub jobs: Vec<JobDescriptor>,\n\n}\n\n\n\n/// PUT /projects\n", "file_path": "lib/src/types/project.rs", "rank": 87, "score": 34426.42439749197 }, { "content": " pub engineering: f32,\n\n pub license: f32,\n\n pub malicious: f32,\n\n pub total: f32,\n\n pub vulnerability: f32,\n\n}\n\n\n\n/// GET /projects/overview\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct ProjectGetRequest {\n\n pub name: String,\n\n pub id: String,\n\n pub updated_at: String,\n\n // TODO: Need to update request manager to include thresholds with this\n\n // response.\n\n //pub thresholds: ProjectThresholds,\n\n}\n\n\n\nimpl RestPath<()> for Vec<ProjectGetRequest> {\n\n fn get_path(_: ()) -> Result<String, Error> {\n", "file_path": "lib/src/types/project.rs", "rank": 88, "score": 34425.57336341704 }, { "content": "pub struct PingResponse {\n\n pub msg: String,\n\n}\n\n\n\nimpl RestPath<()> for PingResponse {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n Ok(format!(\"{}/job/heartbeat\", API_PATH))\n\n }\n\n}\n\n\n\n/// GET /job\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct AllJobsStatusResponse {\n\n pub jobs: Vec<JobDescriptor>,\n\n pub total_jobs: u32,\n\n pub count: u32,\n\n}\n\n\n\nimpl RestPath<u32> for AllJobsStatusResponse {\n\n fn get_path(limit: u32) -> Result<String, Error> {\n", "file_path": "lib/src/types/job.rs", "rank": 89, "score": 34425.39340981433 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// Typed wrapper for AuthorizationCode\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct AuthorizationCode(String);\n\n\n\nimpl AuthorizationCode {\n\n pub fn new(string: impl AsRef<str>) -> Self {\n\n Self(string.as_ref().to_owned())\n\n }\n\n}\n\n\n\nimpl From<&AuthorizationCode> for String {\n\n fn from(val: &AuthorizationCode) -> Self {\n\n val.0.to_owned()\n\n }\n\n}\n\n\n\n/// Typed wrapper for RefreshToken\n\n#[derive(Debug, Clone, Deserialize, 
Serialize)]\n", "file_path": "lib/src/types/auth.rs", "rank": 90, "score": 34423.26022966785 }, { "content": " Self(string.as_ref().to_owned())\n\n }\n\n}\n\n\n\nimpl From<&AccessToken> for String {\n\n fn from(val: &AccessToken) -> Self {\n\n val.0.to_owned()\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a AccessToken> for &'a str {\n\n fn from(val: &'a AccessToken) -> Self {\n\n &val.0\n\n }\n\n}\n\n\n\n/// Typed wrapper for IdToken\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct IdToken(String);\n\n\n", "file_path": "lib/src/types/auth.rs", "rank": 91, "score": 34421.348578108904 }, { "content": " Ok(format!(\"{}/job/?limit={}&verbose=1\", API_PATH, limit))\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct RequestStatusResponse<T> {\n\n pub job_id: JobId,\n\n pub ecosystem: String,\n\n pub user_id: UserId,\n\n pub user_email: String,\n\n pub created_at: i64, // epoch seconds\n\n pub status: Status,\n\n pub score: f64,\n\n pub pass: bool,\n\n pub msg: String,\n\n pub action: Action,\n\n #[serde(default)]\n\n pub num_incomplete: u32,\n\n pub last_updated: u64,\n\n pub project: String, // project id\n", "file_path": "lib/src/types/job.rs", "rank": 92, "score": 34421.07001163495 }, { "content": "impl IdToken {\n\n pub fn new(string: impl AsRef<str>) -> Self {\n\n Self(string.as_ref().to_owned())\n\n }\n\n}\n\n\n\nimpl From<&IdToken> for String {\n\n fn from(val: &IdToken) -> Self {\n\n val.0.to_owned()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct TokenResponse {\n\n pub access_token: AccessToken,\n\n pub refresh_token: RefreshToken,\n\n pub id_token: IdToken,\n\n #[serde(rename = \"expires_in\")]\n\n pub expires_in_seconds: u32,\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct AccessTokenResponse {\n\n pub access_token: AccessToken,\n\n #[serde(rename = \"expires_in\")]\n\n pub expires_in_seconds: u32,\n\n}\n", "file_path": "lib/src/types/auth.rs", "rank": 93, "score": 
34419.67978695123 }, { "content": "pub struct RefreshToken(String);\n\n\n\nimpl RefreshToken {\n\n pub fn new(string: impl AsRef<str>) -> Self {\n\n Self(string.as_ref().to_owned())\n\n }\n\n}\n\n\n\nimpl From<&RefreshToken> for String {\n\n fn from(val: &RefreshToken) -> Self {\n\n val.0.to_owned()\n\n }\n\n}\n\n\n\n/// Typed wrapper for AccessToken\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct AccessToken(String);\n\n\n\nimpl AccessToken {\n\n pub fn new(string: impl AsRef<str>) -> Self {\n", "file_path": "lib/src/types/auth.rs", "rank": 94, "score": 34418.75974933746 }, { "content": "use std::collections::HashMap;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::restson::{Error, RestPath};\n\nuse crate::types::API_PATH;\n\n\n\n/// PUT /settings/current-user\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Threshold {\n\n pub action: String,\n\n pub active: bool,\n\n pub threshold: f32,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct UserProject {\n\n pub thresholds: HashMap<String, Threshold>,\n\n}\n\n\n", "file_path": "lib/src/types/user_settings.rs", "rank": 95, "score": 33205.656870043524 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[serde(untagged)]\n\npub enum Setting {\n\n DefaultLabel(HashMap<String, String>),\n\n Project(UserProject),\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct UserSettings {\n\n pub version: u32,\n\n pub projects: HashMap<String, Setting>,\n\n}\n\n\n\nimpl UserSettings {\n\n /// Sets the threshold for the given risk domain.\n\n pub fn set_threshold(\n\n &mut self,\n\n project_id: String,\n\n name: String,\n\n threshold: i32,\n", "file_path": "lib/src/types/user_settings.rs", "rank": 96, "score": 33202.94467130811 }, { "content": " },\n\n );\n\n }\n\n\n\n self.projects.insert(project_id, thresholds);\n\n }\n\n}\n\n\n\nimpl RestPath<()> for UserSettings {\n\n fn get_path(_: ()) -> Result<String, Error> {\n\n 
Ok(format!(\"{}/settings/current-user\", API_PATH))\n\n }\n\n}\n", "file_path": "lib/src/types/user_settings.rs", "rank": 97, "score": 33200.43866389079 }, { "content": " action: String,\n\n ) {\n\n log::debug!(\"Retrieving user settings for project: {}\", project_id);\n\n let mut thresholds = self\n\n .projects\n\n .get(project_id.as_str())\n\n .map(|s| s.to_owned())\n\n .unwrap_or_else(|| {\n\n Setting::Project(UserProject {\n\n thresholds: HashMap::new(),\n\n })\n\n });\n\n\n\n if let Setting::Project(ref mut t) = thresholds {\n\n t.thresholds.insert(\n\n name,\n\n Threshold {\n\n action,\n\n active: (threshold > 0),\n\n threshold: (threshold as f32) / 100.0,\n", "file_path": "lib/src/types/user_settings.rs", "rank": 98, "score": 33194.99414043691 }, { "content": "from unittest import TestCase\n\n\n\n# Dummy TestCase instance, so we can initialize an instance\n\n# and access the assert instance methods\n\nclass DummyTestCase(TestCase):\n\n def __init__(self):\n\n super(DummyTestCase, self).__init__()\n\n\n\n# A metaclass that makes __getattr__ static\n\nclass AssertsAccessorType(type):\n\n dummy = DummyTestCase()\n\n\n\n def __getattr__(cls, key):\n\n return getattr(AssertsAccessor.dummy, key)\n\n\n\n# The actual accessor, a static class, that redirect the asserts\n\nclass AssertsAccessor(object, metaclass=AssertsAccessorType):\n\n pass\n\n\n", "file_path": "lib/tests/static_asserts.py", "rank": 99, "score": 30428.03055489779 } ]
Rust
src/main.rs
advion/cartunes
83b085318b314a332d877e83c3841050b0c7699c
#![cfg_attr(not(any(test, debug_assertions)), windows_subsystem = "windows")] #![deny(clippy::all)] use crate::framework::{ConfigHandler, Framework, UserEvent}; use crate::gpu::{Error as GpuError, Gpu}; use crate::gui::{Error as GuiError, Gui}; use crate::setup::Setups; use log::error; use std::collections::VecDeque; use thiserror::Error; use winit::event::{Event, WindowEvent}; use winit::event_loop::{ControlFlow, EventLoop}; use winit::window::WindowBuilder; use winit_input_helper::WinitInputHelper; #[cfg(windows)] use winit::platform::windows::IconExtWindows; #[cfg(windows)] use winit::window::Icon; mod config; mod framework; mod gpu; mod gui; mod setup; mod str_ext; mod timer; mod updates; #[derive(Debug, Error)] enum Error { #[error("Window creation error: {0}")] Winit(#[from] winit::error::OsError), #[error("GUI Error: {0}")] Gui(#[from] GuiError), #[error("GPU Error: {0}")] Gpu(#[from] GpuError), } fn create_window() -> Result<(EventLoop<UserEvent>, winit::window::Window, Gpu, Framework), Error> { let config = Framework::load_config(); let window_builder = if let Ok(Some(config)) = config.as_ref() { if let Some(window) = config.get_window() { WindowBuilder::new() .with_position(window.position) .with_inner_size(window.size) } else { WindowBuilder::new() } } else { WindowBuilder::new() }; let window_builder = { #[cfg(target_os = "windows")] { const ICON_RESOURCE_ID: u16 = 2; window_builder.with_window_icon(Some( Icon::from_resource(ICON_RESOURCE_ID, None).expect("Unable to load icon"), )) } #[cfg(not(target_os = "windows"))] window_builder }; let event_loop = EventLoop::with_user_event(); let window = window_builder .with_title("CarTunes") .with_min_inner_size(Framework::min_size()) .build(&event_loop)?; let (gpu, framework) = { let window_size = window.inner_size(); let scale_factor = window.scale_factor() as f32; let mut errors = VecDeque::new(); let mut warnings = VecDeque::new(); let config = Framework::unwrap_config(&mut errors, event_loop.create_proxy(), 
config); let setups = Setups::new(&mut warnings, &config); let theme = config.theme().as_winit_theme(&window); let gui = Gui::new(config, setups, event_loop.create_proxy(), errors, warnings)?; let gpu = Gpu::new(&window, window_size)?; let framework = Framework::new( window_size, scale_factor, theme, gui, &gpu, event_loop.create_proxy(), ); (gpu, framework) }; Ok((event_loop, window, gpu, framework)) } fn main() -> Result<(), Error> { #[cfg(any(debug_assertions, not(windows)))] env_logger::init(); let (event_loop, window, mut gpu, mut framework) = create_window()?; let mut input = WinitInputHelper::new(); let mut keep_config = ConfigHandler::Replace; event_loop.run(move |event, _, control_flow| { if input.update(&event) { if let Some(scale_factor) = input.scale_factor() { framework.scale_factor(scale_factor); } if let Some(size) = input.window_resized() { if size.width > 0 && size.height > 0 { gpu.resize(size); framework.resize(size); } } window.request_redraw(); } match event { Event::UserEvent(event) => match event { UserEvent::ConfigHandler(config_handler) => { keep_config = config_handler; } UserEvent::Exit => { *control_flow = ControlFlow::Exit; } UserEvent::SetupPath(Some(setups_path)) => { framework.update_setups_path(setups_path); } UserEvent::FsChange(event) => { framework.handle_fs_change(event); } UserEvent::Theme(theme) => { let theme = theme.as_winit_theme(&window); framework.change_theme(theme, true); window.request_redraw(); } UserEvent::UpdateCheck => { framework.recreate_update_check(); } UserEvent::UpdateAvailable(notification) => { framework.add_update_notification(notification); } _ => (), }, Event::WindowEvent { event, .. 
} => { framework.handle_event(&event); match event { WindowEvent::ThemeChanged(theme) => { framework.change_theme(theme, false); window.request_redraw(); } WindowEvent::CloseRequested => { if keep_config == ConfigHandler::Keep || framework.save_config(&window) { *control_flow = ControlFlow::Exit; } } _ => (), } } Event::RedrawRequested(_) => { framework.prepare(&window); let (mut encoder, frame) = match gpu.prepare() { Ok((encoder, frame)) => (encoder, frame), Err(err) => { error!("gpu.prepare() failed: {}", err); *control_flow = ControlFlow::Exit; return; } }; let view = frame .texture .create_view(&wgpu::TextureViewDescriptor::default()); let render_result = framework.render(&mut encoder, &view, &gpu); if let Err(err) = render_result { error!("framework.render() failed: {}", err); *control_flow = ControlFlow::Exit; return; } gpu.queue.submit(Some(encoder.finish())); frame.present(); } _ => (), } }); }
#![cfg_attr(not(any(test, debug_assertions)), windows_subsystem = "windows")] #![deny(clippy::all)] use crate::framework::{ConfigHandler, Framework, UserEvent}; use crate::gpu::{Error as GpuError, Gpu}; use crate::gui::{Error as GuiError, Gui}; use crate::setup::Setups; use log::error; use std::collections::VecDeque; use thiserror::Error; use winit::event::{Event, WindowEvent}; use winit::event_loop::{ControlFlow, EventLoop}; use winit::window::WindowBuilder; use winit_input_helper::WinitInputHelper; #[cfg(windows)] use winit::platform::windows::IconExtWindows; #[cfg(windows)] use winit::window::Icon; mod config; mod framework; mod gpu; mod gui; mod setup; mod str_ext; mod timer; mod updates; #[derive(Debug, Error)] enum Error { #[error("Window creation error: {0}")] Winit(#[from] winit::error::OsError), #[error("GUI Error: {0}")] Gui(#[from] GuiError), #[error("GPU Error: {0}")] Gpu(#[from] GpuError), } fn create_window() -> Result<(EventLoop<UserEvent>, winit::window::Window, Gpu, Framework), Error> { let config = Framework::load_config(); let window_builder = if let Ok(Some(config)) = config.as_ref() { if let Some(window) = config.get_window() { WindowBuilder::new() .with_position(window.position) .with_inner_size(window.size) } else { WindowBuilder::new() } } else { WindowBuilder::new() }; let window_builder = { #[cfg(target_os = "windows")] { const ICON_RESOURCE_ID: u16 = 2; window_builder.with_window_icon(Some( Icon::from_resource(ICON_RESOURCE_ID, None).expect("Unable to load icon"), )) } #[cfg(not(target_os = "windows"))] window_builder }; let event_loop = EventLoop::with_user_event(); let window = window_builder .with_title("CarTunes") .with_min_inner_size(Framework::min_size()) .build(&event_loop)?; let (gpu, framework) = { let window_size = window.inner_size(); let scale_factor = window.scale_factor() as f32; let mut errors = VecDeque::new(); let mut warnings = VecDeque::new(); let config = Framework::unwrap_config(&mut errors, event_loop.create_proxy(), 
config); let setups = Setups::new(&mut warnings, &config); let theme = config.theme().as_winit_theme(&window); let gui = Gui::new(config, setups, event_loop.create_proxy(), errors, warnings)?; let gpu = Gpu::new(&window, window_size)?; l
fn main() -> Result<(), Error> { #[cfg(any(debug_assertions, not(windows)))] env_logger::init(); let (event_loop, window, mut gpu, mut framework) = create_window()?; let mut input = WinitInputHelper::new(); let mut keep_config = ConfigHandler::Replace; event_loop.run(move |event, _, control_flow| { if input.update(&event) { if let Some(scale_factor) = input.scale_factor() { framework.scale_factor(scale_factor); } if let Some(size) = input.window_resized() { if size.width > 0 && size.height > 0 { gpu.resize(size); framework.resize(size); } } window.request_redraw(); } match event { Event::UserEvent(event) => match event { UserEvent::ConfigHandler(config_handler) => { keep_config = config_handler; } UserEvent::Exit => { *control_flow = ControlFlow::Exit; } UserEvent::SetupPath(Some(setups_path)) => { framework.update_setups_path(setups_path); } UserEvent::FsChange(event) => { framework.handle_fs_change(event); } UserEvent::Theme(theme) => { let theme = theme.as_winit_theme(&window); framework.change_theme(theme, true); window.request_redraw(); } UserEvent::UpdateCheck => { framework.recreate_update_check(); } UserEvent::UpdateAvailable(notification) => { framework.add_update_notification(notification); } _ => (), }, Event::WindowEvent { event, .. 
} => { framework.handle_event(&event); match event { WindowEvent::ThemeChanged(theme) => { framework.change_theme(theme, false); window.request_redraw(); } WindowEvent::CloseRequested => { if keep_config == ConfigHandler::Keep || framework.save_config(&window) { *control_flow = ControlFlow::Exit; } } _ => (), } } Event::RedrawRequested(_) => { framework.prepare(&window); let (mut encoder, frame) = match gpu.prepare() { Ok((encoder, frame)) => (encoder, frame), Err(err) => { error!("gpu.prepare() failed: {}", err); *control_flow = ControlFlow::Exit; return; } }; let view = frame .texture .create_view(&wgpu::TextureViewDescriptor::default()); let render_result = framework.render(&mut encoder, &view, &gpu); if let Err(err) = render_result { error!("framework.render() failed: {}", err); *control_flow = ControlFlow::Exit; return; } gpu.queue.submit(Some(encoder.finish())); frame.present(); } _ => (), } }); }
et framework = Framework::new( window_size, scale_factor, theme, gui, &gpu, event_loop.create_proxy(), ); (gpu, framework) }; Ok((event_loop, window, gpu, framework)) }
function_block-function_prefixed
[ { "content": "/// Configure the theme based on system settings.\n\nfn update_theme(theme: &mut Option<Theme>, ctx: &egui::CtxRef) {\n\n if let Some(theme) = theme.take() {\n\n // Set the style\n\n ctx.set_style(create_style(theme));\n\n }\n\n}\n\n\n\npub(crate) fn cache_path() -> PathBuf {\n\n ProjectDirs::from(\"org\", \"KodeWerx\", \"CarTunes\")\n\n .map_or_else(|| PathBuf::from(\".\"), |dir| dir.cache_dir().to_path_buf())\n\n}\n\n\n", "file_path": "src/framework.rs", "rank": 1, "score": 148033.53566193912 }, { "content": "/// Create the default style for egui based on system settings.\n\nfn create_style(theme: Theme) -> egui::Style {\n\n let mut visuals = match theme {\n\n Theme::Dark => egui::Visuals::dark(),\n\n Theme::Light => {\n\n let mut visuals = egui::Visuals::light();\n\n\n\n // The default light theme has grey fonts. We want solid black.\n\n visuals.widgets.noninteractive.fg_stroke.color = egui::Color32::BLACK;\n\n visuals.widgets.inactive.fg_stroke.color = egui::Color32::from_gray(50);\n\n\n\n visuals\n\n }\n\n };\n\n\n\n // Show a background behind collapsing headers.\n\n visuals.collapsing_header_frame = true;\n\n\n\n egui::Style {\n\n visuals,\n\n ..egui::Style::default()\n\n }\n\n}\n", "file_path": "src/framework.rs", "rank": 2, "score": 107709.02545325577 }, { "content": "#[test]\n\nfn test_update_setup() {\n\n use UpdateKind::*;\n\n\n\n let mut setups = Setups::default();\n\n assert!(setups.tracks.is_empty());\n\n\n\n let config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n let path1 = Path::new(\"./fixtures/baseline.htm\")\n\n .canonicalize()\n\n .expect(\"Cannot canonicalize path\");\n\n let path2 = Path::new(\"./fixtures/skip_barber_centripetal.htm\")\n\n .canonicalize()\n\n .expect(\"Cannot canonicalize path\");\n\n let path3 = tempfile::Builder::new()\n\n .suffix(\".html\")\n\n .tempfile()\n\n .expect(\"Unable to create temp file\")\n\n .path()\n\n .canonicalize()\n\n .expect(\"Cannot canonicalize path\");\n", 
"file_path": "src/setup/tests.rs", "rank": 4, "score": 94863.79914082229 }, { "content": "#[test]\n\nfn test_load_dir() {\n\n let mut config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n config.update_setups_path(\"./fixtures\");\n\n let mut warnings = VecDeque::new();\n\n let setups = Setups::new(&mut warnings, &config);\n\n\n\n assert!(warnings.is_empty());\n\n\n\n let tracks = setups.tracks();\n\n let cars = &tracks[\"Centripetal Circuit\"][\"Skip Barber Formula 2000\"];\n\n assert_eq!(cars.len(), 1);\n\n let SetupInfo {\n\n setup: skip_barber,\n\n name: file_name,\n\n ..\n\n } = &cars[0];\n\n assert_eq!(file_name, \"skip_barber_centripetal\");\n\n assert_eq!(skip_barber.keys().len(), 6);\n\n\n\n let cars = &tracks[\"Charlotte Motor Speedway - Legends Oval\"][\"Global Mazda MX-5 Cup\"];\n", "file_path": "src/setup/tests.rs", "rank": 5, "score": 86615.01501813173 }, { "content": "/// Get the application configuration path.\n\nfn config_path() -> PathBuf {\n\n // If a project directory cannot be found, use the current working directory.\n\n let mut config_path = ProjectDirs::from(\"org\", \"KodeWerx\", \"CarTunes\")\n\n .map_or_else(|| PathBuf::from(\".\"), |dir| dir.config_dir().to_path_buf());\n\n config_path.push(\"config.toml\");\n\n\n\n config_path\n\n}\n\n\n", "file_path": "src/framework.rs", "rank": 6, "score": 86096.51498793063 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum UpdateCheckerMessage {\n\n Stop,\n\n Ping,\n\n Timeout,\n\n}\n\n\n\n/// Offers update checking functionality.\n\npub(crate) struct UpdateChecker {\n\n thread: JoinHandle<()>,\n\n sender: SyncSender<UpdateCheckerMessage>,\n\n}\n\n\n", "file_path": "src/updates.rs", "rank": 7, "score": 82269.62441786019 }, { "content": "fn get_properties(mut node_ref: Option<kuchiki::NodeRef>) -> Props {\n\n let mut last_was_br = false;\n\n let mut map = Props::default();\n\n let mut name = String::new();\n\n let mut values = Vec::new();\n\n\n\n while let Some(ref node) = 
node_ref {\n\n if let Some(element) = node.as_element() {\n\n // The node is an element\n\n if &element.name.local == \"br\" {\n\n // Early return when it's a <br> following a <br>\n\n if last_was_br {\n\n break;\n\n }\n\n } else {\n\n // Early return when the name is empty\n\n if name.is_empty() {\n\n break;\n\n }\n\n\n", "file_path": "src/setup.rs", "rank": 9, "score": 77566.35872926543 }, { "content": "#[test]\n\nfn test_setup_mx5() {\n\n let config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n let (track_name, car_name, setup) =\n\n setup_from_html(\"./fixtures/mx5_charlotte_legends_oval.htm\", &config).unwrap();\n\n\n\n assert_eq!(\n\n track_name,\n\n \"Charlotte Motor Speedway - Legends Oval\".to_string()\n\n );\n\n assert_eq!(car_name, \"Global Mazda MX-5 Cup\".to_string());\n\n assert_eq!(setup.keys().len(), 6);\n\n\n\n // Front\n\n let expected = create_ordered_multimap(&[\n\n (\"Toe-in\", r#\"-0/16\"\"#),\n\n (\"Cross weight\", \"50.0%\"),\n\n (\"Anti-roll bar\", \"Firm\"),\n\n ]);\n\n let front = setup.get(\"Front\").unwrap();\n\n assert_eq!(front, &expected);\n", "file_path": "src/setup/tests.rs", "rank": 10, "score": 70552.82179879493 }, { "content": "#[test]\n\nfn test_remove_setup() {\n\n use UpdateKind::*;\n\n\n\n fn assert_removed(setups: &Setups) {\n\n let tracks = setups.tracks();\n\n assert_eq!(tracks.len(), 3);\n\n assert!(tracks.contains_key(\"Centripetal Circuit\"));\n\n assert!(tracks.contains_key(\"Charlotte Motor Speedway - Legends Oval\"));\n\n assert!(tracks.contains_key(\"Circuit des 24 Heures du Mans - 24 Heures du Mans\"));\n\n }\n\n\n\n let mut config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n config.update_setups_path(\"./fixtures\");\n\n let mut warnings = VecDeque::new();\n\n let mut setups = Setups::new(&mut warnings, &config);\n\n\n\n let tracks = setups.tracks();\n\n assert_eq!(tracks.len(), 4);\n\n assert!(tracks.contains_key(\"Centripetal Circuit\"));\n\n 
assert!(tracks.contains_key(\"Charlotte Motor Speedway - Legends Oval\"));\n", "file_path": "src/setup/tests.rs", "rank": 11, "score": 70552.82179879493 }, { "content": "#[test]\n\nfn test_add_setup() {\n\n use UpdateKind::*;\n\n\n\n fn assert_added(setups: &Setups, file_name: &str) {\n\n let tracks = setups.tracks();\n\n assert_eq!(tracks.len(), 1);\n\n assert_eq!(tracks[\"Nürburgring Combined\"].len(), 1);\n\n assert_eq!(tracks[\"Nürburgring Combined\"][\"Porsche 911 GT3 R\"].len(), 1);\n\n\n\n let SetupInfo { setup, name, .. } = &tracks[\"Nürburgring Combined\"][\"Porsche 911 GT3 R\"][0];\n\n\n\n assert_eq!(name, file_name);\n\n assert_eq!(setup.keys().len(), 12);\n\n }\n\n\n\n let mut setups = Setups::default();\n\n assert!(setups.tracks.is_empty());\n\n\n\n let config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n let mut result = Vec::new();\n", "file_path": "src/setup/tests.rs", "rank": 12, "score": 70552.82179879493 }, { "content": "/// A group containing a matrix of strings.\n\nstruct Group<'setup> {\n\n /// Group name is shown in a collapsible header.\n\n name: &'setup str,\n\n\n\n /// The matrix is row-major.\n\n ///\n\n /// I.e. 
the inner vector is a list of columns with the same length as `Grid::columns`.\n\n matrix: Vec<Vec<Label>>,\n\n}\n\n\n", "file_path": "src/gui/grid.rs", "rank": 13, "score": 68806.35277864321 }, { "content": "#[test]\n\nfn test_setup_porche_911_gt3_r() {\n\n let config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n let (track_name, car_name, setup) =\n\n setup_from_html(\"./fixtures/baseline.htm\", &config).unwrap();\n\n\n\n assert_eq!(track_name, \"Nürburgring Combined\".to_string());\n\n assert_eq!(car_name, \"Porsche 911 GT3 R\".to_string());\n\n assert_eq!(setup.keys().len(), 12);\n\n\n\n // Left Front Tire\n\n let expected = create_ordered_multimap(&[\n\n (\"Starting pressure\", \"20.5 psi\"),\n\n (\"Last hot pressure\", \"20.5 psi\"),\n\n (\"Last temps O M I\", \"112F\"),\n\n (\"Last temps O M I\", \"112F\"),\n\n (\"Last temps O M I\", \"112F\"),\n\n (\"Tread remaining\", \"100%\"),\n\n (\"Tread remaining\", \"100%\"),\n\n (\"Tread remaining\", \"100%\"),\n\n ]);\n", "file_path": "src/setup/tests.rs", "rank": 14, "score": 68252.1884764549 }, { "content": "#[test]\n\nfn test_setup_skip_barber() {\n\n let config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n let (track_name, car_name, setup) =\n\n setup_from_html(\"./fixtures/skip_barber_centripetal.htm\", &config).unwrap();\n\n\n\n assert_eq!(track_name, \"Centripetal Circuit\".to_string());\n\n assert_eq!(car_name, \"Skip Barber Formula 2000\".to_string());\n\n assert_eq!(setup.keys().len(), 6);\n\n\n\n // Front\n\n let expected = create_ordered_multimap(&[(\"Brake bias\", \"54%\")]);\n\n let front = setup.get(\"Front\").unwrap();\n\n assert_eq!(front, &expected);\n\n\n\n // Left Front\n\n let expected = create_ordered_multimap(&[\n\n (\"Cold pressure\", \"25.0 psi\"),\n\n (\"Last hot pressure\", \"25.0 psi\"),\n\n (\"Last temps O M I\", \"119F\"),\n\n (\"Last temps O M I\", \"119F\"),\n", "file_path": "src/setup/tests.rs", "rank": 15, "score": 68252.1884764549 }, { 
"content": "#[test]\n\nfn test_setup_dallara_p217() {\n\n let config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(0, 0));\n\n let (track_name, car_name, setup) =\n\n setup_from_html(\"./fixtures/iracing_lemans_default.htm\", &config).unwrap();\n\n\n\n assert_eq!(\n\n track_name,\n\n \"Circuit des 24 Heures du Mans - 24 Heures du Mans\".to_string()\n\n );\n\n assert_eq!(car_name, \"Dallara P217\".to_string());\n\n assert_eq!(setup.keys().len(), 18);\n\n\n\n // Left Front Tire\n\n let expected = create_ordered_multimap(&[\n\n (\"Starting pressure\", \"20.0 psi\"),\n\n (\"Last hot pressure\", \"22.0 psi\"),\n\n (\"Last temps O M I\", \"178F\"),\n\n (\"Last temps O M I\", \"182F\"),\n\n (\"Last temps O M I\", \"187F\"),\n\n (\"Tread remaining\", \"99%\"),\n", "file_path": "src/setup/tests.rs", "rank": 16, "score": 68252.1884764549 }, { "content": "/// Get the intersection of keys that exists in each `HashMap`.\n\nfn intersect_keys<'a>(mut all_keys: impl Iterator<Item = Vec<&'a str>>) -> Vec<&'a str> {\n\n let mut output = if let Some(output) = all_keys.next() {\n\n output\n\n } else {\n\n Vec::new()\n\n };\n\n\n\n for keys in all_keys {\n\n output.retain(|key| keys.contains(key));\n\n }\n\n\n\n output\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n /// Test `intersect_keys()` with two sets.\n\n #[test]\n", "file_path": "src/gui/grid.rs", "rank": 17, "score": 67775.25690431282 }, { "content": "/// Get the width for a combo box by finding the widest string that it contains.\n\nfn get_combo_box_width<'a>(ui: &egui::Ui, choices: impl Iterator<Item = &'a String>) -> f32 {\n\n let spacing = ui.spacing();\n\n let default = spacing.interact_size.x + spacing.item_spacing.x + spacing.icon_width;\n\n choices.fold(default, |width, choice| {\n\n let galley = ui.fonts().layout_no_wrap(\n\n choice.to_string(),\n\n egui::TextStyle::Button,\n\n egui::Color32::TEMPORARY_COLOR,\n\n );\n\n\n\n width.max(\n\n galley.rect.width()\n\n + 
spacing.item_spacing.x\n\n + spacing.icon_width\n\n + spacing.scroll_bar_width,\n\n )\n\n })\n\n}\n", "file_path": "src/gui.rs", "rank": 18, "score": 66967.29292937316 }, { "content": "/// Parse an HTML file into a `Setup`.\n\nfn setup_from_html<P: AsRef<Path>>(\n\n path: P,\n\n config: &Config,\n\n) -> Result<(String, String, Setup), Error> {\n\n let bytes = fs::read(&path).map_err(|err| Error::io(path, err))?;\n\n let html = encoding_rs::mem::decode_latin1(&bytes);\n\n let document = kuchiki::parse_html().one(html.as_ref());\n\n\n\n // Find the document header and gather its text contents\n\n let text = document\n\n .select(r#\"h2[align=\"center\"]\"#)\n\n .unwrap()\n\n .next()\n\n .ok_or(Error::MissingHeader)?\n\n .text_contents();\n\n\n\n let mut lines = text.lines().skip(1);\n\n\n\n // Get the car unique identifier\n\n let car_id = lines\n", "file_path": "src/setup.rs", "rank": 19, "score": 65041.76765495888 }, { "content": "/// Create fonts for egui from the embedded TTFs.\n\nfn create_fonts() -> egui::FontDefinitions {\n\n let mut fonts = egui::FontDefinitions::default();\n\n\n\n // Add font data\n\n let props = FontPropertyBuilder::new().monospace().build();\n\n let font = system_fonts::get(&props)\n\n .expect(\"Unable to find a monospace font\")\n\n .0;\n\n fonts\n\n .font_data\n\n .insert(\"MonoSpace\".to_owned(), Cow::from(font));\n\n\n\n let props = FontPropertyBuilder::new().family(\"sans-serif\").build();\n\n let font = system_fonts::get(&props)\n\n .or_else(|| {\n\n let props = FontPropertyBuilder::new().family(\"Helvetica Neue\").build();\n\n system_fonts::get(&props)\n\n })\n\n .expect(\"Unable to find a sans-serif font\")\n\n .0;\n", "file_path": "src/framework.rs", "rank": 20, "score": 58367.115071199805 }, { "content": "// Check if a directory entry is an HTML file.\n\nfn is_html(file_name: Option<&str>) -> bool {\n\n file_name\n\n .map(|s| s.ends_with(\".htm\") || s.ends_with(\".html\"))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": 
"src/setup.rs", "rank": 21, "score": 53350.79224278251 }, { "content": "fn color_from_str(color: &str) -> Result<egui::Color32, ()> {\n\n // Validate color format. Require HTML hex `#rrggbb` for convenience\n\n let mut validator = color.chars();\n\n if color.len() != 7\n\n || validator.next().unwrap() != '#'\n\n || validator.any(|ch| !ch.is_ascii_hexdigit())\n\n {\n\n return Err(());\n\n }\n\n\n\n let r = u8::from_str_radix(&color[1..3], 16).unwrap();\n\n let g = u8::from_str_radix(&color[3..5], 16).unwrap();\n\n let b = u8::from_str_radix(&color[5..7], 16).unwrap();\n\n\n\n Ok(egui::Color32::from_rgb(r, g, b))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/config.rs", "rank": 22, "score": 51759.110532600746 }, { "content": "fn main() {\n\n #[cfg(windows)]\n\n embed_resource::compile(\"assets/cartunes.rc\");\n\n}\n", "file_path": "build.rs", "rank": 23, "score": 45072.45830854545 }, { "content": "fn create_ordered_multimap(list: &[(&str, &str)]) -> ListOrderedMultimap<String, String> {\n\n list.iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/setup/tests.rs", "rank": 24, "score": 42630.17218587225 }, { "content": "/// The thread container for update checking. 
This does all the actual work.\n\nstruct UpdateCheckerThread {\n\n event_loop_proxy: EventLoopProxy<UserEvent>,\n\n sender: SyncSender<UpdateCheckerMessage>,\n\n receiver: Option<Receiver<UpdateCheckerMessage>>,\n\n duration: Duration,\n\n persist: Persist,\n\n}\n\n\n\n/// Parsed API response body.\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct ReleaseBody {\n\n name: String,\n\n body: String,\n\n html_url: String,\n\n}\n\n\n\n/// Update notification.\n\n#[derive(Debug)]\n\npub(crate) struct UpdateNotification {\n\n pub(crate) version: Version,\n", "file_path": "src/updates.rs", "rank": 25, "score": 34769.96176526958 }, { "content": "fn human_compare(a: &str, b: &str) -> Ordering {\n\n if a.starts_with('-') && b.starts_with('-') {\n\n // Reverse parameter order when comparing negative numbers\n\n human_sort::compare(b, a)\n\n } else {\n\n human_sort::compare(a, b)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n /// Test the Ellipsis trait.\n\n #[test]\n\n fn test_ellipsis() {\n\n let s = Cow::from(\"Small\");\n\n assert_eq!(s.ellipsis(25), Cow::from(\"Small\"));\n\n\n\n let s = Cow::from(\"The number of graphemes in this string is too damn high!\");\n", "file_path": "src/str_ext.rs", "rank": 26, "score": 31745.545875625212 }, { "content": "//! 
Asynchronous timers.\n\n\n\nuse std::marker::PhantomData;\n\nuse std::sync::mpsc::{SyncSender, TrySendError};\n\nuse std::thread::JoinHandle;\n\nuse std::time::{Duration, Instant};\n\n\n\n/// Simple async timer.\n\npub(crate) struct Timer<T> {\n\n _thread: JoinHandle<()>,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T> Timer<T>\n\nwhere\n\n T: Copy + Send + 'static,\n\n{\n\n /// Create a timer that will expire after some duration.\n\n pub(crate) fn new(\n\n duration: Duration,\n", "file_path": "src/timer.rs", "rank": 27, "score": 31265.821505587195 }, { "content": " sender: SyncSender<T>,\n\n ping_message: T,\n\n stop_message: T,\n\n ) -> Self {\n\n let start = Instant::now();\n\n let _thread = std::thread::spawn(move || {\n\n Self::run(start, duration, sender, ping_message, stop_message)\n\n });\n\n\n\n Self {\n\n _thread,\n\n _phantom: PhantomData,\n\n }\n\n }\n\n\n\n /// Runs a thread that just sleeps.\n\n ///\n\n /// The thread will periodically check if the receiver has been dropped. This allows timers to\n\n /// be canceled without creating a large number of sleeping zombie threads. AKA brain-dead\n\n /// garbage collection.\n", "file_path": "src/timer.rs", "rank": 28, "score": 31258.08301562438 }, { "content": " fn run(\n\n start: Instant,\n\n duration: Duration,\n\n sender: SyncSender<T>,\n\n ping_message: T,\n\n stop_message: T,\n\n ) {\n\n let one_minute = Duration::from_secs(60);\n\n\n\n // Wake up every minute (max) to check if the receiving side has been closed\n\n loop {\n\n std::thread::sleep(duration.min(one_minute));\n\n\n\n if Instant::now().duration_since(start) >= duration {\n\n break;\n\n }\n\n let ping = sender.try_send(ping_message);\n\n if let Err(TrySendError::Disconnected(_)) = ping {\n\n break;\n\n }\n\n }\n\n\n\n sender.send(stop_message).ok();\n\n }\n\n}\n", "file_path": "src/timer.rs", "rank": 29, "score": 31257.938104717115 }, { "content": "//! 
Platform-neutral GPU state management and rendering.\n\n\n\nuse raw_window_handle::HasRawWindowHandle;\n\nuse thiserror::Error;\n\n\n\n#[derive(Debug, Error)]\n\npub(crate) enum Error {\n\n /// No suitable [`wgpu::Adapter`] found\n\n #[error(\"No suitable `wgpu::Adapter` found.\")]\n\n AdapterNotFound,\n\n /// Equivalent to [`wgpu::RequestDeviceError`]\n\n #[error(\"No wgpu::Device found.\")]\n\n DeviceNotFound(wgpu::RequestDeviceError),\n\n /// Equivalent to [`wgpu::SurfaceError`]\n\n #[error(\"The GPU failed to acquire a surface frame.\")]\n\n Surface(wgpu::SurfaceError),\n\n}\n\n\n\npub(crate) struct Gpu {\n\n pub(crate) device: wgpu::Device,\n", "file_path": "src/gpu.rs", "rank": 30, "score": 31197.532263160687 }, { "content": " .map_err(Error::DeviceNotFound)?;\n\n\n\n let gpu = Self {\n\n device,\n\n queue,\n\n surface,\n\n window_size,\n\n };\n\n gpu.reconfigure_surface();\n\n\n\n Ok(gpu)\n\n }\n\n\n\n fn reconfigure_surface(&self) {\n\n self.surface.configure(\n\n &self.device,\n\n &wgpu::SurfaceConfiguration {\n\n usage: wgpu::TextureUsages::RENDER_ATTACHMENT,\n\n format: wgpu::TextureFormat::Bgra8UnormSrgb,\n\n width: self.window_size.width,\n", "file_path": "src/gpu.rs", "rank": 31, "score": 31193.754361523523 }, { "content": " height: self.window_size.height,\n\n present_mode: wgpu::PresentMode::Fifo,\n\n },\n\n )\n\n }\n\n\n\n pub(crate) fn resize(&mut self, window_size: winit::dpi::PhysicalSize<u32>) {\n\n self.window_size = window_size;\n\n self.reconfigure_surface();\n\n }\n\n\n\n pub(crate) fn prepare(\n\n &mut self,\n\n ) -> Result<(wgpu::CommandEncoder, wgpu::SurfaceTexture), Error> {\n\n let frame = self\n\n .surface\n\n .get_current_texture()\n\n .or_else(|err| match err {\n\n wgpu::SurfaceError::Outdated => {\n\n // Recreate the swap chain to mitigate race condition on drawing surface resize.\n", "file_path": "src/gpu.rs", "rank": 32, "score": 31193.637653493683 }, { "content": " pub(crate) queue: wgpu::Queue,\n\n surface: wgpu::Surface,\n\n 
window_size: winit::dpi::PhysicalSize<u32>,\n\n}\n\n\n\nimpl Gpu {\n\n pub(crate) fn new<W: HasRawWindowHandle>(\n\n window: &W,\n\n window_size: winit::dpi::PhysicalSize<u32>,\n\n ) -> Result<Self, Error> {\n\n let instance = wgpu::Instance::new(wgpu::Backends::PRIMARY);\n\n let surface = unsafe { instance.create_surface(window) };\n\n let adapter = instance.request_adapter(&wgpu::RequestAdapterOptions {\n\n compatible_surface: Some(&surface),\n\n force_fallback_adapter: false,\n\n power_preference: wgpu::PowerPreference::HighPerformance,\n\n });\n\n let adapter = pollster::block_on(adapter).ok_or(Error::AdapterNotFound)?;\n\n let (device, queue) =\n\n pollster::block_on(adapter.request_device(&wgpu::DeviceDescriptor::default(), None))\n", "file_path": "src/gpu.rs", "rank": 33, "score": 31191.936075822465 }, { "content": " self.reconfigure_surface();\n\n self.surface.get_current_texture()\n\n }\n\n err => Err(err),\n\n })\n\n .map_err(Error::Surface)?;\n\n let encoder = self\n\n .device\n\n .create_command_encoder(&wgpu::CommandEncoderDescriptor {\n\n label: Some(\"gpu_command_encoder\"),\n\n });\n\n\n\n Ok((encoder, frame))\n\n }\n\n}\n", "file_path": "src/gpu.rs", "rank": 34, "score": 31189.842337536873 }, { "content": "//! 
Platform-neutral framework for processing events and handling app configuration.\n\n\n\nuse crate::config::{Config, Error as ConfigError, UserTheme};\n\nuse crate::gpu::Gpu;\n\nuse crate::gui::{ErrorButton, Gui, ShowError, ShowWarning};\n\nuse crate::updates::{UpdateChecker, UpdateNotification};\n\nuse directories::ProjectDirs;\n\nuse egui::{ClippedMesh, CtxRef};\n\nuse egui_wgpu_backend::{BackendError, RenderPass, ScreenDescriptor};\n\nuse font_loader::system_fonts::{self, FontPropertyBuilder};\n\nuse std::borrow::Cow;\n\nuse std::collections::VecDeque;\n\nuse std::path::PathBuf;\n\nuse thiserror::Error;\n\nuse winit::dpi::PhysicalSize;\n\nuse winit::event_loop::EventLoopProxy;\n\nuse winit::window::{Theme, Window};\n\n\n\n/// Manages all state required for rendering egui.\n\npub(crate) struct Framework {\n", "file_path": "src/framework.rs", "rank": 35, "score": 30795.292295608182 }, { "content": " ///\n\n /// This is an associated function because there will be no window or GUI available when loading\n\n /// the config.\n\n pub(crate) fn load_config() -> Result<Option<Config>, Error> {\n\n let min_size = Self::min_size();\n\n let config = Config::from_toml(config_path(), min_size)?;\n\n\n\n Ok(config)\n\n }\n\n\n\n /// Unwrap the result from [`Self::load_config`].\n\n ///\n\n /// This is an associated function because there will be no window or GUI available when loading\n\n /// the config.\n\n ///\n\n /// Always returns a valid config, and may optionally add a [`crate::gui::ShowError`] for the\n\n /// GUI to display an error message to the user.\n\n pub(crate) fn unwrap_config(\n\n show_errors: &mut VecDeque<ShowError>,\n\n event_loop_proxy: EventLoopProxy<UserEvent>,\n", "file_path": "src/framework.rs", "rank": 36, "score": 30788.632907830288 }, { "content": " /// Add an error message window to the GUI.\n\n ///\n\n /// The [`ShowError`] type allows asynchronous user feedback for error handling.\n\n pub(crate) fn add_error(&mut self, err: ShowError) {\n\n 
self.gui.add_error(err);\n\n }\n\n\n\n /// Add an update notification to the GUI.\n\n pub(crate) fn add_update_notification(&mut self, notification: UpdateNotification) {\n\n self.gui.add_update_notification(notification);\n\n }\n\n}\n\n\n\n/// Configure the theme based on system settings.\n", "file_path": "src/framework.rs", "rank": 37, "score": 30788.59618825914 }, { "content": " self.create_update_checker();\n\n }\n\n\n\n /// Try to save the configuration with the current window geometry.\n\n ///\n\n /// Returns true on success. When saving fails, the error is shown to the user and `false` is\n\n /// returned.\n\n pub(crate) fn save_config(&mut self, window: &winit::window::Window) -> bool {\n\n self.gui.config.update_window(window);\n\n match self.gui.config.write_toml() {\n\n Ok(()) => true,\n\n Err(err) => {\n\n let event_loop_proxy = self.gui.event_loop_proxy();\n\n\n\n // Error handling when saving the config fails\n\n let err = ShowError::new(\n\n err,\n\n \"Unable to write the config file. 
Exit anyway?\",\n\n (\n\n ErrorButton::new(\"Stay\", || ()),\n", "file_path": "src/framework.rs", "rank": 38, "score": 30788.13200538279 }, { "content": " .expect(\"Event loop must exist\");\n\n }),\n\n ),\n\n );\n\n show_errors.push_back(err);\n\n\n\n Config::new(config_path(), Self::min_size())\n\n }\n\n }\n\n }\n\n\n\n /// Create an update checker.\n\n ///\n\n /// Adds a warning window if the checker cannot be created.\n\n fn create_update_checker(&mut self) {\n\n self.update_checker = match UpdateChecker::new(\n\n self.event_loop_proxy.clone(),\n\n self.gui.config.get_update_check(),\n\n ) {\n\n Ok(update_checker) => update_checker,\n", "file_path": "src/framework.rs", "rank": 39, "score": 30787.80166523873 }, { "content": " // State for egui.\n\n egui_ctx: CtxRef,\n\n egui_state: egui_winit::State,\n\n screen_descriptor: ScreenDescriptor,\n\n rpass: RenderPass,\n\n paint_jobs: Vec<ClippedMesh>,\n\n theme: Option<Theme>,\n\n gui: Gui,\n\n update_checker: Option<UpdateChecker>,\n\n event_loop_proxy: EventLoopProxy<UserEvent>,\n\n}\n\n\n\n/// Framework errors.\n\n#[derive(Debug, Error)]\n\npub(crate) enum Error {\n\n #[error(\"Reading config file failed: {0}\")]\n\n ReadConfig(#[from] ConfigError),\n\n}\n\n\n\n/// User event handling is performed with this type.\n", "file_path": "src/framework.rs", "rank": 40, "score": 30786.856253399314 }, { "content": " ErrorButton::new(\"Exit Anyway\", move || {\n\n event_loop_proxy\n\n .send_event(UserEvent::Exit)\n\n .expect(\"Event loop must exist\");\n\n }),\n\n ),\n\n );\n\n\n\n self.add_error(err);\n\n\n\n false\n\n }\n\n }\n\n }\n\n\n\n /// Update the setups path on the config.\n\n pub(crate) fn update_setups_path(&mut self, setups_path: PathBuf) {\n\n self.gui.update_setups_path(setups_path);\n\n }\n\n\n", "file_path": "src/framework.rs", "rank": 41, "score": 30786.74655308338 }, { "content": " Err(err) => {\n\n let warn = ShowWarning::new(err, \"Error while creating update checker\");\n\n 
self.gui.add_warning(warn);\n\n\n\n None\n\n }\n\n };\n\n }\n\n\n\n /// Set the update check frequency.\n\n pub(crate) fn recreate_update_check(&mut self) {\n\n // Stop the old update checker\n\n if let Some(update_check) = self.update_checker.take() {\n\n if let Err(err) = update_check.stop(false) {\n\n let warn = ShowWarning::new(err, \"Error while stopping update checker\");\n\n self.gui.add_warning(warn);\n\n }\n\n }\n\n\n\n // Create a new update checker\n", "file_path": "src/framework.rs", "rank": 42, "score": 30785.80803657556 }, { "content": " /// Show update message.\n\n UpdateAvailable(UpdateNotification),\n\n}\n\n\n\n/// How the user wants to handle errors with reading the config file.\n\n#[derive(Debug, Eq, PartialEq)]\n\npub(crate) enum ConfigHandler {\n\n /// There were no errors,\n\n /// or there were errors and the user wants to replace the config with a new one.\n\n Replace,\n\n\n\n /// There were errors and the user wants to keep the existing config.\n\n Keep,\n\n}\n\n\n\nimpl Framework {\n\n /// Create a framework for egui.\n\n pub(crate) fn new(\n\n size: PhysicalSize<u32>,\n\n scale_factor: f32,\n", "file_path": "src/framework.rs", "rank": 43, "score": 30785.328899556585 }, { "content": " &self.screen_descriptor,\n\n Some(wgpu::Color::BLACK),\n\n )\n\n }\n\n\n\n /// Call this when the system theme changes.\n\n ///\n\n /// `force` will ignore the user's configuration preference.\n\n pub(crate) fn change_theme(&mut self, theme: Theme, force: bool) {\n\n if force || self.gui.config.theme() == &UserTheme::Auto {\n\n self.theme = Some(theme);\n\n }\n\n }\n\n\n\n /// Get the minimum size allowed for the window.\n\n pub(crate) fn min_size() -> PhysicalSize<u32> {\n\n PhysicalSize::new(400, 300)\n\n }\n\n\n\n /// Try to load the configuration.\n", "file_path": "src/framework.rs", "rank": 44, "score": 30784.472338245538 }, { "content": "#[derive(Debug)]\n\npub(crate) enum UserEvent {\n\n /// Configuration error handling events\n\n 
ConfigHandler(ConfigHandler),\n\n\n\n /// User wants to exit without saving.\n\n Exit,\n\n\n\n /// Change the path for setup export files.\n\n SetupPath(Option<PathBuf>),\n\n\n\n /// File system event for the setup export path.\n\n FsChange(hotwatch::Event),\n\n\n\n /// Change the theme preference.\n\n Theme(UserTheme),\n\n\n\n /// Change the update check preference.\n\n UpdateCheck,\n\n\n", "file_path": "src/framework.rs", "rank": 45, "score": 30784.33351468764 }, { "content": " egui_ctx.set_style(style);\n\n\n\n let mut result = Self {\n\n egui_ctx,\n\n egui_state,\n\n screen_descriptor,\n\n rpass,\n\n paint_jobs: Vec::new(),\n\n theme: None,\n\n gui,\n\n update_checker: None,\n\n event_loop_proxy,\n\n };\n\n result.create_update_checker();\n\n\n\n result\n\n }\n\n\n\n /// Handle input events from the window manager.\n\n pub(crate) fn handle_event(&mut self, event: &winit::event::WindowEvent) {\n", "file_path": "src/framework.rs", "rank": 46, "score": 30782.845891138175 }, { "content": " pub(crate) fn prepare(&mut self, window: &Window) {\n\n // Begin the egui frame.\n\n let raw_input = self.egui_state.take_egui_input(window);\n\n self.egui_ctx.begin_frame(raw_input);\n\n\n\n // Draw the application GUI.\n\n update_theme(&mut self.theme, &self.egui_ctx);\n\n self.gui.ui(&self.egui_ctx, window);\n\n\n\n // End the egui frame and create all paint jobs to prepare for rendering.\n\n // TODO: Handle output.needs_repaint to avoid game-mode continuous redraws.\n\n let (output, paint_commands) = self.egui_ctx.end_frame();\n\n self.egui_state\n\n .handle_output(window, &self.egui_ctx, output);\n\n self.paint_jobs = self.egui_ctx.tessellate(paint_commands);\n\n }\n\n\n\n /// Render egui.\n\n pub(crate) fn render(\n\n &mut self,\n", "file_path": "src/framework.rs", "rank": 47, "score": 30782.633172420075 }, { "content": " encoder: &mut wgpu::CommandEncoder,\n\n render_target: &wgpu::TextureView,\n\n gpu: &Gpu,\n\n ) -> Result<(), BackendError> {\n\n // Upload all resources 
to the GPU.\n\n self.rpass\n\n .update_texture(&gpu.device, &gpu.queue, &self.egui_ctx.texture());\n\n self.rpass.update_user_textures(&gpu.device, &gpu.queue);\n\n self.rpass.update_buffers(\n\n &gpu.device,\n\n &gpu.queue,\n\n &self.paint_jobs,\n\n &self.screen_descriptor,\n\n );\n\n\n\n // Record all render passes.\n\n self.rpass.execute(\n\n encoder,\n\n render_target,\n\n &self.paint_jobs,\n", "file_path": "src/framework.rs", "rank": 48, "score": 30781.174709893938 }, { "content": " theme: Theme,\n\n gui: Gui,\n\n gpu: &Gpu,\n\n event_loop_proxy: EventLoopProxy<UserEvent>,\n\n ) -> Self {\n\n let width = size.width;\n\n let height = size.height;\n\n let font_definitions = create_fonts();\n\n let style = create_style(theme);\n\n\n\n let egui_state = egui_winit::State::from_pixels_per_point(scale_factor);\n\n let egui_ctx = CtxRef::default();\n\n let screen_descriptor = ScreenDescriptor {\n\n physical_width: width,\n\n physical_height: height,\n\n scale_factor,\n\n };\n\n let rpass = RenderPass::new(&gpu.device, wgpu::TextureFormat::Bgra8UnormSrgb, 1);\n\n\n\n egui_ctx.set_fonts(font_definitions);\n", "file_path": "src/framework.rs", "rank": 49, "score": 30778.533690456807 }, { "content": " self.egui_state.on_event(&self.egui_ctx, event);\n\n }\n\n\n\n /// Handle file system change events.\n\n pub(crate) fn handle_fs_change(&mut self, event: hotwatch::Event) {\n\n self.gui.handle_fs_change(event);\n\n }\n\n\n\n /// Resize egui.\n\n pub(crate) fn resize(&mut self, size: PhysicalSize<u32>) {\n\n self.screen_descriptor.physical_width = size.width;\n\n self.screen_descriptor.physical_height = size.height;\n\n }\n\n\n\n /// Update scaling factor.\n\n pub(crate) fn scale_factor(&mut self, scale_factor: f64) {\n\n self.screen_descriptor.scale_factor = scale_factor as f32;\n\n }\n\n\n\n /// Prepare egui.\n", "file_path": "src/framework.rs", "rank": 50, "score": 30778.090209423295 }, { "content": " config: Result<Option<Config>, Error>,\n\n ) -> Config {\n\n match config 
{\n\n Ok(Some(config)) => config,\n\n Ok(None) => Config::new(config_path(), Self::min_size()),\n\n Err(err) => {\n\n // Default to keep when there is an error\n\n event_loop_proxy\n\n .send_event(UserEvent::ConfigHandler(ConfigHandler::Keep))\n\n .expect(\"Event loop must exist\");\n\n\n\n let err = ShowError::new(\n\n err,\n\n \"Unable to read the config file.\\n\\\n\n It may be corrupt, do you want to keep or replace the config file?\",\n\n (\n\n ErrorButton::new(\"Keep\", || ()),\n\n ErrorButton::new(\"Replace\", move || {\n\n event_loop_proxy\n\n .send_event(UserEvent::ConfigHandler(ConfigHandler::Replace))\n", "file_path": "src/framework.rs", "rank": 51, "score": 30777.53816283979 }, { "content": " fonts\n\n .font_data\n\n .insert(\"SansSerif\".to_owned(), Cow::from(font));\n\n\n\n // Set font families\n\n fonts\n\n .fonts_for_family\n\n .insert(egui::FontFamily::Monospace, vec![\"MonoSpace\".to_owned()]);\n\n fonts\n\n .fonts_for_family\n\n .insert(egui::FontFamily::Proportional, vec![\"SansSerif\".to_owned()]);\n\n\n\n if let Some(mut monospace) = fonts.family_and_size.get_mut(&egui::TextStyle::Monospace) {\n\n // The default monospace size is too small.\n\n monospace.1 = 14.0;\n\n }\n\n\n\n if let Some(mut heading) = fonts.family_and_size.get_mut(&egui::TextStyle::Heading) {\n\n // The default heading size is WAY too big.\n\n heading.1 = 16.0;\n\n }\n\n\n\n fonts\n\n}\n\n\n", "file_path": "src/framework.rs", "rank": 52, "score": 30772.880520200488 }, { "content": "//! Update checks are performed periodically (when enabled).\n\n//!\n\n//! This module runs checks in a thread and remembers the last time the check ran and the most\n\n//! 
recent version available.\n\n\n\nuse self::persist::{Error as PersistError, Persist};\n\nuse crate::framework::UserEvent;\n\nuse crate::timer::Timer;\n\nuse log::error;\n\nuse semver::Version;\n\nuse serde::Deserialize;\n\nuse std::any::Any;\n\nuse std::sync::mpsc::{sync_channel, Receiver, SyncSender};\n\nuse std::thread::JoinHandle;\n\nuse std::time::Duration;\n\nuse thiserror::Error;\n\nuse winit::event_loop::EventLoopProxy;\n\n\n\nmod persist;\n\n\n", "file_path": "src/updates.rs", "rank": 53, "score": 30683.77729580931 }, { "content": " Ok(Self {\n\n event_loop_proxy,\n\n sender,\n\n receiver: Some(receiver),\n\n duration,\n\n persist,\n\n })\n\n }\n\n\n\n /// Periodically check for updates.\n\n fn run(mut self) {\n\n // Send update notification on startup if it has been persisted\n\n self.send_update_notification();\n\n\n\n // Perform initial update check\n\n let mut duration = self.check();\n\n\n\n // Create a timer to periodically ping our message loop\n\n let mut _timer = Timer::new(\n\n duration,\n", "file_path": "src/updates.rs", "rank": 54, "score": 30678.747437468435 }, { "content": " }\n\n }\n\n }\n\n\n\n /// Check for the latest version.\n\n fn check(&mut self) -> Duration {\n\n // Check last update time\n\n match self.persist.last_check() {\n\n Ok(last_check) => {\n\n if last_check < self.duration {\n\n return self.duration - last_check;\n\n }\n\n }\n\n Err(error) => {\n\n error!(\"SystemTime error: {:?}\", error);\n\n return self.duration;\n\n }\n\n }\n\n\n\n // Send API request\n", "file_path": "src/updates.rs", "rank": 55, "score": 30676.326177576997 }, { "content": " };\n\n\n\n // Parse the version in the response\n\n let version = match Version::parse(&body.name) {\n\n Ok(version) => version,\n\n Err(error) => {\n\n error!(\"SemVer parse error: {:?}\", error);\n\n return self.duration;\n\n }\n\n };\n\n\n\n // Save the last update time\n\n if let Err(error) = self.persist.update_last_check() {\n\n error!(\"SystemTime error: {:?}\", error);\n\n 
return self.duration;\n\n }\n\n\n\n // Update persistence\n\n self.persist.update_last_version(version);\n\n self.persist\n", "file_path": "src/updates.rs", "rank": 56, "score": 30675.669019098692 }, { "content": " self.sender.clone(),\n\n UpdateCheckerMessage::Ping,\n\n UpdateCheckerMessage::Timeout,\n\n );\n\n\n\n for msg in self.receiver.take().expect(\"Missing receiver\").iter() {\n\n match msg {\n\n UpdateCheckerMessage::Stop => break,\n\n UpdateCheckerMessage::Ping => continue,\n\n UpdateCheckerMessage::Timeout => {\n\n duration = self.check();\n\n\n\n // Update the timer\n\n _timer = Timer::new(\n\n duration,\n\n self.sender.clone(),\n\n UpdateCheckerMessage::Ping,\n\n UpdateCheckerMessage::Timeout,\n\n );\n\n }\n", "file_path": "src/updates.rs", "rank": 57, "score": 30675.41972660469 }, { "content": " .update_release_notes(body.body.replace(\"\\r\", \"\"));\n\n self.persist.update_url(body.html_url);\n\n\n\n // Write persistence to the file system\n\n if let Err(error) = self.persist.write_toml() {\n\n error!(\"Persistence error: {:?}\", error);\n\n return self.duration;\n\n }\n\n\n\n // Send the update notification\n\n self.send_update_notification();\n\n\n\n self.duration\n\n }\n\n\n\n fn send_update_notification(&self) {\n\n // Check last update version\n\n if self.persist.last_version() > self.persist.current_version() {\n\n // Notify user of the new update\n\n self.event_loop_proxy\n\n .send_event(UserEvent::UpdateAvailable(\n\n self.persist.get_update_notification(),\n\n ))\n\n .expect(\"Event loop must exist\");\n\n }\n\n }\n\n}\n", "file_path": "src/updates.rs", "rank": 58, "score": 30675.164314374 }, { "content": "const HTTP_TIMEOUT: u64 = 15;\n\nconst RELEASES_URL: &str = \"https://api.github.com/repos/parasyte/cartunes/releases/latest\";\n\nconst USER_AGENT: &str = concat!(\"cartunes/\", env!(\"CARGO_PKG_VERSION\"));\n\n\n\n/// All the ways in which update checking can fail.\n\n#[derive(Debug, Error)]\n\npub(crate) enum Error {\n\n /// The update 
thread may panic.\n\n #[error(\"Update thread panicked\")]\n\n ThreadPanic(Box<dyn Any + Send + 'static>),\n\n\n\n /// Stopping the update thread may not succeed.\n\n #[error(\"Unable to stop update thread\")]\n\n Stop,\n\n\n\n /// Parsing or writing persistence may fail.\n\n #[error(\"Persistence error: {0}\")]\n\n Persist(#[from] PersistError),\n\n}\n\n\n", "file_path": "src/updates.rs", "rank": 59, "score": 30675.135381402903 }, { "content": " .map_err(|_| Error::Stop)?;\n\n\n\n if blocking {\n\n self.thread.join().map_err(|err| Error::ThreadPanic(err))?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl UpdateCheckerThread {\n\n /// Create a thread for the update checker.\n\n fn new(\n\n event_loop_proxy: EventLoopProxy<UserEvent>,\n\n sender: SyncSender<UpdateCheckerMessage>,\n\n receiver: Receiver<UpdateCheckerMessage>,\n\n duration: Duration,\n\n ) -> Result<Self, Error> {\n\n let persist = Persist::new()?;\n\n\n", "file_path": "src/updates.rs", "rank": 60, "score": 30674.60385063381 }, { "content": " pub(crate) fn new(\n\n event_loop_proxy: EventLoopProxy<UserEvent>,\n\n freq: UpdateFrequency,\n\n ) -> Result<Option<Self>, Error> {\n\n let duration = match freq.into_duration() {\n\n None => return Ok(None),\n\n Some(duration) => duration,\n\n };\n\n let (sender, receiver) = sync_channel(2);\n\n let thread =\n\n UpdateCheckerThread::new(event_loop_proxy, sender.clone(), receiver, duration)?;\n\n let thread = std::thread::spawn(move || thread.run());\n\n\n\n Ok(Some(Self { thread, sender }))\n\n }\n\n\n\n /// Stop the update checker.\n\n pub(crate) fn stop(self, blocking: bool) -> Result<(), Error> {\n\n self.sender\n\n .send(UpdateCheckerMessage::Stop)\n", "file_path": "src/updates.rs", "rank": 61, "score": 30673.504090262937 }, { "content": " pub(crate) release_notes: String,\n\n pub(crate) update_url: String,\n\n}\n\n\n\nimpl Default for UpdateFrequency {\n\n fn default() -> Self {\n\n Self::Never\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for UpdateFrequency 
{\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let text = match self {\n\n Self::Never => \"Never\",\n\n Self::Daily => \"Daily\",\n\n Self::Weekly => \"Weekly\",\n\n };\n\n write!(f, \"{}\", text)\n\n }\n\n}\n", "file_path": "src/updates.rs", "rank": 62, "score": 30673.13972288461 }, { "content": " let req = ureq::get(RELEASES_URL)\n\n .timeout(Duration::from_secs(HTTP_TIMEOUT))\n\n .set(\"Accept\", \"application/vnd.github.v3+json\")\n\n .set(\"User-Agent\", USER_AGENT);\n\n\n\n let res = match req.call() {\n\n Ok(res) => res,\n\n Err(error) => {\n\n error!(\"HTTP request error: {:?}\", error);\n\n return self.duration;\n\n }\n\n };\n\n\n\n // Parse the response\n\n let body: ReleaseBody = match res.into_json() {\n\n Ok(body) => body,\n\n Err(error) => {\n\n error!(\"HTTP response error: {:?}\", error);\n\n return self.duration;\n\n }\n", "file_path": "src/updates.rs", "rank": 63, "score": 30671.762168718436 }, { "content": "/// How often to check for updates.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub(crate) enum UpdateFrequency {\n\n /// Do not check for updates. 
(default)\n\n Never,\n\n\n\n /// Check every 24 hours.\n\n Daily,\n\n\n\n /// Check every 7 days.\n\n Weekly,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "src/updates.rs", "rank": 64, "score": 30671.318972337147 }, { "content": "\n\nimpl From<&str> for UpdateFrequency {\n\n fn from(value: &str) -> Self {\n\n match value {\n\n \"daily\" => Self::Daily,\n\n \"weekly\" => Self::Weekly,\n\n _ => Self::Never,\n\n }\n\n }\n\n}\n\n\n\nimpl UpdateFrequency {\n\n /// Convert this frequency into a [`Duration`].\n\n ///\n\n /// Returns `None` when the frequency is `Never`.\n\n fn into_duration(self) -> Option<Duration> {\n\n const DAY: u64 = 60 * 60 * 24;\n\n const WEEK: u64 = DAY * 7;\n\n\n\n match self {\n", "file_path": "src/updates.rs", "rank": 65, "score": 30670.739625066788 }, { "content": " Self::Never => None,\n\n Self::Daily => Some(Duration::from_secs(DAY)),\n\n Self::Weekly => Some(Duration::from_secs(WEEK)),\n\n }\n\n }\n\n\n\n pub(crate) fn as_str(&self) -> &str {\n\n match self {\n\n Self::Never => \"never\",\n\n Self::Daily => \"daily\",\n\n Self::Weekly => \"weekly\",\n\n }\n\n }\n\n}\n\n\n\n/// Check the GitHub API periodically for a new version.\n\nimpl UpdateChecker {\n\n /// Create an update checker.\n\n ///\n\n /// Returns `None` when `freq` == `Never`.\n", "file_path": "src/updates.rs", "rank": 66, "score": 30670.556190876036 }, { "content": "\n\n let mut config = Self::new(doc_path, min_size);\n\n config.doc = doc;\n\n config.update_setups_path(setups_path);\n\n config.update_theme(theme);\n\n config.set_update_check(update_check);\n\n config.load_tracks_and_cars()?;\n\n config.load_colors()?;\n\n\n\n Ok(Some(config))\n\n }\n\n\n\n /// Create TOML file from this Config.\n\n ///\n\n /// The Config remembers the original TOML path, and this method rewrites that file. 
The config\n\n /// file is created if it does not exist, along with all intermediate directories in the path.\n\n pub(crate) fn write_toml(&self) -> Result<(), Error> {\n\n let toml = self.doc.to_string();\n\n if let Some(parent) = self.doc_path.parent() {\n\n fs::create_dir_all(parent)?;\n", "file_path": "src/config.rs", "rank": 67, "score": 30635.124305048943 }, { "content": " let setups_path = PathBuf::from(\n\n doc.get(\"config\")\n\n .and_then(|t| t.get(\"setups_path\"))\n\n .and_then(|t| t.as_str())\n\n .ok_or_else(|| Error::type_error(\"config.setups_path\", \"string\"))?,\n\n );\n\n\n\n let theme = doc\n\n .get(\"config\")\n\n .and_then(|t| t.get(\"theme\"))\n\n .and_then(|t| t.as_str())\n\n .unwrap_or(\"auto\");\n\n let theme = UserTheme::from_str(theme);\n\n\n\n let update_check = doc\n\n .get(\"config\")\n\n .and_then(|t| t.get(\"update_check\"))\n\n .and_then(|t| t.as_str())\n\n .map(UpdateFrequency::from)\n\n .unwrap_or_default();\n", "file_path": "src/config.rs", "rank": 68, "score": 30633.941955503902 }, { "content": "\n\n Some(Window::new(position, size))\n\n }\n\n\n\n /// Update config with external state.\n\n pub(crate) fn update_window(&mut self, window: &winit::window::Window) {\n\n self.doc[\"window\"] = Window::from_winit(window).to_table();\n\n }\n\n\n\n /// Get a reference to the setup exports path.\n\n pub(crate) fn get_setups_path(&self) -> &Path {\n\n &self.setups_path\n\n }\n\n\n\n /// Update the setup exports path.\n\n pub(crate) fn update_setups_path<P: AsRef<Path>>(&mut self, setups_path: P) {\n\n self.setups_path = setups_path\n\n .as_ref()\n\n .canonicalize()\n\n .unwrap_or_else(|_| setups_path.as_ref().to_path_buf());\n", "file_path": "src/config.rs", "rank": 69, "score": 30632.380367127367 }, { "content": "//! 
Application configuration parsing and validation.\n\n\n\nuse crate::updates::UpdateFrequency;\n\nuse directories::UserDirs;\n\nuse patricia_tree::PatriciaSet;\n\nuse std::collections::HashMap;\n\nuse std::fs;\n\nuse std::path::{Path, PathBuf};\n\nuse thiserror::Error;\n\nuse toml_edit::{Document, Item, TomlError};\n\nuse winit::dpi::{PhysicalPosition, PhysicalSize};\n\nuse winit::window::Theme;\n\n\n\n#[cfg(target_os = \"windows\")]\n\nuse winit::platform::windows::WindowExtWindows;\n\n\n\n/// Parsing and writing configurations can fail.\n\n#[derive(Debug, Error)]\n\npub(crate) enum Error {\n\n /// I/O error.\n", "file_path": "src/config.rs", "rank": 70, "score": 30631.987890453987 }, { "content": "\n\n /// Update the frequency for update checks.\n\n pub(crate) fn get_update_check(&self) -> UpdateFrequency {\n\n self.update_check\n\n }\n\n\n\n /// Update the frequency for update checks.\n\n pub(crate) fn set_update_check(&mut self, update_check: UpdateFrequency) {\n\n self.update_check = update_check;\n\n self.doc[\"config\"][\"update_check\"] = toml_edit::value(self.update_check.as_str());\n\n }\n\n\n\n /// Load track and car info from config.\n\n fn load_tracks_and_cars(&mut self) -> Result<(), Error> {\n\n let table = &self.doc.get(\"tracks\").and_then(|t| t.as_table());\n\n if let Some(tracks) = table {\n\n for (id, name) in tracks.iter() {\n\n let name = name\n\n .as_str()\n\n .ok_or_else(|| Error::type_error(&format!(\"tracks.{}\", id), \"string\"))?;\n", "file_path": "src/config.rs", "rank": 71, "score": 30631.155209696197 }, { "content": "\n\n // Note that to_string_lossy() is destructive when the path contains invalid UTF-8 sequences.\n\n // If this is a problem in practice, we _could_ write unencodable paths as an array of\n\n // integers. 
It would allow reconstructing the path from TOML (which must be valid UTF-8)\n\n // even when the path cannot be encoded as valid UTF-8.\n\n let setups_path = self.setups_path.as_path().to_string_lossy();\n\n\n\n self.doc[\"config\"][\"setups_path\"] = toml_edit::value(setups_path.as_ref());\n\n }\n\n\n\n /// Get a reference to the theme preference.\n\n pub(crate) fn theme(&self) -> &UserTheme {\n\n &self.theme\n\n }\n\n\n\n /// Update the theme preference.\n\n pub(crate) fn update_theme(&mut self, theme: UserTheme) {\n\n self.theme = theme;\n\n self.doc[\"config\"][\"theme\"] = toml_edit::value(theme.as_str());\n\n }\n", "file_path": "src/config.rs", "rank": 72, "score": 30630.409639613834 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n /// Load column colors and background colors from config.\n\n fn load_colors(&mut self) -> Result<(), Error> {\n\n let mut parsed = Vec::new();\n\n let colors = &self\n\n .doc\n\n .get(\"config\")\n\n .and_then(|t| t.get(\"colors\"))\n\n .and_then(|t| t.as_array());\n\n\n\n if let Some(colors) = colors {\n\n for (i, color) in colors.iter().enumerate() {\n\n let color = color\n\n .as_str()\n\n .ok_or_else(|| Error::type_error(&format!(\"config.colors[{}]\", i), \"string\"))?;\n\n let color = color_from_str(color)\n\n .map_err(|_| Error::Color(format!(\"config.colors[{}]\", i)))?;\n", "file_path": "src/config.rs", "rank": 73, "score": 30629.352848220908 }, { "content": " fn type_error(path: &str, expected: &str) -> Self {\n\n let path = path.to_owned();\n\n let expected = expected.to_owned();\n\n\n\n Self::Type { path, expected }\n\n }\n\n}\n\n\n\nimpl Config {\n\n /// Create a new Config.\n\n ///\n\n /// The path is allowed to be nonexistent. 
It will not be created until the TOML is written.\n\n pub(crate) fn new<P: AsRef<Path>>(doc_path: P, min_size: PhysicalSize<u32>) -> Self {\n\n let mut config = Self {\n\n doc_path: doc_path.as_ref().to_path_buf(),\n\n doc: include_str!(\"default.toml\").parse().unwrap(),\n\n setups_path: PathBuf::new(),\n\n min_size,\n\n theme: UserTheme::Auto,\n\n colors: Vec::new(),\n", "file_path": "src/config.rs", "rank": 74, "score": 30626.781832748686 }, { "content": " match self {\n\n Self::Auto => \"auto\",\n\n Self::Dark => \"dark\",\n\n Self::Light => \"light\",\n\n }\n\n }\n\n\n\n /// Create a [`winit::window::Theme`] from this `UserTheme`.\n\n ///\n\n /// When the `UserTheme` value is set to `Auto`, the `window` reference will be used to select\n\n /// the theme based on OS preferences.\n\n #[allow(unused_variables)]\n\n pub(crate) fn as_winit_theme(&self, window: &winit::window::Window) -> Theme {\n\n match self {\n\n Self::Auto => {\n\n #[cfg(target_os = \"windows\")]\n\n let theme = window.theme();\n\n #[cfg(not(target_os = \"windows\"))]\n\n let theme = Theme::Dark;\n\n\n", "file_path": "src/config.rs", "rank": 75, "score": 30626.76498993155 }, { "content": "/// needs to be done when reading and writing TOML.\n\npub(crate) struct Config {\n\n /// Original path to TOML file.\n\n doc_path: PathBuf,\n\n\n\n /// Original parsed TOML.\n\n doc: Document,\n\n\n\n /// Setup exports path.\n\n setups_path: PathBuf,\n\n\n\n /// Window minimum inner size.\n\n min_size: PhysicalSize<u32>,\n\n\n\n /// User's theme choice.\n\n theme: UserTheme,\n\n\n\n /// User's color-coding choices.\n\n colors: Vec<egui::Color32>,\n\n\n", "file_path": "src/config.rs", "rank": 76, "score": 30626.635553590935 }, { "content": " config.load_tracks_and_cars().unwrap();\n\n config.load_colors().unwrap();\n\n\n\n config\n\n }\n\n\n\n /// Parse TOML into a Config.\n\n ///\n\n /// The path is allowed to be nonexistent. 
It isn't an error, but there will be no config.\n\n pub(crate) fn from_toml<P: AsRef<Path>>(\n\n doc_path: P,\n\n min_size: PhysicalSize<u32>,\n\n ) -> Result<Option<Self>, Error> {\n\n let doc_path = doc_path.as_ref().to_path_buf();\n\n if !doc_path.exists() {\n\n return Ok(None);\n\n }\n\n\n\n let doc: Document = fs::read_to_string(&doc_path)?.parse()?;\n\n\n", "file_path": "src/config.rs", "rank": 77, "score": 30625.7676435021 }, { "content": " diff_colors: (egui::Color32::TRANSPARENT, egui::Color32::TRANSPARENT),\n\n update_check: UpdateFrequency::default(),\n\n track_ids: PatriciaSet::new(),\n\n tracks: HashMap::new(),\n\n cars: HashMap::new(),\n\n };\n\n\n\n // Default setup exports path is selected with the following precedence:\n\n // 1. `$HOME/Documents/iRacing`\n\n // 2. `$HOME/iRacing`\n\n // 3. `iRacing`\n\n // This path may not exist and is _not_ created by this application.\n\n let mut setups_path = UserDirs::new().map_or_else(PathBuf::default, |dirs| {\n\n dirs.document_dir()\n\n .unwrap_or_else(|| dirs.home_dir())\n\n .to_path_buf()\n\n });\n\n setups_path.push(\"iRacing\");\n\n\n\n config.update_setups_path(setups_path);\n", "file_path": "src/config.rs", "rank": 78, "score": 30625.668782585224 }, { "content": "\n\n /// Modify user's color-coding choices.\n\n pub(crate) fn colors_mut(&mut self) -> &mut Vec<egui::Color32> {\n\n &mut self.colors\n\n }\n\n\n\n /// Update colors in TOML document.\n\n pub(crate) fn update_colors(&mut self) {\n\n let mut colors = toml_edit::Array::default();\n\n\n\n for color in &self.colors {\n\n let color = format!(\"#{:02x}{:02x}{:02x}\", color.r(), color.g(), color.b());\n\n colors.push(color);\n\n }\n\n\n\n self.doc[\"config\"][\"colors\"] = toml_edit::value(colors);\n\n\n\n self.doc[\"config\"][\"background_decrease\"] = toml_edit::value(format!(\n\n \"#{:02x}{:02x}{:02x}\",\n\n self.diff_colors.0.r(),\n", "file_path": "src/config.rs", "rank": 79, "score": 30625.472095783614 }, { "content": " theme\n\n }\n\n 
Self::Dark => winit::window::Theme::Dark,\n\n Self::Light => winit::window::Theme::Light,\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for UserTheme {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let text = match self {\n\n Self::Auto => \"Automatic\",\n\n Self::Dark => \"Dark Mode\",\n\n Self::Light => \"Light Mode\",\n\n };\n\n write!(f, \"{}\", text)\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 80, "score": 30625.28891547455 }, { "content": "\n\n parsed.push(color);\n\n }\n\n } else if !colors.is_none() {\n\n return Err(Error::type_error(\"config.colors\", \"array\"));\n\n }\n\n\n\n // Parse background colors\n\n let mut background = Vec::new();\n\n for name in &[\"background_decrease\", \"background_increase\"] {\n\n let color = self\n\n .doc\n\n .get(\"config\")\n\n .and_then(|t| t.get(name))\n\n .and_then(|t| t.as_str());\n\n\n\n if let Some(color) = color {\n\n let color =\n\n color_from_str(color).map_err(|_| Error::Color(format!(\"config.{}\", name)))?;\n\n\n", "file_path": "src/config.rs", "rank": 81, "score": 30625.225875813467 }, { "content": "\n\n /// Window inner size.\n\n pub(crate) size: PhysicalSize<u32>,\n\n}\n\n\n\n/// User's theme choice.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub(crate) enum UserTheme {\n\n /// Auto-select based on OS preferences (with fallback to dark mode).\n\n Auto,\n\n\n\n /// Dark mode.\n\n Dark,\n\n\n\n /// Light mode.\n\n Light,\n\n}\n\n\n\nimpl Error {\n\n /// Shortcut for creating a `TypeError`.\n", "file_path": "src/config.rs", "rank": 82, "score": 30624.349927174622 }, { "content": "\n\n /// Test default config file.\n\n #[test]\n\n fn test_default_config() {\n\n let mut config = Config::new(\"/tmp/some/path.toml\", PhysicalSize::new(100, 100));\n\n\n\n assert!(config.load_tracks_and_cars().is_ok());\n\n\n\n // Expect the PatriciaSet to have proper prefix matching.\n\n let track_ids = &config.track_ids;\n\n assert_eq!(\n\n 
track_ids.get_longest_common_prefix(\"charlotte_2018_2019_roval\"),\n\n Some(\"charlotte_2018_2019_roval\".as_bytes()),\n\n );\n\n assert_eq!(\n\n track_ids.get_longest_common_prefix(\"charlotte_fullroadcourse\"),\n\n Some(\"charlotte\".as_bytes()),\n\n );\n\n assert_eq!(track_ids.get_longest_common_prefix(\"san_francisco\"), None,);\n\n\n", "file_path": "src/config.rs", "rank": 83, "score": 30623.243309547444 }, { "content": " /// User's diff color choices.\n\n diff_colors: (egui::Color32, egui::Color32),\n\n\n\n /// User's update check frequency choice.\n\n update_check: UpdateFrequency,\n\n\n\n /// Map raw track IDs to unique track IDs.\n\n track_ids: PatriciaSet,\n\n\n\n /// Map track IDs to track names.\n\n tracks: HashMap<String, String>,\n\n\n\n /// Map car IDs to car names.\n\n cars: HashMap<String, String>,\n\n}\n\n\n\n/// Window settings.\n\npub(crate) struct Window {\n\n /// Window outer position.\n\n pub(crate) position: PhysicalPosition<i32>,\n", "file_path": "src/config.rs", "rank": 84, "score": 30622.288879859017 }, { "content": " }\n\n\n\n /// Create a Window from a `winit` window.\n\n fn from_winit(window: &winit::window::Window) -> Self {\n\n #[cfg(target_os = \"macos\")]\n\n let position = window.inner_position();\n\n #[cfg(not(target_os = \"macos\"))]\n\n let position = window.outer_position();\n\n\n\n let position = position.unwrap_or_else(|_| PhysicalPosition::default());\n\n let size = window.inner_size();\n\n\n\n Self { position, size }\n\n }\n\n\n\n /// Create a TOML table from this Window.\n\n fn to_table(&self) -> Item {\n\n let mut output = toml_edit::table();\n\n\n\n output[\"x\"] = toml_edit::value(self.position.x as i64);\n", "file_path": "src/config.rs", "rank": 85, "score": 30622.202908771167 }, { "content": " self.diff_colors.0.g(),\n\n self.diff_colors.0.b()\n\n ));\n\n self.doc[\"config\"][\"background_increase\"] = toml_edit::value(format!(\n\n \"#{:02x}{:02x}{:02x}\",\n\n self.diff_colors.1.r(),\n\n self.diff_colors.1.g(),\n\n 
self.diff_colors.1.b()\n\n ));\n\n }\n\n\n\n /// Get user's diff color choices.\n\n pub(crate) fn diff_colors(&self) -> (egui::Color32, egui::Color32) {\n\n self.diff_colors\n\n }\n\n\n\n /// Modify user's diff color choices.\n\n pub(crate) fn diff_colors_mut(&mut self) -> &mut (egui::Color32, egui::Color32) {\n\n &mut self.diff_colors\n\n }\n", "file_path": "src/config.rs", "rank": 86, "score": 30621.57334072154 }, { "content": " background.push(color);\n\n }\n\n }\n\n\n\n // If all colors are parsed successfully, replace the entire config\n\n if !parsed.is_empty() {\n\n self.colors = parsed;\n\n }\n\n if background.len() == 2 {\n\n self.diff_colors = (background[0], background[1]);\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Window {\n\n /// Create a Window configuration.\n\n fn new(position: PhysicalPosition<i32>, size: PhysicalSize<u32>) -> Self {\n\n Self { position, size }\n", "file_path": "src/config.rs", "rank": 87, "score": 30621.489110706425 }, { "content": " }\n\n let result = fs::write(&self.doc_path, toml)?;\n\n\n\n Ok(result)\n\n }\n\n\n\n /// Get window configuration if it's valid.\n\n pub(crate) fn get_window(&self) -> Option<Window> {\n\n let window = &self.doc.get(\"window\")?;\n\n\n\n let x = window.get(\"x\").and_then(|t| t.as_integer())?;\n\n let y = window.get(\"y\").and_then(|t| t.as_integer())?;\n\n let position = PhysicalPosition::new(x as i32, y as i32);\n\n\n\n let width = window.get(\"width\").and_then(|t| t.as_integer())?;\n\n let height = window.get(\"height\").and_then(|t| t.as_integer())?;\n\n let size = PhysicalSize::new(\n\n (width as u32).max(self.min_size.width),\n\n (height as u32).max(self.min_size.height),\n\n );\n", "file_path": "src/config.rs", "rank": 88, "score": 30620.29073203925 }, { "content": " #[error(\"I/O error: {0}\")]\n\n Io(#[from] std::io::Error),\n\n\n\n /// Configuration parse error.\n\n #[error(\"Configuration parse error: {0}\")]\n\n Parse(#[from] TomlError),\n\n\n\n /// Type error.\n\n #[error(\"Expected 
{path:?} to be type {expected}\")]\n\n Type { path: String, expected: String },\n\n\n\n /// Color format.\n\n #[error(\"Expected {0:?} to be a hex color in `#rrggbb` format\")]\n\n Color(String),\n\n}\n\n\n\n/// Application configuration backed by TOML.\n\n///\n\n/// This struct retains the original parsed TOML and allows runtime changes while preserving\n\n/// comments and original document structure. It is also strongly typed, so error handling only\n", "file_path": "src/config.rs", "rank": 89, "score": 30620.27768588704 }, { "content": "\n\n self.track_ids.insert(id.to_string());\n\n self.tracks.insert(id.to_string(), name.to_string());\n\n }\n\n } else if !table.is_none() {\n\n return Err(Error::type_error(\"tracks\", \"table\"));\n\n }\n\n\n\n let cars = &self.doc.get(\"cars\").and_then(|t| t.as_table());\n\n if let Some(cars) = cars {\n\n for (id, name) in cars.iter() {\n\n let name = name\n\n .as_str()\n\n .ok_or_else(|| Error::type_error(&format!(\"cars.{}\", id), \"string\"))?;\n\n\n\n self.cars.insert(id.to_string(), name.to_string());\n\n }\n\n } else if !cars.is_none() {\n\n return Err(Error::type_error(\"cars\", \"table\"));\n\n }\n", "file_path": "src/config.rs", "rank": 90, "score": 30619.91212172355 }, { "content": " // Expectations for track name mapping.\n\n assert_eq!(\n\n config.tracks.get(\"charlotte_2018_2019_roval\"),\n\n Some(&\"Charlotte Motor Speedway - Road Course\".to_string())\n\n );\n\n assert_eq!(\n\n config.tracks.get(\"charlotte\"),\n\n Some(&\"[Legacy] Charlotte Motor Speedway - 2008\".to_string())\n\n );\n\n\n\n // Expectations for car name mapping.\n\n assert_eq!(\n\n config.cars.get(\"rt2000\"),\n\n Some(&\"Skip Barber Formula 2000\".to_string())\n\n )\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 91, "score": 30619.46218590928 }, { "content": " output[\"y\"] = toml_edit::value(self.position.y as i64);\n\n output[\"width\"] = toml_edit::value(self.size.width as i64);\n\n output[\"height\"] = 
toml_edit::value(self.size.height as i64);\n\n\n\n output\n\n }\n\n}\n\n\n\nimpl UserTheme {\n\n /// Create a `UserTheme` from a string slice.\n\n fn from_str(value: &str) -> Self {\n\n match value {\n\n \"dark\" => Self::Dark,\n\n \"light\" => Self::Light,\n\n _ => Self::Auto,\n\n }\n\n }\n\n\n\n /// Get a string slice that is TOML-compatible for this `UserTheme`.\n\n fn as_str(&self) -> &str {\n", "file_path": "src/config.rs", "rank": 92, "score": 30619.35345923226 }, { "content": "\n\n /// Get a reference for mapping raw track IDs to unique track IDs.\n\n pub(crate) fn track_ids(&self) -> &PatriciaSet {\n\n &self.track_ids\n\n }\n\n\n\n /// Get a reference for mapping track IDs to track names.\n\n pub(crate) fn tracks(&self) -> &HashMap<String, String> {\n\n &self.tracks\n\n }\n\n\n\n /// Get a reference for mapping car IDs to car names.\n\n pub(crate) fn cars(&self) -> &HashMap<String, String> {\n\n &self.cars\n\n }\n\n\n\n /// Get user's color-coding choices.\n\n pub(crate) fn colors(&self) -> Vec<egui::Color32> {\n\n self.colors.clone()\n\n }\n", "file_path": "src/config.rs", "rank": 93, "score": 30615.84362576552 }, { "content": " self.show_warnings.push_front(ShowWarning::new(error, msg));\n\n }\n\n }\n\n\n\n /// Update setups export path.\n\n pub(crate) fn update_setups_path<P: AsRef<Path>>(&mut self, setups_path: P) {\n\n if let Err(error) = self.hotwatch.unwatch(self.config.get_setups_path()) {\n\n self.show_warnings.push_front(ShowWarning::new(\n\n error,\n\n format!(\n\n \"Unable to stop watching setup exports path for changes: `{:?}`\",\n\n self.config.get_setups_path()\n\n ),\n\n ));\n\n }\n\n\n\n self.config.update_setups_path(setups_path);\n\n self.setups = Setups::new(&mut self.show_warnings, &self.config);\n\n self.clear_filters();\n\n\n", "file_path": "src/gui.rs", "rank": 94, "score": 29646.898847559383 }, { "content": "//! 
User interface structure, rendering, and state management.\n\n\n\nuse self::grid::SetupGrid;\n\nuse crate::config::{Config, UserTheme};\n\nuse crate::framework::UserEvent;\n\nuse crate::setup::{Setup, Setups};\n\nuse crate::str_ext::{Ellipsis, HumanCompare};\n\nuse crate::updates::{UpdateFrequency, UpdateNotification};\n\nuse copypasta::{ClipboardContext, ClipboardProvider};\n\nuse egui::widgets::color_picker::{color_edit_button_srgba, Alpha};\n\nuse egui::{CtxRef, Widget};\n\nuse hotwatch::Hotwatch;\n\nuse std::borrow::Cow;\n\nuse std::collections::{HashMap, VecDeque};\n\nuse std::path::Path;\n\nuse std::time::{Duration, Instant};\n\nuse thiserror::Error;\n\nuse winit::event_loop::EventLoopProxy;\n\n\n\nmod grid;\n", "file_path": "src/gui.rs", "rank": 95, "score": 29645.85367731804 }, { "content": "#[derive(Debug, Error)]\n\npub(crate) enum Error {\n\n #[error(\"File system watch error: {0}\")]\n\n Notify(#[from] hotwatch::Error),\n\n}\n\n\n\nimpl Gui {\n\n /// Create a GUI.\n\n pub(crate) fn new(\n\n config: Config,\n\n setups: Setups,\n\n event_loop_proxy: EventLoopProxy<UserEvent>,\n\n show_errors: VecDeque<ShowError>,\n\n show_warnings: VecDeque<ShowWarning>,\n\n ) -> Result<Self, Error> {\n\n let mut hotwatch = Hotwatch::new()?;\n\n let watcher = Self::watch_setups_path(event_loop_proxy.clone());\n\n\n\n hotwatch.watch(config.get_setups_path(), watcher)?;\n\n\n", "file_path": "src/gui.rs", "rank": 96, "score": 29644.23851735463 }, { "content": " self.tooltip(ctx, ui, tooltip_id, Duration::from_secs(3));\n\n });\n\n });\n\n\n\n if window_open {\n\n // Put the warning back\n\n self.show_warnings.push_back(warning);\n\n }\n\n }\n\n\n\n self.warning = window_open;\n\n }\n\n\n\n /// Add a n update notification to the GUI.\n\n pub(crate) fn add_update_notification(&mut self, notification: UpdateNotification) {\n\n self.show_update_notification = Some(notification);\n\n }\n\n\n\n /// Show update notification window.\n\n fn show_update_notification(&mut self, ctx: 
&egui::CtxRef, enabled: bool) {\n", "file_path": "src/gui.rs", "rank": 97, "score": 29642.86087578049 }, { "content": " Ok(Self {\n\n config,\n\n setups,\n\n hotwatch,\n\n selected_track_name: None,\n\n selected_car_name: None,\n\n selected_setups: Vec::new(),\n\n event_loop_proxy,\n\n about: false,\n\n preferences: false,\n\n warning: false,\n\n update_notification: false,\n\n show_errors,\n\n show_warnings,\n\n show_update_notification: None,\n\n show_tooltips: HashMap::new(),\n\n })\n\n }\n\n\n\n /// Draw the UI using egui.\n", "file_path": "src/gui.rs", "rank": 98, "score": 29642.13766966221 }, { "content": "}\n\n\n\n/// Descriptor for a button used by the error window.\n\npub(crate) struct ErrorButton {\n\n /// Text to show on the button.\n\n label: String,\n\n\n\n /// An action to perform when the button is pressed.\n\n action: Box<dyn FnOnce()>,\n\n}\n\n\n\n/// Holds state for a warning message to show to the user.\n\npub(crate) struct ShowWarning {\n\n /// The actual warning message.\n\n warning: Box<dyn std::error::Error>,\n\n\n\n /// Provide some extra context to the user.\n\n context: String,\n\n}\n\n\n", "file_path": "src/gui.rs", "rank": 99, "score": 29640.780050839872 } ]
Rust
src/ser.rs
sisyphe-re/libnar
9ffada39937be91518eaf7fbc27dc0002e95a3db
use std::fs::{self, File}; use std::io::{self, Error, ErrorKind, Read, Write}; use std::os::unix::fs::MetadataExt; use std::path::Path; use crate::{NIX_VERSION_MAGIC, PAD_LEN}; pub fn to_vec<P: AsRef<Path>>(path: P) -> io::Result<Vec<u8>> { let mut buffer = Vec::new(); to_writer(&mut buffer, path)?; Ok(buffer) } pub fn to_writer<W, P>(writer: &mut W, path: P) -> io::Result<()> where W: Write, P: AsRef<Path>, { let target = path.as_ref(); if fs::symlink_metadata(target).is_err() { return Err(Error::new(ErrorKind::NotFound, "Path not found")); } write_padded(writer, NIX_VERSION_MAGIC)?; encode_entry(writer, target) } fn encode_entry<W: Write>(writer: &mut W, path: &Path) -> io::Result<()> { let metadata = fs::symlink_metadata(path)?; write_padded(writer, b"(")?; write_padded(writer, b"type")?; if metadata.file_type().is_dir() { write_padded(writer, b"directory")?; let mut entries: Vec<_> = fs::read_dir(path)?.collect::<Result<_, _>>()?; entries.sort_by(|x, y| x.path().cmp(&y.path())); for entry in entries { write_padded(writer, b"entry")?; write_padded(writer, b"(")?; write_padded(writer, b"name")?; write_padded(writer, entry.file_name().to_string_lossy().as_bytes())?; write_padded(writer, b"node")?; encode_entry(writer, &entry.path())?; write_padded(writer, b")")?; } } else if metadata.file_type().is_file() { write_padded(writer, b"regular")?; if metadata.mode() & 0o111 != 0 { write_padded(writer, b"executable")?; write_padded(writer, b"")?; } write_padded(writer, b"contents")?; let mut file = File::open(path)?; write_padded_from_reader(writer, &mut file, metadata.len())?; } else if metadata.file_type().is_symlink() { write_padded(writer, b"symlink")?; write_padded(writer, b"target")?; let target = fs::read_link(path)?; write_padded(writer, target.to_string_lossy().as_bytes())?; } else { return Err(Error::new(ErrorKind::InvalidData, "Unrecognized file type")); } write_padded(writer, b")")?; Ok(()) } fn write_padded<W: Write>(writer: &mut W, bytes: &[u8]) -> 
io::Result<()> { let len = bytes.len() as u64; writer.write_all(&len.to_le_bytes())?; writer.write_all(bytes)?; let remainder = bytes.len() % PAD_LEN; if remainder > 0 { let buf = [0u8; PAD_LEN]; let padding = PAD_LEN - remainder; writer.write_all(&buf[..padding])?; } Ok(()) } fn write_padded_from_reader<W, R>(writer: &mut W, reader: &mut R, len: u64) -> io::Result<()> where W: Write, R: Read, { writer.write_all(&len.to_le_bytes())?; io::copy(reader, writer)?; let remainder = (len % PAD_LEN as u64) as usize; if remainder > 0 { let buf = [0u8; PAD_LEN]; let padding = PAD_LEN - remainder; writer.write_all(&buf[..padding])?; } Ok(()) } #[cfg(test)] mod tests { use std::mem::size_of; use super::*; #[test] fn writes_multiple_of_eight_exactly() { let mut buffer = Vec::new(); let length = 16u64; let data = vec![1u8; length as usize]; write_padded(&mut buffer, &data[..]).unwrap(); let written_data_len = size_of::<u64>() as u64 + length; assert_eq!(buffer.len() as u64, written_data_len); let header_bytes = length.to_le_bytes(); assert_eq!(&buffer[..size_of::<u64>()], header_bytes); let data_bytes = [1u8; 16]; assert_eq!(&buffer[size_of::<u64>()..], data_bytes); } #[test] fn pads_non_multiple_of_eight() { let mut buffer = Vec::new(); let length = 5u64; let data = vec![1u8; length as usize]; write_padded(&mut buffer, &data[..]).unwrap(); let written_data_len = size_of::<u64>() as u64 + length + 3; assert_eq!(buffer.len() as u64, written_data_len); let header_bytes = length.to_le_bytes(); assert_eq!(&buffer[..size_of::<u64>()], header_bytes); let data_bytes = [1u8; 5]; assert_eq!(&buffer[size_of::<u64>()..size_of::<u64>() + 5], data_bytes); let padding_bytes = [0u8; 3]; assert_eq!(&buffer[size_of::<u64>() + 5..], padding_bytes); } }
use std::fs::{self, File}; use std::io::{self, Error, ErrorKind, Read, Write}; use std::os::unix::fs::MetadataExt; use std::path::Path; use crate::{NIX_VERSION_MAGIC, PAD_LEN}; pub fn to_vec<P: AsRef<Path>>(path: P) -> io::Result<Vec<u8>> { let mut buffer = Vec::new(); to_writer(&mut buffer, path)?; Ok(buffer) } pub fn to_writer<W, P>(writer: &mut W, path: P) -> io::Result<()> where W: Write, P: AsRef<Path>, { let target = path.as_ref(); if fs::symlink_metadata(target).is_err() { return Err(Error::new(ErrorKind::NotFound, "Path not found")); } write_padded(writer, NIX_VERSION_MAGIC)?; encode_entry(writer, target) } fn encode_entry<W: Write>(writer: &mut W, path: &Path) -> io::Result<()> { let metadata = fs::symlink_metadata(path)?; write_padded(writer, b"(")?; write_padded(writer, b"type")?; if metadata.file_type().is_dir() { write_padded(writer, b"directory")?; let mut entries: Vec<_> = fs::read_dir(path)?.collect::<Result<_, _>>()?; entries.sort_by(|x, y| x.path().cmp(&y.path())); for entry in entries { write_padded(writer, b"entry")?; write_padded(writer, b"(")?; write_padded(writer, b"name")?; write_padded(writer, entry.file_name().to_string_lossy().as_bytes())?; write_padded(writer, b"node")?; encode_entry(writer, &entry.path())?; write_padded(writer, b")")?; } } else if metadata.file_type().is_file() { write_padded(writer, b"regular")?; if metadata.mode() & 0o111 != 0 { write_padded(writer, b"executable")?; write_padded(writer, b"")?; } write_padded(writer, b"contents")?; let mut file = File::open(path)?; write_padded_from_reader(writer, &mut file, metadata.len())?; } else if metadata.file_type().is_symlink() { write_padded(writer, b"symlink")?; write_padded(writer, b"target")?; let target = fs::read_link(path)?; write_padded(writer, target.to_string_lossy().as_bytes())?; } else { return Err(Error::new(ErrorKind::InvalidData, "Unrecognized file type")); } write_padded(writer, b")")?; Ok(()) } fn write_padded<W: Write>(writer: &mut W, bytes: &[u8]) -> 
io::Result<()> { let len = bytes.len() as u64; writer.write_all(&len.to_le_bytes())?; writer.write_all(bytes)?; let remainder = bytes.len() % PAD_LEN; if remainder > 0 { let buf = [0u8; PAD_LEN]; let padding = PAD_LEN - remainder; writer.write_all(&buf[..padding])?; } Ok(()) } fn write_padded_from_reader<W, R>(writer: &mut W, reader: &mut R, len: u64) -> io::Result<()> where W: Write, R: Read, { writer.write_all(&len.to_le_bytes())?; io::copy(reader, writer)?; let remainder = (len % PAD_LEN as u64) as usize; if remainder > 0 { let buf = [0u8; PAD_LEN]; let padding = PAD_LEN - remainder; writer.write_all(&buf[..padding])?; } Ok(()) } #[cfg(test)] mod tests { use std::mem::size_of; use super::*; #[test] fn writes_multiple_of_eight_exactly() { let mut buffer = Vec::new(); let length = 16u64; let data = vec![1u8; length as usize];
#[test] fn pads_non_multiple_of_eight() { let mut buffer = Vec::new(); let length = 5u64; let data = vec![1u8; length as usize]; write_padded(&mut buffer, &data[..]).unwrap(); let written_data_len = size_of::<u64>() as u64 + length + 3; assert_eq!(buffer.len() as u64, written_data_len); let header_bytes = length.to_le_bytes(); assert_eq!(&buffer[..size_of::<u64>()], header_bytes); let data_bytes = [1u8; 5]; assert_eq!(&buffer[size_of::<u64>()..size_of::<u64>() + 5], data_bytes); let padding_bytes = [0u8; 3]; assert_eq!(&buffer[size_of::<u64>() + 5..], padding_bytes); } }
write_padded(&mut buffer, &data[..]).unwrap(); let written_data_len = size_of::<u64>() as u64 + length; assert_eq!(buffer.len() as u64, written_data_len); let header_bytes = length.to_le_bytes(); assert_eq!(&buffer[..size_of::<u64>()], header_bytes); let data_bytes = [1u8; 16]; assert_eq!(&buffer[size_of::<u64>()..], data_bytes); }
function_block-function_prefix_line
[ { "content": "#[test]\n\nfn serializes_regular_file() {\n\n let dir = tempfile::tempdir().unwrap();\n\n let mut file = File::create(dir.path().join(\"file.txt\")).unwrap();\n\n writeln!(file, \"lorem ipsum dolor sic amet\").unwrap();\n\n\n\n let expected: Vec<u8> = std::iter::empty()\n\n .chain(\n\n 13u64\n\n .to_le_bytes()\n\n .into_iter()\n\n .chain(b\"nix-archive-1\")\n\n .chain(&[0u8; 3]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"type\")\n\n .chain(&[0u8; 4]),\n\n )\n", "file_path": "tests/serialize.rs", "rank": 5, "score": 62689.95007226338 }, { "content": "#[test]\n\nfn serializes_executable_file() {\n\n let dir = tempfile::tempdir().unwrap();\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .mode(0o777)\n\n .open(dir.path().join(\"script.sh\"))\n\n .unwrap();\n\n\n\n write!(file, \"#!/bin/sh\\nset -euo pipefail\\nexit 0\\n\").unwrap();\n\n\n\n let expected: Vec<u8> = std::iter::empty()\n\n .chain(\n\n 13u64\n\n .to_le_bytes()\n\n .into_iter()\n\n .chain(b\"nix-archive-1\")\n\n .chain(&[0u8; 3]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n", "file_path": "tests/serialize.rs", "rank": 6, "score": 62689.95007226338 }, { "content": "#[test]\n\nfn serializes_directory() {\n\n let dir = tempfile::tempdir().unwrap();\n\n fs::create_dir(dir.path().join(\"subdir\")).unwrap();\n\n fs::write(dir.path().join(\"subdir\").join(\"file\"), \"hello world\").unwrap();\n\n\n\n let expected: Vec<u8> = std::iter::empty()\n\n .chain(\n\n 13u64\n\n .to_le_bytes()\n\n .into_iter()\n\n .chain(b\"nix-archive-1\")\n\n .chain(&[0u8; 3]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"type\")\n\n .chain(&[0u8; 4]),\n\n )\n", "file_path": "tests/serialize.rs", "rank": 7, "score": 46185.40790851729 }, { "content": 
"#[test]\n\nfn serializes_symlink() {\n\n let dir = tempfile::tempdir().unwrap();\n\n std::os::unix::fs::symlink(\"./foo\", dir.path().join(\"foo\")).unwrap();\n\n\n\n let expected: Vec<u8> = std::iter::empty()\n\n .chain(\n\n 13u64\n\n .to_le_bytes()\n\n .into_iter()\n\n .chain(b\"nix-archive-1\")\n\n .chain(&[0u8; 3]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"type\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(\n", "file_path": "tests/serialize.rs", "rank": 8, "score": 46185.40790851729 }, { "content": "type Co<'a> = genawaiter::sync::Co<io::Result<Entry<'a>>>;\n\n\n", "file_path": "src/de.rs", "rank": 9, "score": 39361.63559029401 }, { "content": "#[derive(Debug)]\n\nstruct ArchiveInner<R: ?Sized> {\n\n canonicalize_mtime: bool,\n\n remove_xattrs: bool,\n\n position: Cell<u64>,\n\n reader: RefCell<R>,\n\n}\n\n\n\nimpl<'a, R: ?Sized + Read> Read for &'a ArchiveInner<R> {\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n let bytes_read = self.reader.borrow_mut().read(buf)?;\n\n self.position.set(self.position.get() + bytes_read as u64);\n\n Ok(bytes_read)\n\n }\n\n}\n\n\n\npub struct Archive<R: ?Sized + Read> {\n\n inner: ArchiveInner<R>,\n\n}\n\n\n\nimpl<R: Read> Archive<R> {\n", "file_path": "src/de.rs", "rank": 10, "score": 30738.84775986753 }, { "content": "fn main() {\n\n let path = env::args().nth(1).expect(\"Expected path to *.nar archive\");\n\n let file = File::open(path).unwrap();\n\n\n\n let mut nar = Archive::new(file);\n\n let entries = nar.entries().unwrap();\n\n\n\n for entry in entries {\n\n let entry = entry.unwrap();\n\n println!(\"{:?}\", entry);\n\n }\n\n}\n", "file_path": "examples/viewer.rs", "rank": 11, "score": 28749.238753772734 }, { "content": "fn main() {\n\n let nar = Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"libnar.nar\");\n\n\n\n let mut file = File::create(&nar).unwrap();\n\n libnar::to_writer(&mut file, 
TARGET_PATH).unwrap();\n\n\n\n let file = File::open(&nar).unwrap();\n\n let mut nar = Archive::new(file);\n\n nar.unpack(\"libnar\").unwrap();\n\n}\n", "file_path": "examples/round_trip.rs", "rank": 12, "score": 27515.480250485693 }, { "content": " 7u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"symlink\")\n\n .chain(&[0u8; 1]),\n\n )\n\n .chain(\n\n 6u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"target\")\n\n .chain(&[0u8; 2]),\n\n )\n\n .chain(\n\n 5u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"./foo\")\n\n .chain(&[0u8; 3]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .copied()\n\n .collect();\n\n\n\n let output = libnar::to_vec(dir.path().join(\"foo\")).unwrap();\n\n assert_eq!(output, expected);\n\n}\n\n\n", "file_path": "tests/serialize.rs", "rank": 13, "score": 21273.204252878044 }, { "content": " .into_iter()\n\n .chain(b\"directory\")\n\n .chain(&[0u8; 7]),\n\n )\n\n .chain(\n\n 5u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"entry\")\n\n .chain(&[0u8; 3]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"name\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n", "file_path": "tests/serialize.rs", "rank": 14, "score": 21272.678616418794 }, { "content": " .chain(\n\n 9u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"directory\")\n\n .chain(&[0u8; 7]),\n\n )\n\n .chain(\n\n 5u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"entry\")\n\n .chain(&[0u8; 3]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"name\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(\n", "file_path": "tests/serialize.rs", "rank": 15, "score": 21272.678616418794 }, { "content": " .chain(&[0u8; 1]),\n\n )\n\n .chain(8u64.to_le_bytes().into_iter().chain(b\"contents\"))\n\n .chain(\n\n 11u64\n\n .to_le_bytes()\n\n 
.into_iter()\n\n .chain(\"hello world\".as_bytes())\n\n .chain(&[0u8; 5]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .copied()\n\n .collect();\n\n\n\n let output = libnar::to_vec(dir.path()).unwrap();\n\n assert_eq!(output, expected);\n\n}\n", "file_path": "tests/serialize.rs", "rank": 16, "score": 21272.194099412915 }, { "content": "use std::fs::{self, File, OpenOptions};\n\nuse std::io::Write;\n\nuse std::os::unix::fs::OpenOptionsExt;\n\n\n\n#[test]\n", "file_path": "tests/serialize.rs", "rank": 17, "score": 21271.92853104667 }, { "content": " .chain(\n\n 7u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"regular\")\n\n .chain(&[0u8; 1]),\n\n )\n\n .chain(8u64.to_le_bytes().into_iter().chain(b\"contents\"))\n\n .chain(\n\n 27u8.to_le_bytes()\n\n .into_iter()\n\n .chain(&[0u8; 7])\n\n .chain(\"lorem ipsum dolor sic amet\\n\".as_bytes())\n\n .chain(&[0u8; 5]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .copied()\n\n .collect();\n\n\n\n let output = libnar::to_vec(dir.path().join(\"file.txt\")).unwrap();\n\n assert_eq!(output, expected);\n\n}\n\n\n", "file_path": "tests/serialize.rs", "rank": 18, "score": 21271.612144234732 }, { "content": " 6u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"subdir\")\n\n .chain(&[0u8; 2]),\n\n )\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"node\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"type\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(\n\n 9u64.to_le_bytes()\n", "file_path": "tests/serialize.rs", "rank": 19, 
"score": 21271.503199068786 }, { "content": " .chain(8u64.to_le_bytes().into_iter().chain(b\"contents\"))\n\n .chain(\n\n 35u8.to_le_bytes()\n\n .into_iter()\n\n .chain(&[0u8; 7])\n\n .chain(\"#!/bin/sh\\nset -euo pipefail\\nexit 0\\n\".as_bytes())\n\n .chain(&[0u8; 5]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\")\").chain(&[0u8; 7]))\n\n .copied()\n\n .collect();\n\n\n\n let output = libnar::to_vec(dir.path().join(\"script.sh\")).unwrap();\n\n assert_eq!(output, expected);\n\n}\n\n\n", "file_path": "tests/serialize.rs", "rank": 20, "score": 21271.325113347415 }, { "content": " .chain(b\"file\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"node\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(1u64.to_le_bytes().into_iter().chain(b\"(\").chain(&[0u8; 7]))\n\n .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"type\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(\n\n 7u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"regular\")\n", "file_path": "tests/serialize.rs", "rank": 21, "score": 21271.244568072765 }, { "content": " .chain(\n\n 4u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"type\")\n\n .chain(&[0u8; 4]),\n\n )\n\n .chain(\n\n 7u64.to_le_bytes()\n\n .into_iter()\n\n .chain(b\"regular\")\n\n .chain(&[0u8; 1]),\n\n )\n\n .chain(\n\n 10u8.to_le_bytes()\n\n .into_iter()\n\n .chain(&[0u8; 7])\n\n .chain(b\"executable\")\n\n .chain(&[0u8; 5]),\n\n )\n\n .chain(0u8.to_le_bytes().into_iter().chain(b\"\").chain(&[0u8; 8]))\n", "file_path": "tests/serialize.rs", "rank": 22, "score": 21271.01671250733 }, { "content": " fn read_utf8_padded(&self) -> io::Result<String> {\n\n let bytes = self.read_bytes_padded()?;\n\n String::from_utf8(bytes).map_err(|e| Error::new(ErrorKind::InvalidData, e))\n\n }\n\n\n\n fn read_bytes_padded(&self) -> io::Result<Vec<u8>> {\n\n let mut len_buffer = [0u8; PAD_LEN];\n\n (&self.inner).read_exact(&mut len_buffer[..])?;\n\n let len = u64::from_le_bytes(len_buffer);\n\n\n\n let 
mut data_buffer = vec![0u8; len as usize];\n\n (&self.inner).read_exact(&mut data_buffer)?;\n\n\n\n let remainder = data_buffer.len() % PAD_LEN;\n\n if remainder > 0 {\n\n let mut buffer = [0u8; PAD_LEN];\n\n let padding = &mut buffer[0..PAD_LEN - remainder];\n\n (&self.inner).read_exact(padding)?;\n\n if !buffer.iter().all(|b| *b == 0) {\n\n return Err(Error::new(ErrorKind::Other, \"Bad archive padding\"));\n", "file_path": "src/de.rs", "rank": 25, "score": 24.968423291777068 }, { "content": " path,\n\n EntryKind::Regular { executable, data },\n\n archive,\n\n )))\n\n .await;\n\n }\n\n \"symlink\" => {\n\n let target = if archive.read_utf8_padded()? == \"target\" {\n\n archive.read_utf8_padded().map(PathBuf::from)?\n\n } else {\n\n return Err(Error::new(ErrorKind::Other, \"Missing target tag\"));\n\n };\n\n\n\n if archive.read_utf8_padded()? != \")\" {\n\n return Err(Error::new(ErrorKind::Other, \"Missing symlink close tag\"));\n\n }\n\n\n\n co.yield_(Ok(Entry::new(path, EntryKind::Symlink { target }, archive)))\n\n .await;\n\n }\n", "file_path": "src/de.rs", "rank": 28, "score": 16.808340703911558 }, { "content": " _marker: PhantomData<&'a Archive<R>>,\n\n}\n\n\n\nimpl<'a, R: Read> Iterator for Entries<'a, R> {\n\n type Item = io::Result<Entry<'a>>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.iter.next()\n\n }\n\n}\n\n\n\nimpl<'a, R: Read> Debug for Entries<'a, R> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n\n write!(fmt, stringify!(Entries))\n\n }\n\n}\n\n\n\npub struct Entry<'a> {\n\n name: PathBuf,\n\n pub kind: EntryKind,\n", "file_path": "src/de.rs", "rank": 29, "score": 16.362951633634264 }, { "content": "#![forbid(unsafe_code)]\n\n\n\n#[doc(inline)]\n\npub use self::de::Archive;\n\n#[doc(inline)]\n\npub use self::ser::{to_vec, to_writer};\n\n\n\nconst NIX_VERSION_MAGIC: &[u8] = b\"nix-archive-1\";\n\nconst PAD_LEN: usize = 8;\n\n\n\npub mod de;\n\npub mod ser;\n", "file_path": "src/lib.rs", "rank": 30, "score": 
15.992064222152242 }, { "content": " let message = \"Cannot call `entries` unless reader is in position 0\";\n\n return Err(Error::new(ErrorKind::Other, message));\n\n }\n\n\n\n if self.read_bytes_padded()? != NIX_VERSION_MAGIC {\n\n return Err(Error::new(ErrorKind::Other, \"Not a valid NAR archive\"));\n\n }\n\n\n\n let gen = Gen::new(move |co| parse(co, self));\n\n Ok(Box::new(gen.into_iter()))\n\n }\n\n\n\n fn unpack_inner(&mut self, dst: &Path) -> io::Result<()> {\n\n for entry in self.entries_inner()? {\n\n let mut file = entry?;\n\n file.unpack_in(dst)?;\n\n }\n\n Ok(())\n\n }\n\n\n", "file_path": "src/de.rs", "rank": 31, "score": 14.137461224017883 }, { "content": " Box::pin(try_parse(co, archive, path.join(entry_name)));\n\n child_entry.await?;\n\n\n\n if archive.read_utf8_padded()? != \")\" {\n\n return Err(Error::new(ErrorKind::Other, \"Missing nested close tag\"));\n\n }\n\n }\n\n \")\" => break,\n\n _ => return Err(Error::new(ErrorKind::Other, \"Incorrect directory field\")),\n\n }\n\n }\n\n }\n\n _ => return Err(Error::new(ErrorKind::Other, \"Unrecognized file type\")),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\npub struct Entries<'a, R: 'a + Read> {\n\n iter: Box<dyn Iterator<Item = io::Result<Entry<'a>>> + 'a>,\n", "file_path": "src/de.rs", "rank": 32, "score": 14.109284786169129 }, { "content": " opt.mode(0o555);\n\n } else {\n\n opt.mode(0o444);\n\n }\n\n\n\n let mut file = opt.open(&dst)?;\n\n file.write_all(data.as_slice())?;\n\n Ok(())\n\n }\n\n\n\n fn unpack_symlink(dst: &Path, target: &Path) -> io::Result<()> {\n\n if fs::symlink_metadata(&dst).is_ok() {\n\n fs::remove_file(&dst)?;\n\n }\n\n\n\n std::os::unix::fs::symlink(target, dst)\n\n }\n\n}\n\n\n\nimpl<'a> Debug for Entry<'a> {\n", "file_path": "src/de.rs", "rank": 33, "score": 14.006210067580062 }, { "content": "use std::cell::{Cell, RefCell};\n\nuse std::fmt::{self, Debug, Formatter};\n\nuse std::fs::{self, OpenOptions};\n\nuse std::future::Future;\n\nuse std::io::{self, Error, ErrorKind, Read, 
Write};\n\nuse std::marker::PhantomData;\n\nuse std::os::unix::fs::OpenOptionsExt;\n\nuse std::path::{Component, Path, PathBuf};\n\nuse std::pin::Pin;\n\n\n\nuse filetime::FileTime;\n\nuse genawaiter::sync::Gen;\n\n\n\nuse crate::{NIX_VERSION_MAGIC, PAD_LEN};\n\n\n", "file_path": "src/de.rs", "rank": 34, "score": 13.967514453875973 }, { "content": " pub fn new(reader: R) -> Self {\n\n Archive {\n\n inner: ArchiveInner {\n\n canonicalize_mtime: true,\n\n remove_xattrs: true,\n\n position: Cell::new(0),\n\n reader: RefCell::new(reader),\n\n },\n\n }\n\n }\n\n\n\n pub fn into_inner(self) -> R {\n\n self.inner.reader.into_inner()\n\n }\n\n\n\n pub fn entries(&mut self) -> io::Result<Entries<R>> {\n\n let archive: &mut Archive<dyn Read> = self;\n\n archive.entries_inner().map(|iter| Entries {\n\n iter,\n\n _marker: PhantomData,\n", "file_path": "src/de.rs", "rank": 35, "score": 13.840909080400419 }, { "content": " .and_then(|p| fs::symlink_metadata(p).ok())\n\n .filter(|m| {\n\n FileTime::from_creation_time(&m)\n\n .filter(|time| *time == FileTime::zero())\n\n .is_some()\n\n });\n\n\n\n match &mut self.kind {\n\n EntryKind::Directory => Self::unpack_dir(&path)?,\n\n EntryKind::Regular { executable, data } => Self::unpack_file(&path, *executable, data)?,\n\n EntryKind::Symlink { target } => Self::unpack_symlink(&path, target)?,\n\n }\n\n\n\n if self.remove_xattrs {\n\n #[cfg(all(unix, feature = \"xattr\"))]\n\n for attr in xattr::list(&path)? {\n\n xattr::remove(&path, attr)?;\n\n }\n\n }\n\n\n", "file_path": "src/de.rs", "rank": 36, "score": 13.404592711902666 }, { "content": " }\n\n}\n\n\n\nasync fn try_parse(\n\n co: &mut Co<'_>,\n\n archive: &Archive<dyn Read + '_>,\n\n path: PathBuf,\n\n) -> io::Result<()> {\n\n if archive.read_utf8_padded()? != \"(\" {\n\n return Err(Error::new(ErrorKind::Other, \"Missing open tag\"));\n\n }\n\n\n\n if archive.read_utf8_padded()? 
!= \"type\" {\n\n return Err(Error::new(ErrorKind::Other, \"Missing type tag\"));\n\n }\n\n\n\n match archive.read_utf8_padded()?.as_str() {\n\n \"regular\" => {\n\n let mut executable = false;\n\n let mut tag = archive.read_utf8_padded()?;\n", "file_path": "src/de.rs", "rank": 37, "score": 12.408030380421089 }, { "content": " fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n\n fmt.debug_struct(stringify!(Entry))\n\n .field(\"name\", &self.name)\n\n .field(\"kind\", &self.kind)\n\n .finish()\n\n }\n\n}\n\n\n\npub enum EntryKind {\n\n Directory,\n\n Regular { executable: bool, data: Vec<u8> },\n\n Symlink { target: PathBuf },\n\n}\n\n\n\nimpl Debug for EntryKind {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n\n use EntryKind::*;\n\n match self {\n\n Directory => fmt.debug_struct(stringify!(Directory)).finish(),\n\n Regular { executable, .. } => fmt\n", "file_path": "src/de.rs", "rank": 38, "score": 12.259881071388996 }, { "content": "\n\n if tag == \"executable\" {\n\n executable = true;\n\n if archive.read_utf8_padded()? != \"\" {\n\n return Err(Error::new(ErrorKind::Other, \"Incorrect executable tag\"));\n\n }\n\n tag = archive.read_utf8_padded()?;\n\n }\n\n\n\n let data = if tag == \"contents\" {\n\n archive.read_bytes_padded()?\n\n } else {\n\n return Err(Error::new(ErrorKind::Other, \"Missing contents tag\"));\n\n };\n\n\n\n if archive.read_utf8_padded()? != \")\" {\n\n return Err(Error::new(ErrorKind::Other, \"Missing regular close tag\"));\n\n }\n\n\n\n co.yield_(Ok(Entry::new(\n", "file_path": "src/de.rs", "rank": 39, "score": 12.240011486883228 }, { "content": "# libnar\n\n\n\nLibrary for reading and writing from NAR (Nix Archive) files written in Rust.\n\n\n\nThe NAR format, developed exclusively for the [Nix package manager], is a fully\n\ndeterministic and reproducible alternative to the [tar] archive format. 
It is\n\nused to serialize and deserialize filesystem objects, such as files and\n\ndirectories and symlinks, in and out of the Nix store. Unlike tar, `.nar`\n\narchives have the following properties:\n\n\n\n[Nix package manager]: https://nixos.org/nix/\n\n[tar]: https://en.wikipedia.org/wiki/Tar_(computing)\n\n\n\n1. Deterministic ordering when unpacking files\n\n2. Fully specified, no undefined or implementation-specific behavior\n\n3. Strips out non-reproducible file metadata (creation time, last access time,\n\n owner and group IDs, all file mode permissions except for executable) before\n\n packing and normalizes them at unpacking time\n\n4. Strips out the `setuid` and sticky bits along with all filesystem-specific\n\n extended attributes before packing\n\n\n\n`libnar` is a fast and lightweight implementation of the Nix Archive format in\n\nRust and provides a convenient interface for opening, creating, packing, and\n\nunpacking `.nar` files. It is intentionally kept as minimal as possible with few\n\ndependencies to keep the codebase portable.\n\n\n\n## Examples\n\n\n\n### Opening an archive\n\n\n\n```rust\n\nuse std::fs::File;\n\n\n\nuse libnar::Archive;\n\n\n\nfn main() {\n\n let file = File::open(\"/path/to/archive.nar\").unwrap();\n\n let mut nar = Archive::new(file).unwrap();\n\n\n\n let entries = nar.entries().unwrap();\n\n for entry in entries {\n\n let entry = entry.unwrap();\n\n println!(\"{:?}\", entry);\n\n }\n\n}\n\n```\n\n\n\n### Extracting an archive\n\n\n\n```rust\n\nuse std::fs::File;\n\n\n\nuse libnar::Archive;\n\n\n\nfn main() {\n\n let file = File::open(\"/path/to/archive.nar\").unwrap();\n\n let mut nar = Archive::new(file).unwrap();\n\n nar.unpack(\"./archive\").unwrap();\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 40, "score": 11.757433709131963 }, { "content": " \"directory\" => {\n\n co.yield_(Ok(Entry::new(path.clone(), EntryKind::Directory, archive)))\n\n .await;\n\n\n\n loop {\n\n match archive.read_utf8_padded()?.as_str() 
{\n\n \"entry\" => {\n\n if archive.read_utf8_padded()? != \"(\" {\n\n return Err(Error::new(ErrorKind::Other, \"Missing nested open tag\"));\n\n }\n\n\n\n let entry_name = if archive.read_utf8_padded()? == \"name\" {\n\n let name = archive.read_utf8_padded()?;\n\n match name.as_str() {\n\n \"\" => {\n\n return Err(Error::new(ErrorKind::Other, \"Entry name is empty\"))\n\n }\n\n \"/\" => {\n\n return Err(Error::new(ErrorKind::Other, \"Invalid name `/`\"))\n\n }\n", "file_path": "src/de.rs", "rank": 41, "score": 11.616227715627222 }, { "content": " if prev.map(|m| m.is_dir()).unwrap_or(false) {\n\n return Ok(());\n\n }\n\n }\n\n Err(Error::new(\n\n err.kind(),\n\n format!(\"{} when creating dir {}\", err, dst.display()),\n\n ))\n\n })\n\n }\n\n\n\n fn unpack_file(dst: &Path, executable: bool, data: &mut Vec<u8>) -> io::Result<()> {\n\n if dst.exists() {\n\n fs::remove_file(&dst)?;\n\n }\n\n\n\n let mut opt = OpenOptions::new();\n\n opt.create_new(true).write(true);\n\n\n\n if executable {\n", "file_path": "src/de.rs", "rank": 42, "score": 11.438136514600203 }, { "content": " }\n\n }\n\n\n\n Ok(data_buffer)\n\n }\n\n}\n\n\n\nimpl<'a, R: Read> Debug for Archive<R> {\n\n fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {\n\n fmt.debug_struct(stringify!(Archive))\n\n .field(\"canonicalize_mtime\", &self.inner.canonicalize_mtime)\n\n .field(\"remove_xattrs\", &self.inner.remove_xattrs)\n\n .field(\"position\", &self.inner.position)\n\n .finish()\n\n }\n\n}\n\n\n\nasync fn parse(mut co: Co<'_>, archive: &Archive<dyn Read + '_>) {\n\n if let Err(err) = try_parse(&mut co, archive, PathBuf::new()).await {\n\n co.yield_(Err(err)).await;\n", "file_path": "src/de.rs", "rank": 43, "score": 11.10057917262273 }, { "content": " })\n\n }\n\n\n\n pub fn set_canonicalize_mtime(&mut self, canonicalize: bool) {\n\n self.inner.canonicalize_mtime = canonicalize;\n\n }\n\n\n\n pub fn set_remove_xattrs(&mut self, remove: bool) {\n\n self.inner.remove_xattrs = remove;\n\n }\n\n\n\n pub fn 
unpack<P: AsRef<Path>>(&mut self, dst: P) -> io::Result<()> {\n\n let archive: &mut Archive<dyn Read> = self;\n\n archive.unpack_inner(dst.as_ref())\n\n }\n\n}\n\n\n\nimpl<'a> Archive<dyn Read + 'a> {\n\n fn entries_inner(&mut self) -> io::Result<Box<dyn Iterator<Item = io::Result<Entry>> + '_>> {\n\n if self.inner.position.get() != 0 {\n", "file_path": "src/de.rs", "rank": 44, "score": 10.725846499863412 }, { "content": "### Creating an archive\n\n\n\n```rust\n\nuse std::fs::File;\n\n\n\nfn main() {\n\n let mut file = File::create(\"/path/to/archive.nar\").unwrap();\n\n libnar::to_writer(&mut file, \"/path/to/archive\").unwrap();\n\n}\n\n```\n\n\n\n## License\n\n\n\n`libnar` is free and open source software distributed under the terms of both\n\nthe [MIT](LICENSE-MIT) and the [Apache 2.0](LICENSE-APACHE) licenses.\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\ndual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 45, "score": 10.406755801711945 }, { "content": " canonicalize_mtime: bool,\n\n remove_xattrs: bool,\n\n _marker: PhantomData<&'a ()>,\n\n}\n\n\n\nimpl<'a> Entry<'a> {\n\n fn new(name: PathBuf, kind: EntryKind, archive: &Archive<dyn Read + '_>) -> Self {\n\n Entry {\n\n name,\n\n kind,\n\n canonicalize_mtime: archive.inner.canonicalize_mtime,\n\n remove_xattrs: archive.inner.remove_xattrs,\n\n _marker: PhantomData,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn name(&self) -> &Path {\n\n &self.name\n\n }\n", "file_path": "src/de.rs", "rank": 46, "score": 9.858007057983409 }, { "content": " if self.canonicalize_mtime {\n\n let metadata = fs::symlink_metadata(&path)?;\n\n let atime = FileTime::from_last_access_time(&metadata);\n\n filetime::set_symlink_file_times(&path, atime, FileTime::zero())?;\n\n }\n\n\n\n if let Some(metadata) = recanonicalize_parent {\n\n if let Some(parent) = 
path.parent() {\n\n let atime = FileTime::from_last_access_time(&metadata);\n\n filetime::set_symlink_file_times(&parent, atime, FileTime::zero())?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn unpack_dir(dst: &Path) -> io::Result<()> {\n\n fs::create_dir(&dst).or_else(|err| {\n\n if err.kind() == ErrorKind::AlreadyExists {\n\n let prev = fs::metadata(&dst);\n", "file_path": "src/de.rs", "rank": 47, "score": 9.31862088065365 }, { "content": "use std::fs::File;\n\nuse std::path::Path;\n\n\n\nuse libnar::Archive;\n\n\n\nconst TARGET_PATH: &str = concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/src\");\n\n\n", "file_path": "examples/round_trip.rs", "rank": 48, "score": 9.132473103005022 }, { "content": "\n\n pub fn unpack_in<P: AsRef<Path>>(&mut self, dst: P) -> io::Result<()> {\n\n let path = if self.name.as_os_str().is_empty() {\n\n dst.as_ref().to_owned()\n\n } else {\n\n dst.as_ref().join(&self.name)\n\n };\n\n\n\n for component in path.components() {\n\n if let Component::Prefix(_) | Component::RootDir | Component::ParentDir = component {\n\n let message = format!(\"Invalid path component in {:?}\", path);\n\n return Err(Error::new(ErrorKind::Other, message));\n\n }\n\n }\n\n\n\n // If the timestamp of our parent has been canonicalized, we want to keep it that way after\n\n // we unpack, whether we choose to canonicalize as well or not.\n\n let recanonicalize_parent = path\n\n .parent()\n\n .filter(|_| !self.name.as_os_str().is_empty())\n", "file_path": "src/de.rs", "rank": 49, "score": 7.882121448694326 }, { "content": " \"~\" => {\n\n return Err(Error::new(ErrorKind::Other, \"Invalid name `~`\"))\n\n }\n\n \".\" => {\n\n return Err(Error::new(ErrorKind::Other, \"Invalid name `.`\"))\n\n }\n\n \"..\" => {\n\n return Err(Error::new(ErrorKind::Other, \"Invalid name `..`\"))\n\n }\n\n _ => name,\n\n }\n\n } else {\n\n return Err(Error::new(ErrorKind::Other, \"Missing name field\"));\n\n };\n\n\n\n if archive.read_utf8_padded()? 
!= \"node\" {\n\n return Err(Error::new(ErrorKind::Other, \"Missing node field\"));\n\n }\n\n\n\n let child_entry: Pin<Box<dyn Future<Output = _>>> =\n", "file_path": "src/de.rs", "rank": 50, "score": 6.738660419590305 }, { "content": " EntryKind::Regular { executable, .. } => !executable,\n\n _ => false,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn is_symlink(&self) -> bool {\n\n match &self.kind {\n\n EntryKind::Symlink { .. } => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn set_canonicalize_mtime(&mut self, canonicalize: bool) {\n\n self.canonicalize_mtime = canonicalize;\n\n }\n\n\n\n pub fn set_remove_xattrs(&mut self, remove: bool) {\n\n self.remove_xattrs = remove;\n\n }\n", "file_path": "src/de.rs", "rank": 51, "score": 6.537604192542365 }, { "content": "\n\n #[inline]\n\n pub fn is_dir(&self) -> bool {\n\n match &self.kind {\n\n EntryKind::Directory => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn is_executable(&self) -> bool {\n\n match &self.kind {\n\n EntryKind::Regular { executable, .. 
} => *executable,\n\n _ => false,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn is_file(&self) -> bool {\n\n match &self.kind {\n", "file_path": "src/de.rs", "rank": 52, "score": 6.458900048566806 }, { "content": "use std::env;\n\nuse std::fs::File;\n\n\n\nuse libnar::Archive;\n\n\n", "file_path": "examples/viewer.rs", "rank": 53, "score": 5.299201878696921 }, { "content": " .debug_struct(stringify!(Regular))\n\n .field(\"executable\", executable)\n\n .finish(),\n\n Symlink { target } => fmt\n\n .debug_struct(stringify!(Symlink))\n\n .field(\"target\", target)\n\n .finish(),\n\n }\n\n }\n\n}\n", "file_path": "src/de.rs", "rank": 54, "score": 2.9030104597093587 }, { "content": "# Changelog\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)\n\nand this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.1.0] - 2020-01-27\n\n\n\n### Added\n\n\n\n* Initial crate release.\n\n* Implement serialization via `ser::to_vec()`, `ser::to_writer()`.\n\n* Implement deserialization via `de::Archive`.\n\n\n\n[Unreleased]: https://github.com/ebkalderon/libnar/compare/v0.1.0...HEAD\n\n[0.1.0]: https://github.com/ebkalderon/libnar/releases/tag/v0.1.0\n", "file_path": "CHANGELOG.md", "rank": 55, "score": 2.347193140026521 }, { "content": "# Contributor Covenant Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to making participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, sex characteristics, gender identity and expression,\n\nlevel of experience, education, socio-economic status, nationality, personal\n\nappearance, race, religion, or sexual identity and orientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that 
contributes to creating a positive environment\n\ninclude:\n\n\n\n* Using welcoming and inclusive language\n\n* Being respectful of differing viewpoints and experiences\n\n* Gracefully accepting constructive criticism\n\n* Focusing on what is best for the community\n\n* Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n\n advances\n\n* Trolling, insulting/derogatory comments, and personal or political attacks\n\n* Public or private harassment\n\n* Publishing others' private information, such as a physical or electronic\n\n address, without explicit permission\n\n* Other conduct which could reasonably be considered inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 56, "score": 0.9987136404811161 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within project spaces and in public spaces\n\nwhen an individual is representing the project or its community. Examples of\n\nrepresenting a project or community include using an official project e-mail\n\naddress, posting via an official social media account, or acting as an appointed\n\nrepresentative at an online or offline event. 
Representation of a project may be\n\nfurther defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at ebkalderon@gmail.com. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n\n\n[homepage]: https://www.contributor-covenant.org\n\n\n\nFor answers to common questions about this code of conduct, see\n\nhttps://www.contributor-covenant.org/faq\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 57, "score": 0.6495543502806709 } ]
Rust
src/sys/pkg/testing/blobfs-ramdisk/src/lib.rs
dahlia-os/fuchsia-pine64-pinephone
57aace6f0b0bd75306426c98ab9eb3ff4524a61d
#![deny(missing_docs)] use { anyhow::{format_err, Context as _, Error}, fdio::{SpawnAction, SpawnOptions}, fidl::endpoints::{ClientEnd, ServerEnd}, fidl_fuchsia_io::{ DirectoryAdminMarker, DirectoryAdminProxy, DirectoryMarker, DirectoryProxy, NodeProxy, }, fuchsia_component::server::ServiceFs, fuchsia_merkle::{Hash, MerkleTreeBuilder}, fuchsia_runtime::{HandleInfo, HandleType}, fuchsia_zircon::{self as zx, prelude::*}, futures::prelude::*, ramdevice_client::RamdiskClient, scoped_task::Scoped, std::{borrow::Cow, collections::BTreeSet, ffi::CString}, }; #[cfg(test)] mod test; #[derive(Debug, Clone)] pub struct BlobInfo { merkle: Hash, contents: Cow<'static, [u8]>, } impl<B> From<B> for BlobInfo where B: Into<Cow<'static, [u8]>>, { fn from(bytes: B) -> Self { let bytes = bytes.into(); let mut tree = MerkleTreeBuilder::new(); tree.write(&bytes); Self { merkle: tree.finish().root(), contents: bytes } } } pub struct BlobfsRamdiskBuilder { ramdisk: Option<Ramdisk>, blobs: Vec<BlobInfo>, } impl BlobfsRamdiskBuilder { fn new() -> Self { Self { ramdisk: None, blobs: vec![] } } pub fn ramdisk(mut self, ramdisk: Ramdisk) -> Self { self.ramdisk = Some(ramdisk); self } pub fn with_blob(mut self, blob: impl Into<BlobInfo>) -> Self { self.blobs.push(blob.into()); self } pub fn start(self) -> Result<BlobfsRamdisk, Error> { let ramdisk = match self.ramdisk { Some(ramdisk) => ramdisk, None => { let ramdisk = Ramdisk::start().context("creating backing ramdisk for blobfs")?; mkblobfs(&ramdisk)?; ramdisk } }; let block_device_handle_id = HandleInfo::new(HandleType::User0, 1); let fs_root_handle_id = HandleInfo::new(HandleType::User0, 0); let block_handle = ramdisk.clone_channel().context("cloning ramdisk channel")?; let (proxy, blobfs_server_end) = fidl::endpoints::create_proxy::<DirectoryAdminMarker>()?; let process = scoped_task::spawn_etc( scoped_task::job_default(), SpawnOptions::CLONE_ALL, &CString::new("/pkg/bin/blobfs").unwrap(), &[&CString::new("blobfs").unwrap(), 
&CString::new("mount").unwrap()], None, &mut [ SpawnAction::add_handle(block_device_handle_id, block_handle.into()), SpawnAction::add_handle(fs_root_handle_id, blobfs_server_end.into()), ], ) .map_err(|(status, _)| status) .context("spawning 'blobfs mount'")?; let blobfs = BlobfsRamdisk { backing_ramdisk: ramdisk, process, proxy }; if !self.blobs.is_empty() { let mut present_blobs = blobfs.list_blobs()?; for blob in self.blobs { if present_blobs.contains(&blob.merkle) { continue; } blobfs .write_blob_sync(&blob.merkle, &blob.contents) .context(format!("writing {}", blob.merkle))?; present_blobs.insert(blob.merkle); } } Ok(blobfs) } } pub struct BlobfsRamdisk { backing_ramdisk: Ramdisk, process: Scoped<fuchsia_zircon::Process>, proxy: DirectoryAdminProxy, } impl BlobfsRamdisk { pub fn builder() -> BlobfsRamdiskBuilder { BlobfsRamdiskBuilder::new() } pub fn start() -> Result<Self, Error> { Self::builder().start() } pub fn root_dir_handle(&self) -> Result<ClientEnd<DirectoryMarker>, Error> { let (root_clone, server_end) = zx::Channel::create()?; self.proxy.clone(fidl_fuchsia_io::CLONE_FLAG_SAME_RIGHTS, server_end.into())?; Ok(root_clone.into()) } pub fn root_dir_proxy(&self) -> Result<DirectoryProxy, Error> { Ok(self.root_dir_handle()?.into_proxy()?) } pub fn root_dir(&self) -> Result<openat::Dir, Error> { fdio::create_fd(self.root_dir_handle()?.into()).context("failed to create fd") } pub async fn into_builder(self) -> Result<BlobfsRamdiskBuilder, Error> { let ramdisk = self.unmount().await?; Ok(Self::builder().ramdisk(ramdisk)) } pub async fn unmount(self) -> Result<Ramdisk, Error> { zx::Status::ok(self.proxy.unmount().await.context("sending blobfs unmount")?) 
.context("unmounting blobfs")?; self.process .wait_handle( zx::Signals::PROCESS_TERMINATED, zx::Time::after(zx::Duration::from_seconds(30)), ) .context("waiting for 'blobfs mount' to exit")?; let ret = self.process.info().context("getting 'blobfs mount' process info")?.return_code; if ret != 0 { return Err(format_err!("'blobfs mount' returned nonzero exit code {}", ret)); } Ok(self.backing_ramdisk) } pub async fn stop(self) -> Result<(), Error> { self.unmount().await?.stop() } pub fn list_blobs(&self) -> Result<BTreeSet<Hash>, Error> { self.root_dir()? .list_dir(".")? .map(|entry| { Ok(entry? .file_name() .to_str() .ok_or_else(|| anyhow::format_err!("expected valid utf-8"))? .parse()?) }) .collect() } pub fn add_blob_from( &self, merkle: &Hash, mut source: impl std::io::Read, ) -> Result<(), Error> { let mut bytes = vec![]; source.read_to_end(&mut bytes)?; self.write_blob_sync(merkle, &bytes) } fn write_blob_sync(&self, merkle: &Hash, bytes: &[u8]) -> Result<(), Error> { use std::{convert::TryInto, io::Write}; let mut file = self.root_dir().unwrap().write_file(merkle.to_string(), 0o777)?; file.set_len(bytes.len().try_into().unwrap())?; file.write_all(&bytes)?; Ok(()) } } pub struct Ramdisk { proxy: NodeProxy, client: RamdiskClient, } impl Ramdisk { pub fn start() -> Result<Self, Error> { let client = RamdiskClient::builder(512, 1 << 20).isolated_dev_root().build()?; let proxy = NodeProxy::new(fuchsia_async::Channel::from_channel(client.open()?)?); Ok(Ramdisk { proxy, client }) } fn clone_channel(&self) -> Result<zx::Channel, Error> { let (result, server_end) = zx::Channel::create()?; self.proxy.clone(fidl_fuchsia_io::CLONE_FLAG_SAME_RIGHTS, ServerEnd::new(server_end))?; Ok(result) } fn clone_handle(&self) -> Result<zx::Handle, Error> { Ok(self.clone_channel().context("cloning ramdisk channel")?.into()) } pub fn stop(self) -> Result<(), Error> { Ok(self.client.destroy()?) 
} pub async fn corrupt_blob(&self, merkle: &Hash) { let ramdisk = Clone::clone(&self.proxy); blobfs_corrupt_blob(ramdisk, merkle).await.unwrap(); } } async fn blobfs_corrupt_blob(ramdisk: NodeProxy, merkle: &Hash) -> Result<(), Error> { let mut fs = ServiceFs::new(); fs.root_dir().add_service_at("block", |chan| { ramdisk .clone( fidl_fuchsia_io::CLONE_FLAG_SAME_RIGHTS | fidl_fuchsia_io::OPEN_FLAG_DESCRIBE, ServerEnd::new(chan), ) .unwrap(); None }); let (devfs_client, devfs_server) = zx::Channel::create()?; fs.serve_connection(devfs_server)?; let serve_fs = fs.collect::<()>(); let spawn_and_wait = async move { let p = fdio::spawn_etc( &fuchsia_runtime::job_default(), SpawnOptions::CLONE_ALL - SpawnOptions::CLONE_NAMESPACE, &CString::new("/pkg/bin/blobfs-corrupt").unwrap(), &[ &CString::new("blobfs-corrupt").unwrap(), &CString::new("--device").unwrap(), &CString::new("/dev/block").unwrap(), &CString::new("--merkle").unwrap(), &CString::new(merkle.to_string()).unwrap(), ], None, &mut [SpawnAction::add_namespace_entry( &CString::new("/dev").unwrap(), devfs_client.into(), )], ) .map_err(|(status, _)| status) .context("spawning 'blobfs-corrupt'")?; wait_for_process_async(p).await.context("'blobfs-corrupt'")?; Ok(()) }; let ((), res) = futures::join!(serve_fs, spawn_and_wait); res } async fn wait_for_process_async(proc: fuchsia_zircon::Process) -> Result<(), Error> { let signals = fuchsia_async::OnSignals::new(&proc.as_handle_ref(), zx::Signals::PROCESS_TERMINATED) .await .context("waiting for tool to terminate")?; assert_eq!(signals, zx::Signals::PROCESS_TERMINATED); let ret = proc.info().context("getting tool process info")?.return_code; if ret != 0 { return Err(format_err!("tool returned nonzero exit code {}", ret)); } Ok(()) } fn mkblobfs_block(block_device: zx::Handle) -> Result<(), Error> { let block_device_handle_id = HandleInfo::new(HandleType::User0, 1); let p = fdio::spawn_etc( &fuchsia_runtime::job_default(), SpawnOptions::CLONE_ALL, 
&CString::new("/pkg/bin/blobfs").unwrap(), &[&CString::new("blobfs").unwrap(), &CString::new("mkfs").unwrap()], None, &mut [SpawnAction::add_handle(block_device_handle_id, block_device)], ) .map_err(|(status, _)| status) .context("spawning 'blobfs mkfs'")?; wait_for_process(p).context("'blobfs mkfs'")?; Ok(()) } fn wait_for_process(proc: fuchsia_zircon::Process) -> Result<(), Error> { proc.wait_handle( zx::Signals::PROCESS_TERMINATED, zx::Time::after(zx::Duration::from_seconds(30)), ) .context("waiting for tool to terminate")?; let ret = proc.info().context("getting tool process info")?.return_code; if ret != 0 { return Err(format_err!("tool returned nonzero exit code {}", ret)); } Ok(()) } fn mkblobfs(ramdisk: &Ramdisk) -> Result<(), Error> { mkblobfs_block(ramdisk.clone_handle()?) } #[cfg(test)] mod tests { use {super::*, maplit::btreeset, std::io::Write}; #[fuchsia_async::run_singlethreaded(test)] async fn clean_start_and_stop() { let blobfs = BlobfsRamdisk::start().unwrap(); let proxy = blobfs.root_dir_proxy().unwrap(); drop(proxy); blobfs.stop().await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn clean_start_contains_no_blobs() { let blobfs = BlobfsRamdisk::start().unwrap(); assert_eq!(blobfs.list_blobs().unwrap(), btreeset![]); blobfs.stop().await.unwrap(); } #[test] fn blob_info_conversions() { let a = BlobInfo::from(&b"static slice"[..]); let b = BlobInfo::from(b"owned vec".to_vec()); let c = BlobInfo::from(Cow::from(&b"cow"[..])); assert_ne!(a.merkle, b.merkle); assert_ne!(b.merkle, c.merkle); assert_eq!( a.merkle, fuchsia_merkle::MerkleTree::from_reader(&b"static slice"[..]).unwrap().root() ); let _ = BlobfsRamdisk::builder() .with_blob(&b"static slice"[..]) .with_blob(b"owned vec".to_vec()) .with_blob(Cow::from(&b"cow"[..])); } #[fuchsia_async::run_singlethreaded(test)] async fn with_blob_ignores_duplicates() { let blob = BlobInfo::from(&b"duplicate"[..]); let blobfs = BlobfsRamdisk::builder() .with_blob(blob.clone()) 
.with_blob(blob.clone()) .start() .unwrap(); assert_eq!(blobfs.list_blobs().unwrap(), btreeset![blob.merkle.clone()]); let blobfs = blobfs.into_builder().await.unwrap().with_blob(blob.clone()).start().unwrap(); assert_eq!(blobfs.list_blobs().unwrap(), btreeset![blob.merkle.clone()]); } #[fuchsia_async::run_singlethreaded(test)] async fn build_with_two_blobs() { let blobfs = BlobfsRamdisk::builder() .with_blob(&b"blob 1"[..]) .with_blob(&b"blob 2"[..]) .start() .unwrap(); let expected = btreeset![ fuchsia_merkle::MerkleTree::from_reader(&b"blob 1"[..]).unwrap().root(), fuchsia_merkle::MerkleTree::from_reader(&b"blob 2"[..]).unwrap().root(), ]; assert_eq!(expected.len(), 2); assert_eq!(blobfs.list_blobs().unwrap(), expected); blobfs.stop().await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn remount() { let blobfs = BlobfsRamdisk::builder().with_blob(&b"test"[..]).start().unwrap(); let blobs = blobfs.list_blobs().unwrap(); let blobfs = blobfs.into_builder().await.unwrap().start().unwrap(); assert_eq!(blobs, blobfs.list_blobs().unwrap()); blobfs.stop().await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn blob_appears_in_readdir() { let blobfs = BlobfsRamdisk::start().unwrap(); let root = blobfs.root_dir().unwrap(); let hello_merkle = write_blob(&root, "Hello blobfs!".as_bytes()); assert_eq!(list_blobs(&root), vec![hello_merkle]); drop(root); blobfs.stop().await.unwrap(); } fn write_blob(dir: &openat::Dir, payload: &[u8]) -> String { let merkle = fuchsia_merkle::MerkleTree::from_reader(payload).unwrap().root().to_string(); let mut f = dir.new_file(&merkle, 0600).unwrap(); f.set_len(payload.len() as u64).unwrap(); f.write_all(payload).unwrap(); merkle } fn list_blobs(dir: &openat::Dir) -> Vec<String> { dir.list_dir(".") .unwrap() .map(|entry| entry.unwrap().file_name().to_owned().into_string().unwrap()) .collect() } }
#![deny(missing_docs)] use { anyhow::{format_err, Context as _, Error}, fdio::{SpawnAction, SpawnOptions}, fidl::endpoints::{ClientEnd, ServerEnd}, fidl_fuchsia_io::{ DirectoryAdminMarker, DirectoryAdminProxy, DirectoryMarker, DirectoryProxy, NodeProxy, }, fuchsia_component::server::ServiceFs, fuchsia_merkle::{Hash, MerkleTreeBuilder}, fuchsia_runtime::{HandleInfo, HandleType}, fuchsia_zircon::{self as zx, prelude::*}, futures::prelude::*, ramdevice_client::RamdiskClient, scoped_task::Scoped, std::{borrow::Cow, collections::BTreeSet, ffi::CString}, }; #[cfg(test)] mod test; #[derive(Debug, Clone)] pub struct BlobInfo { merkle: Hash, contents: Cow<'static, [u8]>, } impl<B> From<B> for BlobInfo where B: Into<Cow<'static, [u8]>>, { fn from(bytes: B) -> Self { let bytes = bytes.into(); let mut tree = MerkleTreeBuilder::new(); tree.write(&bytes); Self { merkle: tree.finish().root(), contents: bytes } } } pub struct BlobfsRamdiskBuilder { ramdisk: Option<Ramdisk>, blobs: Vec<BlobInfo>, } impl BlobfsRamdiskBuilder { fn new() -> Self { Self { ramdisk: None, blobs: vec![] } } pub fn ramdisk(mut self, ramdisk: Ramdisk) -> Self { self.ramdisk = Some(ramdisk); self } pub fn with_blob(mut self, blob: impl Into<BlobInfo>) -> Self { self.blobs.push(blob.into()); self } pub fn start(self) -> Result<BlobfsRamdisk, Error> { let ramdisk =
; let block_device_handle_id = HandleInfo::new(HandleType::User0, 1); let fs_root_handle_id = HandleInfo::new(HandleType::User0, 0); let block_handle = ramdisk.clone_channel().context("cloning ramdisk channel")?; let (proxy, blobfs_server_end) = fidl::endpoints::create_proxy::<DirectoryAdminMarker>()?; let process = scoped_task::spawn_etc( scoped_task::job_default(), SpawnOptions::CLONE_ALL, &CString::new("/pkg/bin/blobfs").unwrap(), &[&CString::new("blobfs").unwrap(), &CString::new("mount").unwrap()], None, &mut [ SpawnAction::add_handle(block_device_handle_id, block_handle.into()), SpawnAction::add_handle(fs_root_handle_id, blobfs_server_end.into()), ], ) .map_err(|(status, _)| status) .context("spawning 'blobfs mount'")?; let blobfs = BlobfsRamdisk { backing_ramdisk: ramdisk, process, proxy }; if !self.blobs.is_empty() { let mut present_blobs = blobfs.list_blobs()?; for blob in self.blobs { if present_blobs.contains(&blob.merkle) { continue; } blobfs .write_blob_sync(&blob.merkle, &blob.contents) .context(format!("writing {}", blob.merkle))?; present_blobs.insert(blob.merkle); } } Ok(blobfs) } } pub struct BlobfsRamdisk { backing_ramdisk: Ramdisk, process: Scoped<fuchsia_zircon::Process>, proxy: DirectoryAdminProxy, } impl BlobfsRamdisk { pub fn builder() -> BlobfsRamdiskBuilder { BlobfsRamdiskBuilder::new() } pub fn start() -> Result<Self, Error> { Self::builder().start() } pub fn root_dir_handle(&self) -> Result<ClientEnd<DirectoryMarker>, Error> { let (root_clone, server_end) = zx::Channel::create()?; self.proxy.clone(fidl_fuchsia_io::CLONE_FLAG_SAME_RIGHTS, server_end.into())?; Ok(root_clone.into()) } pub fn root_dir_proxy(&self) -> Result<DirectoryProxy, Error> { Ok(self.root_dir_handle()?.into_proxy()?) 
} pub fn root_dir(&self) -> Result<openat::Dir, Error> { fdio::create_fd(self.root_dir_handle()?.into()).context("failed to create fd") } pub async fn into_builder(self) -> Result<BlobfsRamdiskBuilder, Error> { let ramdisk = self.unmount().await?; Ok(Self::builder().ramdisk(ramdisk)) } pub async fn unmount(self) -> Result<Ramdisk, Error> { zx::Status::ok(self.proxy.unmount().await.context("sending blobfs unmount")?) .context("unmounting blobfs")?; self.process .wait_handle( zx::Signals::PROCESS_TERMINATED, zx::Time::after(zx::Duration::from_seconds(30)), ) .context("waiting for 'blobfs mount' to exit")?; let ret = self.process.info().context("getting 'blobfs mount' process info")?.return_code; if ret != 0 { return Err(format_err!("'blobfs mount' returned nonzero exit code {}", ret)); } Ok(self.backing_ramdisk) } pub async fn stop(self) -> Result<(), Error> { self.unmount().await?.stop() } pub fn list_blobs(&self) -> Result<BTreeSet<Hash>, Error> { self.root_dir()? .list_dir(".")? .map(|entry| { Ok(entry? .file_name() .to_str() .ok_or_else(|| anyhow::format_err!("expected valid utf-8"))? .parse()?) 
}) .collect() } pub fn add_blob_from( &self, merkle: &Hash, mut source: impl std::io::Read, ) -> Result<(), Error> { let mut bytes = vec![]; source.read_to_end(&mut bytes)?; self.write_blob_sync(merkle, &bytes) } fn write_blob_sync(&self, merkle: &Hash, bytes: &[u8]) -> Result<(), Error> { use std::{convert::TryInto, io::Write}; let mut file = self.root_dir().unwrap().write_file(merkle.to_string(), 0o777)?; file.set_len(bytes.len().try_into().unwrap())?; file.write_all(&bytes)?; Ok(()) } } pub struct Ramdisk { proxy: NodeProxy, client: RamdiskClient, } impl Ramdisk { pub fn start() -> Result<Self, Error> { let client = RamdiskClient::builder(512, 1 << 20).isolated_dev_root().build()?; let proxy = NodeProxy::new(fuchsia_async::Channel::from_channel(client.open()?)?); Ok(Ramdisk { proxy, client }) } fn clone_channel(&self) -> Result<zx::Channel, Error> { let (result, server_end) = zx::Channel::create()?; self.proxy.clone(fidl_fuchsia_io::CLONE_FLAG_SAME_RIGHTS, ServerEnd::new(server_end))?; Ok(result) } fn clone_handle(&self) -> Result<zx::Handle, Error> { Ok(self.clone_channel().context("cloning ramdisk channel")?.into()) } pub fn stop(self) -> Result<(), Error> { Ok(self.client.destroy()?) 
} pub async fn corrupt_blob(&self, merkle: &Hash) { let ramdisk = Clone::clone(&self.proxy); blobfs_corrupt_blob(ramdisk, merkle).await.unwrap(); } } async fn blobfs_corrupt_blob(ramdisk: NodeProxy, merkle: &Hash) -> Result<(), Error> { let mut fs = ServiceFs::new(); fs.root_dir().add_service_at("block", |chan| { ramdisk .clone( fidl_fuchsia_io::CLONE_FLAG_SAME_RIGHTS | fidl_fuchsia_io::OPEN_FLAG_DESCRIBE, ServerEnd::new(chan), ) .unwrap(); None }); let (devfs_client, devfs_server) = zx::Channel::create()?; fs.serve_connection(devfs_server)?; let serve_fs = fs.collect::<()>(); let spawn_and_wait = async move { let p = fdio::spawn_etc( &fuchsia_runtime::job_default(), SpawnOptions::CLONE_ALL - SpawnOptions::CLONE_NAMESPACE, &CString::new("/pkg/bin/blobfs-corrupt").unwrap(), &[ &CString::new("blobfs-corrupt").unwrap(), &CString::new("--device").unwrap(), &CString::new("/dev/block").unwrap(), &CString::new("--merkle").unwrap(), &CString::new(merkle.to_string()).unwrap(), ], None, &mut [SpawnAction::add_namespace_entry( &CString::new("/dev").unwrap(), devfs_client.into(), )], ) .map_err(|(status, _)| status) .context("spawning 'blobfs-corrupt'")?; wait_for_process_async(p).await.context("'blobfs-corrupt'")?; Ok(()) }; let ((), res) = futures::join!(serve_fs, spawn_and_wait); res } async fn wait_for_process_async(proc: fuchsia_zircon::Process) -> Result<(), Error> { let signals = fuchsia_async::OnSignals::new(&proc.as_handle_ref(), zx::Signals::PROCESS_TERMINATED) .await .context("waiting for tool to terminate")?; assert_eq!(signals, zx::Signals::PROCESS_TERMINATED); let ret = proc.info().context("getting tool process info")?.return_code; if ret != 0 { return Err(format_err!("tool returned nonzero exit code {}", ret)); } Ok(()) } fn mkblobfs_block(block_device: zx::Handle) -> Result<(), Error> { let block_device_handle_id = HandleInfo::new(HandleType::User0, 1); let p = fdio::spawn_etc( &fuchsia_runtime::job_default(), SpawnOptions::CLONE_ALL, 
&CString::new("/pkg/bin/blobfs").unwrap(), &[&CString::new("blobfs").unwrap(), &CString::new("mkfs").unwrap()], None, &mut [SpawnAction::add_handle(block_device_handle_id, block_device)], ) .map_err(|(status, _)| status) .context("spawning 'blobfs mkfs'")?; wait_for_process(p).context("'blobfs mkfs'")?; Ok(()) } fn wait_for_process(proc: fuchsia_zircon::Process) -> Result<(), Error> { proc.wait_handle( zx::Signals::PROCESS_TERMINATED, zx::Time::after(zx::Duration::from_seconds(30)), ) .context("waiting for tool to terminate")?; let ret = proc.info().context("getting tool process info")?.return_code; if ret != 0 { return Err(format_err!("tool returned nonzero exit code {}", ret)); } Ok(()) } fn mkblobfs(ramdisk: &Ramdisk) -> Result<(), Error> { mkblobfs_block(ramdisk.clone_handle()?) } #[cfg(test)] mod tests { use {super::*, maplit::btreeset, std::io::Write}; #[fuchsia_async::run_singlethreaded(test)] async fn clean_start_and_stop() { let blobfs = BlobfsRamdisk::start().unwrap(); let proxy = blobfs.root_dir_proxy().unwrap(); drop(proxy); blobfs.stop().await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn clean_start_contains_no_blobs() { let blobfs = BlobfsRamdisk::start().unwrap(); assert_eq!(blobfs.list_blobs().unwrap(), btreeset![]); blobfs.stop().await.unwrap(); } #[test] fn blob_info_conversions() { let a = BlobInfo::from(&b"static slice"[..]); let b = BlobInfo::from(b"owned vec".to_vec()); let c = BlobInfo::from(Cow::from(&b"cow"[..])); assert_ne!(a.merkle, b.merkle); assert_ne!(b.merkle, c.merkle); assert_eq!( a.merkle, fuchsia_merkle::MerkleTree::from_reader(&b"static slice"[..]).unwrap().root() ); let _ = BlobfsRamdisk::builder() .with_blob(&b"static slice"[..]) .with_blob(b"owned vec".to_vec()) .with_blob(Cow::from(&b"cow"[..])); } #[fuchsia_async::run_singlethreaded(test)] async fn with_blob_ignores_duplicates() { let blob = BlobInfo::from(&b"duplicate"[..]); let blobfs = BlobfsRamdisk::builder() .with_blob(blob.clone()) 
.with_blob(blob.clone()) .start() .unwrap(); assert_eq!(blobfs.list_blobs().unwrap(), btreeset![blob.merkle.clone()]); let blobfs = blobfs.into_builder().await.unwrap().with_blob(blob.clone()).start().unwrap(); assert_eq!(blobfs.list_blobs().unwrap(), btreeset![blob.merkle.clone()]); } #[fuchsia_async::run_singlethreaded(test)] async fn build_with_two_blobs() { let blobfs = BlobfsRamdisk::builder() .with_blob(&b"blob 1"[..]) .with_blob(&b"blob 2"[..]) .start() .unwrap(); let expected = btreeset![ fuchsia_merkle::MerkleTree::from_reader(&b"blob 1"[..]).unwrap().root(), fuchsia_merkle::MerkleTree::from_reader(&b"blob 2"[..]).unwrap().root(), ]; assert_eq!(expected.len(), 2); assert_eq!(blobfs.list_blobs().unwrap(), expected); blobfs.stop().await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn remount() { let blobfs = BlobfsRamdisk::builder().with_blob(&b"test"[..]).start().unwrap(); let blobs = blobfs.list_blobs().unwrap(); let blobfs = blobfs.into_builder().await.unwrap().start().unwrap(); assert_eq!(blobs, blobfs.list_blobs().unwrap()); blobfs.stop().await.unwrap(); } #[fuchsia_async::run_singlethreaded(test)] async fn blob_appears_in_readdir() { let blobfs = BlobfsRamdisk::start().unwrap(); let root = blobfs.root_dir().unwrap(); let hello_merkle = write_blob(&root, "Hello blobfs!".as_bytes()); assert_eq!(list_blobs(&root), vec![hello_merkle]); drop(root); blobfs.stop().await.unwrap(); } fn write_blob(dir: &openat::Dir, payload: &[u8]) -> String { let merkle = fuchsia_merkle::MerkleTree::from_reader(payload).unwrap().root().to_string(); let mut f = dir.new_file(&merkle, 0600).unwrap(); f.set_len(payload.len() as u64).unwrap(); f.write_all(payload).unwrap(); merkle } fn list_blobs(dir: &openat::Dir) -> Vec<String> { dir.list_dir(".") .unwrap() .map(|entry| entry.unwrap().file_name().to_owned().into_string().unwrap()) .collect() } }
match self.ramdisk { Some(ramdisk) => ramdisk, None => { let ramdisk = Ramdisk::start().context("creating backing ramdisk for blobfs")?; mkblobfs(&ramdisk)?; ramdisk } }
if_condition
[]
Rust
src/lib.rs
andreyk0/st7920
c7db218fddbc48d2054924be326cbbbefac1a1ea
#![no_std] use num_derive::ToPrimitive; use num_traits::ToPrimitive; use embedded_hal::blocking::delay::DelayUs; use embedded_hal::blocking::spi; use embedded_hal::digital::v2::OutputPin; #[derive(Debug)] pub enum Error<CommError, PinError> { Comm(CommError), Pin(PinError), } #[derive(ToPrimitive)] enum Instruction { BasicFunction = 0x30, ExtendedFunction = 0x34, ClearScreen = 0x01, EntryMode = 0x06, DisplayOnCursorOff = 0x0C, GraphicsOn = 0x36, SetGraphicsAddress = 0x80, } pub const WIDTH: u32 = 128; pub const HEIGHT: u32 = 64; const ROW_SIZE: usize = (WIDTH / 8) as usize; const BUFFER_SIZE: usize = ROW_SIZE * HEIGHT as usize; const X_ADDR_DIV: u8 = 16; pub struct ST7920<SPI, RST, CS> where SPI: spi::Write<u8>, RST: OutputPin, CS: OutputPin, { spi: SPI, rst: RST, cs: Option<CS>, buffer: [u8; BUFFER_SIZE], flip: bool, } impl<SPI, RST, CS, PinError, SPIError> ST7920<SPI, RST, CS> where SPI: spi::Write<u8, Error = SPIError>, RST: OutputPin<Error = PinError>, CS: OutputPin<Error = PinError>, { pub fn new(spi: SPI, rst: RST, cs: Option<CS>, flip: bool) -> Self { let buffer = [0; BUFFER_SIZE]; ST7920 { spi, rst, cs, buffer, flip, } } fn enable_cs(&mut self, delay: &mut dyn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { if let Some(cs) = self.cs.as_mut() { cs.set_high().map_err(Error::Pin)?; delay.delay_us(1); } Ok(()) } fn disable_cs( &mut self, delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { if let Some(cs) = self.cs.as_mut() { delay.delay_us(1); cs.set_high().map_err(Error::Pin)?; } Ok(()) } pub fn init(&mut self, delay: &mut dyn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; self.hard_reset(delay)?; self.write_command(Instruction::BasicFunction)?; delay.delay_us(200); self.write_command(Instruction::DisplayOnCursorOff)?; delay.delay_us(100); self.write_command(Instruction::ClearScreen)?; delay.delay_us(10 * 1000); self.write_command(Instruction::EntryMode)?; delay.delay_us(100); 
self.write_command(Instruction::ExtendedFunction)?; delay.delay_us(10 * 1000); self.write_command(Instruction::GraphicsOn)?; delay.delay_us(100 * 1000); self.disable_cs(delay)?; Ok(()) } fn hard_reset( &mut self, delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { self.rst.set_low().map_err(Error::Pin)?; delay.delay_us(40 * 1000); self.rst.set_high().map_err(Error::Pin)?; delay.delay_us(40 * 1000); Ok(()) } fn write_command(&mut self, command: Instruction) -> Result<(), Error<SPIError, PinError>> { self.write_command_param(command, 0) } fn write_command_param( &mut self, command: Instruction, param: u8, ) -> Result<(), Error<SPIError, PinError>> { let command_param = command.to_u8().unwrap() | param; let cmd: u8 = 0xF8; self.spi .write(&[cmd, command_param & 0xF0, (command_param << 4) & 0xF0]) .map_err(Error::Comm)?; Ok(()) } fn write_data(&mut self, data: u8) -> Result<(), Error<SPIError, PinError>> { self.spi .write(&[0xFA, data & 0xF0, (data << 4) & 0xF0]) .map_err(Error::Comm)?; Ok(()) } fn set_address(&mut self, x: u8, y: u8) -> Result<(), Error<SPIError, PinError>> { const HALF_HEIGHT: u8 = HEIGHT as u8 / 2; self.write_command_param( Instruction::SetGraphicsAddress, if y < HALF_HEIGHT { y } else { y - HALF_HEIGHT }, )?; self.write_command_param( Instruction::SetGraphicsAddress, if y < HALF_HEIGHT { x / X_ADDR_DIV } else { x / X_ADDR_DIV + (WIDTH as u8 / X_ADDR_DIV) }, )?; Ok(()) } pub fn clear(&mut self, delay: &mut dyn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; for y in 0..HEIGHT as u8 / 2 { self.set_address(0, y)?; for _x in 0..ROW_SIZE { self.write_data(0)?; self.write_data(0)?; } } self.disable_cs(delay)?; Ok(()) } pub fn set_pixel(&mut self, mut x: u8, mut y: u8, val: u8) { if self.flip { y = (HEIGHT - 1) as u8 - y; x = (WIDTH - 1) as u8 - x; } let x_mask = 0x80 >> (x % 8); if val != 0 { self.buffer[y as usize * ROW_SIZE + x as usize / 8] |= x_mask; } else { self.buffer[y as usize * ROW_SIZE + 
x as usize / 8] &= !x_mask; } } pub fn flush(&mut self, delay: &mut dyn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; for y in 0..HEIGHT as u8 / 2 { self.set_address(0, y)?; let mut row_start = y as usize * ROW_SIZE; for x in 0..ROW_SIZE { self.write_data(self.buffer[row_start + x])?; } row_start += (HEIGHT as usize / 2) * ROW_SIZE; for x in 0..ROW_SIZE { self.write_data(self.buffer[row_start + x])?; } } self.disable_cs(delay)?; Ok(()) } pub fn flush_region( &mut self, x: u8, mut y: u8, w: u8, h: u8, delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; let mut adj_x = x; if self.flip { y = HEIGHT as u8 - (y + h); adj_x = WIDTH as u8 - (x + w); } let left = (adj_x / X_ADDR_DIV) * X_ADDR_DIV; let mut right = ((adj_x + w) / X_ADDR_DIV) * X_ADDR_DIV; if right < adj_x + w { right += X_ADDR_DIV; } let mut row_start = y as usize * ROW_SIZE; for y in y..y + h { self.set_address(adj_x, y)?; for x in left / 8..right / 8 { self.write_data(self.buffer[row_start + x as usize])?; } row_start += ROW_SIZE; } self.disable_cs(delay)?; Ok(()) } } #[cfg(feature = "graphics")] use embedded_graphics; #[cfg(feature = "graphics")] use self::embedded_graphics::{ geometry::Point, drawable::Pixel, pixelcolor::BinaryColor, prelude::*, DrawTarget, }; #[cfg(feature = "graphics")] impl<SPI, CS, RST, PinError, SPIError> DrawTarget<BinaryColor> for ST7920<SPI, CS, RST> where SPI: spi::Write<u8, Error = SPIError>, RST: OutputPin<Error = PinError>, CS: OutputPin<Error = PinError>, { type Error = core::convert::Infallible; fn draw_pixel(&mut self, pixel: Pixel<BinaryColor>) -> Result<(), Self::Error> { let Pixel(coord, color) = pixel; let x = coord.x as u8; let y = coord.y as u8; let c = match color { BinaryColor::Off => 0 , BinaryColor::On => 1 }; self.set_pixel(x, y, c); Ok(()) } fn size(&self) -> Size { if self.flip { Size::new(HEIGHT, WIDTH) } else { Size::new(WIDTH, HEIGHT) } } } impl<SPI, RST, CS, PinError, 
SPIError> ST7920<SPI, RST, CS> where SPI: spi::Write<u8, Error = SPIError>, RST: OutputPin<Error = PinError>, CS: OutputPin<Error = PinError>, { pub fn flush_region_graphics( &mut self, region: (Point, Size), delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { self.flush_region( region.0.x as u8, region.0.y as u8, region.1.width as u8, region.1.height as u8, delay, ) } }
#![no_std] use num_derive::ToPrimitive; use num_traits::ToPrimitive; use embedded_hal::blocking::delay::DelayUs; use embedded_hal::blocking::spi; use embedded_hal::digital::v2::OutputPin; #[derive(Debug)] pub enum Error<CommError, PinError> { Comm(CommError), Pin(PinError), } #[derive(ToPrimitive)] enum Instruction { BasicFunction = 0x30, ExtendedFunction = 0x34, ClearScreen = 0x01, EntryMode = 0x06, DisplayOnCursorOff = 0x0C, GraphicsOn = 0x36, SetGraphicsAddress = 0x80, } pub const WIDTH: u32 = 128; pub const HEIGHT: u32 = 64; const ROW_SIZE: usize = (WIDTH / 8) as usize; const BUFFER_SIZE: usize = ROW_SIZE * HEIGHT as usize; const X_ADDR_DIV: u8 = 16; pub struct ST7920<SPI, RST, CS> where SPI: spi::Write<u8>, RST: OutputPin, CS: OutputPin, { spi: SPI, rst: RST, cs: Option<CS>, buffer: [u8; BUFFER_SIZE], flip: bool, } impl<SPI, RST, CS, PinError, SPIError> ST7920<SPI, RST, CS> where SPI: spi::Write<u8, Error = SPIError>, RST: OutputPin<Error = PinError>, CS: OutputPin<Error = PinError>, { pub fn new(spi: SPI, rst: RST, cs: Option<CS>, flip: bool) -> Self { let buffer = [0; BUFFER_SIZE]; ST7920 { spi, rst, cs, buffer, flip, } } fn enable_cs(&mut self, delay: &mut dyn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { if let Some(cs) = self.cs.as_mut() { cs.set_high().map_err(Error::Pin)?; delay.delay_us(1); } Ok(()) } fn disable_cs( &mut self, delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { if let Some(cs) = self.cs.as_mut() { delay.delay_us(1); cs.set_high().map_err(Error::Pin)?; } Ok(()) } pub fn init(&mut self, delay: &mut d
t_high().map_err(Error::Pin)?; delay.delay_us(40 * 1000); Ok(()) } fn write_command(&mut self, command: Instruction) -> Result<(), Error<SPIError, PinError>> { self.write_command_param(command, 0) } fn write_command_param( &mut self, command: Instruction, param: u8, ) -> Result<(), Error<SPIError, PinError>> { let command_param = command.to_u8().unwrap() | param; let cmd: u8 = 0xF8; self.spi .write(&[cmd, command_param & 0xF0, (command_param << 4) & 0xF0]) .map_err(Error::Comm)?; Ok(()) } fn write_data(&mut self, data: u8) -> Result<(), Error<SPIError, PinError>> { self.spi .write(&[0xFA, data & 0xF0, (data << 4) & 0xF0]) .map_err(Error::Comm)?; Ok(()) } fn set_address(&mut self, x: u8, y: u8) -> Result<(), Error<SPIError, PinError>> { const HALF_HEIGHT: u8 = HEIGHT as u8 / 2; self.write_command_param( Instruction::SetGraphicsAddress, if y < HALF_HEIGHT { y } else { y - HALF_HEIGHT }, )?; self.write_command_param( Instruction::SetGraphicsAddress, if y < HALF_HEIGHT { x / X_ADDR_DIV } else { x / X_ADDR_DIV + (WIDTH as u8 / X_ADDR_DIV) }, )?; Ok(()) } pub fn clear(&mut self, delay: &mut dyn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; for y in 0..HEIGHT as u8 / 2 { self.set_address(0, y)?; for _x in 0..ROW_SIZE { self.write_data(0)?; self.write_data(0)?; } } self.disable_cs(delay)?; Ok(()) } pub fn set_pixel(&mut self, mut x: u8, mut y: u8, val: u8) { if self.flip { y = (HEIGHT - 1) as u8 - y; x = (WIDTH - 1) as u8 - x; } let x_mask = 0x80 >> (x % 8); if val != 0 { self.buffer[y as usize * ROW_SIZE + x as usize / 8] |= x_mask; } else { self.buffer[y as usize * ROW_SIZE + x as usize / 8] &= !x_mask; } } pub fn flush(&mut self, delay: &mut dyn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; for y in 0..HEIGHT as u8 / 2 { self.set_address(0, y)?; let mut row_start = y as usize * ROW_SIZE; for x in 0..ROW_SIZE { self.write_data(self.buffer[row_start + x])?; } row_start += (HEIGHT as usize / 2) * 
ROW_SIZE; for x in 0..ROW_SIZE { self.write_data(self.buffer[row_start + x])?; } } self.disable_cs(delay)?; Ok(()) } pub fn flush_region( &mut self, x: u8, mut y: u8, w: u8, h: u8, delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; let mut adj_x = x; if self.flip { y = HEIGHT as u8 - (y + h); adj_x = WIDTH as u8 - (x + w); } let left = (adj_x / X_ADDR_DIV) * X_ADDR_DIV; let mut right = ((adj_x + w) / X_ADDR_DIV) * X_ADDR_DIV; if right < adj_x + w { right += X_ADDR_DIV; } let mut row_start = y as usize * ROW_SIZE; for y in y..y + h { self.set_address(adj_x, y)?; for x in left / 8..right / 8 { self.write_data(self.buffer[row_start + x as usize])?; } row_start += ROW_SIZE; } self.disable_cs(delay)?; Ok(()) } } #[cfg(feature = "graphics")] use embedded_graphics; #[cfg(feature = "graphics")] use self::embedded_graphics::{ geometry::Point, drawable::Pixel, pixelcolor::BinaryColor, prelude::*, DrawTarget, }; #[cfg(feature = "graphics")] impl<SPI, CS, RST, PinError, SPIError> DrawTarget<BinaryColor> for ST7920<SPI, CS, RST> where SPI: spi::Write<u8, Error = SPIError>, RST: OutputPin<Error = PinError>, CS: OutputPin<Error = PinError>, { type Error = core::convert::Infallible; fn draw_pixel(&mut self, pixel: Pixel<BinaryColor>) -> Result<(), Self::Error> { let Pixel(coord, color) = pixel; let x = coord.x as u8; let y = coord.y as u8; let c = match color { BinaryColor::Off => 0 , BinaryColor::On => 1 }; self.set_pixel(x, y, c); Ok(()) } fn size(&self) -> Size { if self.flip { Size::new(HEIGHT, WIDTH) } else { Size::new(WIDTH, HEIGHT) } } } impl<SPI, RST, CS, PinError, SPIError> ST7920<SPI, RST, CS> where SPI: spi::Write<u8, Error = SPIError>, RST: OutputPin<Error = PinError>, CS: OutputPin<Error = PinError>, { pub fn flush_region_graphics( &mut self, region: (Point, Size), delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { self.flush_region( region.0.x as u8, region.0.y as u8, region.1.width as u8, 
region.1.height as u8, delay, ) } }
yn DelayUs<u32>) -> Result<(), Error<SPIError, PinError>> { self.enable_cs(delay)?; self.hard_reset(delay)?; self.write_command(Instruction::BasicFunction)?; delay.delay_us(200); self.write_command(Instruction::DisplayOnCursorOff)?; delay.delay_us(100); self.write_command(Instruction::ClearScreen)?; delay.delay_us(10 * 1000); self.write_command(Instruction::EntryMode)?; delay.delay_us(100); self.write_command(Instruction::ExtendedFunction)?; delay.delay_us(10 * 1000); self.write_command(Instruction::GraphicsOn)?; delay.delay_us(100 * 1000); self.disable_cs(delay)?; Ok(()) } fn hard_reset( &mut self, delay: &mut dyn DelayUs<u32>, ) -> Result<(), Error<SPIError, PinError>> { self.rst.set_low().map_err(Error::Pin)?; delay.delay_us(40 * 1000); self.rst.se
random
[ { "content": "fn main() {\n\n // Put the linker script somewhere the linker can find it\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // Only re-run the build script when memory.x is changed,\n\n // instead of when any part of the source code changes.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 19838.249311243522 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n if let (Some(p), Some(cp)) = (stm32::Peripherals::take(), Peripherals::take()) {\n\n let rcc = p.RCC.constrain();\n\n\n\n let clocks = rcc\n\n .cfgr\n\n .sysclk(stm32f4xx_hal::time::MegaHertz(168))\n\n .freeze();\n\n\n\n let mut delay = Delay::new(cp.SYST, clocks);\n\n\n\n let gpiob = p.GPIOB.split();\n\n\n\n let sck = gpiob.pb3.into_alternate_af5();\n\n let mosi = gpiob.pb5.into_alternate_af5();\n\n let reset = gpiob.pb7.into_push_pull_output();\n\n let cs = gpiob.pb6.into_push_pull_output();\n\n\n\n let spi = Spi::spi1(\n\n p.SPI1,\n", "file_path": "examples/graphics.rs", "rank": 2, "score": 18558.029940921882 }, { "content": "# `ST7920`\n\n\n\nThis is a Rust driver library for LCD displays using the [ST7920] controller. It supports graphics mode of the controller, 128x64 in 1bpp. 
SPI connection to MCU is supported.\n\n\n\nIt implements [embedded-graphics] driver API.\n\n\n\nIt is platform independent as it uses [embedded-hal] APIs to access hardware.\n\n\n\nThe examples are based on the [stm32f4xx_hal] implementation of embedded-hal.\n\n\n\n\n\n\n\n# Documentation\n\n\n\nSee [examples].\n\n\n\nThe controller supports 1 bit-per-pixel displays, so an off-screen buffer has to be used to provide random access to pixels.\n\nSize of the buffer is 1024 bytes.\n\n\n\nThe buffer has to be flushed to update the display after a group of draw calls has been completed. The flush is not part of embedded-graphics API.\n\n\n\n# License\n\n\n\nThis library is licensed under MIT license ([LICENSE](LICENSE) or http://opensource.org/licenses/MIT)\n\n\n\n[embedded-graphics]: https://docs.rs/embedded-graphics/0.6.0-alpha.2/embedded_graphics/\n\n[embedded-hal]: https://docs.rs/embedded-hal/0.2.3/embedded_hal/\n\n[stm32f4xx_hal]: https://docs.rs/stm32f4xx-hal/0.5.0/stm32f4xx_hal/\n\n[examples]: https://github.com/wjakobczyk/st7920/tree/master/examples\n", "file_path": "README.md", "rank": 3, "score": 7762.460768289734 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nextern crate panic_semihosting;\n\n\n\nuse cortex_m::peripheral::Peripherals;\n\nuse cortex_m_rt::entry;\n\n\n\nuse hal::delay::Delay;\n\nuse hal::gpio::*;\n\nuse hal::rcc::RccExt;\n\nuse hal::spi::*;\n\nuse hal::stm32;\n\nuse stm32f4xx_hal as hal;\n\n\n\nuse embedded_graphics::{\n\n prelude::*,\n\n fonts::{Font6x8, Text},\n\n pixelcolor::BinaryColor,\n\n primitives::Circle,\n\n style::{PrimitiveStyle, TextStyle},\n\n};\n\n\n\nuse st7920::ST7920;\n\n\n\n#[entry]\n", "file_path": "examples/graphics.rs", "rank": 19, "score": 5.513587093507516 }, { "content": " (sck, NoMiso, mosi),\n\n Mode {\n\n polarity: Polarity::IdleLow,\n\n phase: Phase::CaptureOnFirstTransition,\n\n },\n\n stm32f4xx_hal::time::KiloHertz(600).into(),\n\n clocks,\n\n );\n\n\n\n let mut disp = ST7920::new(spi, reset, Some(cs), 
false);\n\n\n\n disp.init(&mut delay).expect(\"could not init display\");\n\n disp.clear(&mut delay).expect(\"could not clear display\");\n\n\n\n let c = Circle::new(Point::new(20, 20), 8).into_styled(PrimitiveStyle::with_fill(BinaryColor::On));\n\n let t = Text::new(\"Hello Rust!\", Point::new(40, 16))\n\n .into_styled(TextStyle::new(Font6x8, BinaryColor::On));\n\n\n\n c.draw(&mut disp).unwrap();\n\n t.draw(&mut disp).unwrap();\n\n\n\n disp.flush( &mut delay).expect(\"could not flush display\");\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n}\n", "file_path": "examples/graphics.rs", "rank": 20, "score": 4.660775064417901 }, { "content": "use std::env;\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "build.rs", "rank": 21, "score": 2.374667802854178 } ]
Rust
src/connectivity/network/testing/netemul/runner/helpers/netstack_cfg/src/main.rs
EnderNightLord-ChromeBook/fuchsia-pine64-pinephone
05e2c059b57b6217089090a0315971d1735ecf57
use { anyhow::{format_err, Context as _, Error}, fidl_fuchsia_net, fidl_fuchsia_net_stack::StackMarker, fidl_fuchsia_net_stack_ext::FidlReturn, fidl_fuchsia_netemul_network::{EndpointManagerMarker, NetworkContextMarker}, fidl_fuchsia_netstack::{InterfaceConfig, NetstackMarker}, fuchsia_async as fasync, fuchsia_component::client, structopt::StructOpt, }; #[derive(StructOpt, Debug)] #[structopt(name = "netstack_cfg")] struct Opt { #[structopt(long, short = "e")] endpoint: String, #[structopt(long, short = "i")] ip: Option<String>, #[structopt(long, short = "g")] gateway: Option<String>, #[structopt(long = "skip-up-check")] skip_up_check: bool, } const DEFAULT_METRIC: u32 = 100; async fn config_netstack(opt: Opt) -> Result<(), Error> { log::info!("Configuring endpoint {}", opt.endpoint); let netctx = client::connect_to_service::<NetworkContextMarker>()?; let (epm, epmch) = fidl::endpoints::create_proxy::<EndpointManagerMarker>()?; netctx.get_endpoint_manager(epmch)?; let ep = epm.get_endpoint(&opt.endpoint).await?; let ep = ep.ok_or_else(|| format_err!("can't find endpoint {}", opt.endpoint))?.into_proxy()?; log::info!("Got endpoint."); let device_connection = ep.get_device().await?; log::info!("Got device connection."); let netstack = client::connect_to_service::<NetstackMarker>()?; let mut cfg = InterfaceConfig { name: opt.endpoint.clone(), filepath: format!("/vdev/{}", opt.endpoint), metric: DEFAULT_METRIC, }; let nicid = match device_connection { fidl_fuchsia_netemul_network::DeviceConnection::Ethernet(e) => netstack .add_ethernet_device(&format!("/vdev/{}", opt.endpoint), &mut cfg, e) .await .with_context(|| format!("add_ethernet_device FIDL error ({:?})", cfg))? .map_err(fuchsia_zircon::Status::from_raw) .with_context(|| format!("add_ethernet_device error ({:?})", cfg))?, fidl_fuchsia_netemul_network::DeviceConnection::NetworkDevice(device) => todo!( "(48860) Support NetworkDevice configuration. 
Got unexpected NetworkDevice {:?}", device ), }; let () = netstack.set_interface_status(nicid as u32, true)?; log::info!("Added ethernet to stack."); let subnet: Option<fidl_fuchsia_net::Subnet> = opt.ip.as_ref().map(|ip| { ip.parse::<fidl_fuchsia_net_ext::Subnet>().expect("Can't parse provided ip").into() }); if let Some(mut subnet) = subnet { let _ = netstack .set_interface_address(nicid as u32, &mut subnet.addr, subnet.prefix_len) .await .context("set interface address error")?; } else { let (dhcp_client, server_end) = fidl::endpoints::create_proxy::<fidl_fuchsia_net_dhcp::ClientMarker>() .context("failed to create fidl endpoints")?; netstack .get_dhcp_client(nicid, server_end) .await .context("failed to call get_dhcp_client")? .map_err(fuchsia_zircon::Status::from_raw) .context("failed to get dhcp client")?; dhcp_client .start() .await .context("failed to call dhcp_client.start")? .map_err(fuchsia_zircon::Status::from_raw) .context("failed to start dhcp client")?; }; log::info!("Configured nic address."); if let Some(gateway) = &opt.gateway { let gw_addr: fidl_fuchsia_net::IpAddress = fidl_fuchsia_net_ext::IpAddress( gateway.parse::<std::net::IpAddr>().context("failed to parse gateway address")?, ) .into(); let unspec_addr: fidl_fuchsia_net::IpAddress = match gw_addr { fidl_fuchsia_net::IpAddress::Ipv4(..) => fidl_fuchsia_net_ext::IpAddress( std::net::IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED), ), fidl_fuchsia_net::IpAddress::Ipv6(..) 
=> fidl_fuchsia_net_ext::IpAddress( std::net::IpAddr::V6(std::net::Ipv6Addr::UNSPECIFIED), ), } .into(); let stack = client::connect_to_service::<StackMarker>()?; let () = stack .add_forwarding_entry(&mut fidl_fuchsia_net_stack::ForwardingEntry { subnet: fidl_fuchsia_net::Subnet { addr: unspec_addr, prefix_len: 0 }, destination: fidl_fuchsia_net_stack::ForwardingDestination::NextHop(gw_addr), }) .await .squash_result() .context("failed to add forwarding entry for gateway")?; log::info!("Configured the default route with gateway address."); } log::info!("Waiting for interface up..."); let interface_state = client::connect_to_service::<fidl_fuchsia_net_interfaces::StateMarker>()?; let () = fidl_fuchsia_net_interfaces_ext::wait_interface_with_id( fidl_fuchsia_net_interfaces_ext::event_stream_from_state(&interface_state)?, &mut fidl_fuchsia_net_interfaces_ext::InterfaceState::Unknown(nicid.into()), |properties| { if !opt.skip_up_check && !properties.online.unwrap_or(false) { log::info!("Found interface, but it's down. waiting."); return None; } if subnet.is_some() { if properties .addresses .as_ref() .map_or(false, |addresses| addresses.iter().any(|a| a.addr == subnet)) { Some(()) } else { log::info!("Found interface, but address not yet present. waiting."); None } } else { Some(()) } }, ) .await .context("wait for interface")?; log::info!("Found ethernet with id {}", nicid); Ok(()) } fn main() -> Result<(), Error> { let () = fuchsia_syslog::init().context("cannot init logger")?; let opt = Opt::from_args(); let mut executor = fasync::Executor::new().context("Error creating executor")?; executor.run_singlethreaded(config_netstack(opt)) }
use { anyhow::{format_err, Context as _, Error}, fidl_fuchsia_net, fidl_fuchsia_net_stack::StackMarker, fidl_fuchsia_net_stack_ext::FidlReturn, fidl_fuchsia_netemul_network::{EndpointManagerMarker, NetworkContextMarker}, fidl_fuchsia_netstack::{InterfaceConfig, NetstackMarker}, fuchsia_async as fasync, fuchsia_component::client, structopt::StructOpt, }; #[derive(StructOpt, Debug)] #[structopt(name = "netstack_cfg")] struct Opt { #[structopt(long, short = "e")] endpoint: String, #[structopt(long, short = "i")] ip: Option<String>, #[structopt(long, short = "g")] gateway: Option<String>, #[structopt(long = "skip-up-check")] skip_up_check: bool, } const DEFAULT_METRIC: u32 = 100; async fn config_netstack(opt: Opt) -> Result<(), Error> { log::info!("Configuring endpoint {}", opt.endpoint); let netctx = client::connect_to_service::<NetworkContextMarker>()?; let (epm, epmch) = fidl::endpoints::create_proxy::<EndpointManagerMarker>()?; netctx.get_endpoint_manager(epmch)?; let ep = epm.get_endpoint(&opt.endpoint).await?; let ep = ep.ok_or_else(|| format_err!("can't find endpoint {}", opt.endpoint))?.into_proxy()?; log::info!("Got endpoint."); let device_connection = ep.get_device().await?; log::info!("Got device connection."); let netstack = client::connect_to_service::<NetstackMarker>()?; let mut cfg = InterfaceConfig { name: opt.endpoint.clone(), filepath: format!("/vdev/{}", opt.endpoint), metric: DEFAULT_METRIC, }; let nicid = match device_connection { fidl_fuchsia_netemul_network::DeviceConnection::Ethernet(e) => netstack .add_ethernet_device(&format!("/vdev/{}", opt.endpoint), &mut cfg, e) .await .with_context(|| format!("add_ethernet_device FIDL error ({:?})", cfg))? .map_err(fuchsia_zircon::Status::from_raw) .with_context(|| format!("add_ethernet_device error ({:?})", cfg))?, fidl_fuchsia_netemul_network::DeviceConnection::NetworkDevice(device) => todo!( "(48860) Support NetworkDevice configuration. 
Got unexpected NetworkDevice {:?}", device ), }; let () = netstack.set_interface_status(nicid as u32, true)?; log::info!("Added ethernet to stack."); let subnet: Option<fidl_fuchsia_net::Subnet> = opt.ip.as_ref().map(|ip| { ip.parse::<fidl_fuchsia_net_ext::Subnet>().expect("Can't parse provided ip").into() }); if let Some(mut subnet) = subnet { let _ = netstack .set_interface_address(nicid as u32, &mut subnet.addr, subnet.prefix_len) .await .context("set interface address error")?; } else { let (dhcp_client, server_end) = fidl::endpoints::create_proxy::<fidl_fuchsia_net_dhcp::ClientMarker>() .context("failed to create fidl endpoints")?; netstack .get_dhcp_client(nicid, server_end) .await .context("failed to call get_dhcp_client")? .map_err(fuchsia_zircon::Status::from_raw) .context("failed to get dhcp client")?; dhcp_client .start() .await .context("failed to call dhcp_client.start")? .map_err(fuchsia_zircon::Status::from_raw) .context("failed to start dhcp client")?; }; log::info!("Configured nic address."); if let Some(gateway) = &opt.gateway { let gw_addr: fidl_fuchsia_net::IpAddress = fidl_fuchsia_net_ext::IpAddress( gateway.parse::<std::net::IpAddr>().context("failed to parse gateway address")?, ) .into(); let unspec_addr: fidl_fuchsia_net::IpAddress = match gw_addr { fidl_fuchsia_net::IpAddress::Ipv4(..)
_net_interfaces_ext::wait_interface_with_id( fidl_fuchsia_net_interfaces_ext::event_stream_from_state(&interface_state)?, &mut fidl_fuchsia_net_interfaces_ext::InterfaceState::Unknown(nicid.into()), |properties| { if !opt.skip_up_check && !properties.online.unwrap_or(false) { log::info!("Found interface, but it's down. waiting."); return None; } if subnet.is_some() { if properties .addresses .as_ref() .map_or(false, |addresses| addresses.iter().any(|a| a.addr == subnet)) { Some(()) } else { log::info!("Found interface, but address not yet present. waiting."); None } } else { Some(()) } }, ) .await .context("wait for interface")?; log::info!("Found ethernet with id {}", nicid); Ok(()) } fn main() -> Result<(), Error> { let () = fuchsia_syslog::init().context("cannot init logger")?; let opt = Opt::from_args(); let mut executor = fasync::Executor::new().context("Error creating executor")?; executor.run_singlethreaded(config_netstack(opt)) }
=> fidl_fuchsia_net_ext::IpAddress( std::net::IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED), ), fidl_fuchsia_net::IpAddress::Ipv6(..) => fidl_fuchsia_net_ext::IpAddress( std::net::IpAddr::V6(std::net::Ipv6Addr::UNSPECIFIED), ), } .into(); let stack = client::connect_to_service::<StackMarker>()?; let () = stack .add_forwarding_entry(&mut fidl_fuchsia_net_stack::ForwardingEntry { subnet: fidl_fuchsia_net::Subnet { addr: unspec_addr, prefix_len: 0 }, destination: fidl_fuchsia_net_stack::ForwardingDestination::NextHop(gw_addr), }) .await .squash_result() .context("failed to add forwarding entry for gateway")?; log::info!("Configured the default route with gateway address."); } log::info!("Waiting for interface up..."); let interface_state = client::connect_to_service::<fidl_fuchsia_net_interfaces::StateMarker>()?; let () = fidl_fuchsia
random
[]
Rust
game/src/sandbox/minimap.rs
balbok0/abstreet
3af15fefdb2772c83864c08724318418da8190a9
use abstutil::prettyprint_usize; use map_gui::tools::{MinimapControls, Navigator}; use widgetry::{ ControlState, EventCtx, GfxCtx, HorizontalAlignment, Image, Key, Line, Panel, ScreenDims, Text, VerticalAlignment, Widget, }; use crate::app::App; use crate::app::Transition; use crate::common::Warping; use crate::layer::PickLayer; use crate::sandbox::dashboards::TripTable; pub struct MinimapController; impl MinimapControls<App> for MinimapController { fn has_zorder(&self, app: &App) -> bool { app.opts.dev } fn has_layer(&self, app: &App) -> bool { app.primary.layer.is_some() } fn draw_extra(&self, g: &mut GfxCtx, app: &App) { if let Some(ref l) = app.primary.layer { l.draw_minimap(g); } let mut cache = app.primary.agents.borrow_mut(); cache.draw_unzoomed_agents(g, app); } fn make_unzoomed_panel(&self, ctx: &mut EventCtx, app: &App) -> Panel { let unzoomed_agents = &app.primary.agents.borrow().unzoomed_agents; let is_enabled = [ unzoomed_agents.cars(), unzoomed_agents.bikes(), unzoomed_agents.buses_and_trains(), unzoomed_agents.peds(), ]; Panel::new(Widget::row(vec![ make_tool_panel(ctx, app).align_right(), Widget::col(make_agent_toggles(ctx, app, is_enabled)) .bg(app.cs.panel_bg) .padding(16), ])) .aligned( HorizontalAlignment::Right, VerticalAlignment::BottomAboveOSD, ) .build_custom(ctx) } fn make_legend(&self, ctx: &mut EventCtx, app: &App) -> Widget { let unzoomed_agents = &app.primary.agents.borrow().unzoomed_agents; let is_enabled = [ unzoomed_agents.cars(), unzoomed_agents.bikes(), unzoomed_agents.buses_and_trains(), unzoomed_agents.peds(), ]; Widget::custom_row(make_agent_toggles(ctx, app, is_enabled)) .margin_left(26) } fn make_zoomed_side_panel(&self, ctx: &mut EventCtx, app: &App) -> Widget { make_tool_panel(ctx, app) } fn panel_clicked(&self, ctx: &mut EventCtx, app: &mut App, action: &str) -> Option<Transition> { match action { "search" => { return Some(Transition::Push(Navigator::new(ctx, app))); } "zoom out fully" => { return 
Some(Transition::Push(Warping::new( ctx, app.primary.map.get_bounds().get_rectangle().center(), Some(ctx.canvas.min_zoom()), None, &mut app.primary, ))); } "zoom in fully" => { return Some(Transition::Push(Warping::new( ctx, ctx.canvas.center_to_map_pt(), Some(10.0), None, &mut app.primary, ))); } "change layers" => { return Some(Transition::Push(PickLayer::pick(ctx, app))); } "more data" => { return Some(Transition::Push(Box::new(TripTable::new(ctx, app)))); } _ => unreachable!(), } } fn panel_changed(&self, _: &mut EventCtx, app: &mut App, panel: &Panel) { if panel.has_widget("Car") { app.primary .agents .borrow_mut() .unzoomed_agents .update(panel); } } } fn make_agent_toggles(ctx: &mut EventCtx, app: &App, is_enabled: [bool; 4]) -> Vec<Widget> { use widgetry::{include_labeled_bytes, Color, GeomBatchStack, RewriteColor, Toggle}; let [is_car_enabled, is_bike_enabled, is_bus_enabled, is_pedestrian_enabled] = is_enabled; pub fn colored_checkbox( ctx: &EventCtx, action: &str, is_enabled: bool, color: Color, icon: &str, label: &str, tooltip: Text, ) -> Widget { let buttons = ctx .style() .btn_plain .btn() .label_text(label) .padding(4.0) .tooltip(tooltip) .image_color(RewriteColor::NoOp, ControlState::Default); let icon_batch = Image::from_path(icon) .build_batch(ctx) .expect("invalid svg") .0; let false_btn = { let checkbox = Image::from_bytes(include_labeled_bytes!( "../../../widgetry/icons/checkbox_no_border_unchecked.svg" )) .color(RewriteColor::Change(Color::BLACK, color.alpha(0.3))); let mut row = GeomBatchStack::horizontal(vec![ checkbox.build_batch(ctx).expect("invalid svg").0, icon_batch.clone(), ]); row.spacing(8.0); let row_batch = row.batch(); let bounds = row_batch.get_bounds(); buttons.clone().image_batch(row_batch, bounds) }; let true_btn = { let checkbox = Image::from_bytes(include_labeled_bytes!( "../../../widgetry/icons/checkbox_no_border_checked.svg" )) .color(RewriteColor::Change(Color::BLACK, color)); let mut row = 
GeomBatchStack::horizontal(vec![ checkbox.build_batch(ctx).expect("invalid svg").0, icon_batch, ]); row.spacing(8.0); let row_batch = row.batch(); let bounds = row_batch.get_bounds(); buttons.image_batch(row_batch, bounds) }; Toggle::new( is_enabled, false_btn.build(ctx, action), true_btn.build(ctx, action), ) .named(action) .container() .force_width(137.0) } let counts = app.primary.sim.num_commuters_vehicles(); let pedestrian_details = { let tooltip = Text::from_multiline(vec![ Line("Pedestrians"), Line(format!( "Walking commuters: {}", prettyprint_usize(counts.walking_commuters) )) .secondary(), Line(format!( "To/from public transit: {}", prettyprint_usize(counts.walking_to_from_transit) )) .secondary(), Line(format!( "To/from a car: {}", prettyprint_usize(counts.walking_to_from_car) )) .secondary(), Line(format!( "To/from a bike: {}", prettyprint_usize(counts.walking_to_from_bike) )) .secondary(), ]); let count = prettyprint_usize( counts.walking_commuters + counts.walking_to_from_transit + counts.walking_to_from_car + counts.walking_to_from_bike, ); colored_checkbox( ctx, "Walk", is_pedestrian_enabled, app.cs.unzoomed_pedestrian, "system/assets/meters/pedestrian.svg", &count, tooltip, ) }; let bike_details = { let tooltip = Text::from_multiline(vec![ Line("Cyclists"), Line(prettyprint_usize(counts.cyclists)).secondary(), ]); colored_checkbox( ctx, "Bike", is_bike_enabled, app.cs.unzoomed_bike, "system/assets/meters/bike.svg", &prettyprint_usize(counts.cyclists), tooltip, ) }; let car_details = { let tooltip = Text::from_multiline(vec![ Line("Cars"), Line(format!( "Single-occupancy vehicles: {}", prettyprint_usize(counts.sov_drivers) )) .secondary(), ]); colored_checkbox( ctx, "Car", is_car_enabled, app.cs.unzoomed_car, "system/assets/meters/car.svg", &prettyprint_usize(counts.sov_drivers), tooltip, ) }; let bus_details = { let tooltip = Text::from_multiline(vec![ Line("Public transit"), Line(format!( "{} passengers on {} buses", 
prettyprint_usize(counts.bus_riders), prettyprint_usize(counts.buses) )) .secondary(), Line(format!( "{} passengers on {} trains", prettyprint_usize(counts.train_riders), prettyprint_usize(counts.trains) )) .secondary(), ]); colored_checkbox( ctx, "Bus", is_bus_enabled, app.cs.unzoomed_bus, "system/assets/meters/bus.svg", &prettyprint_usize(counts.bus_riders + counts.train_riders), tooltip, ) }; vec![car_details, bike_details, bus_details, pedestrian_details] } fn make_tool_panel(ctx: &mut EventCtx, app: &App) -> Widget { let buttons = ctx .style() .btn_floating .btn() .image_dims(ScreenDims::square(20.0)) .bg_color(app.cs.inner_panel_bg, ControlState::Default) .padding(8); Widget::col(vec![ (if ctx.canvas.cam_zoom >= app.opts.min_zoom_for_detail { buttons .clone() .image_path("system/assets/minimap/zoom_out_fully.svg") .build_widget(ctx, "zoom out fully") } else { buttons .clone() .image_path("system/assets/minimap/zoom_in_fully.svg") .build_widget(ctx, "zoom in fully") }), buttons .clone() .image_path("system/assets/tools/layers.svg") .hotkey(Key::L) .build_widget(ctx, "change layers"), buttons .clone() .image_path("system/assets/tools/search.svg") .hotkey(Key::K) .build_widget(ctx, "search"), buttons .image_path("system/assets/meters/trip_histogram.svg") .hotkey(Key::Q) .build_widget(ctx, "more data"), ]) }
use abstutil::prettyprint_usize; use map_gui::tools::{MinimapControls, Navigator}; use widgetry::{ ControlState, EventCtx, GfxCtx, HorizontalAlignment, Image, Key, Line, Panel, ScreenDims, Text, VerticalAlignment, Widget, }; use crate::app::App; use crate::app::Transition; use crate::common::Warping; use crate::layer::PickLayer; use crate::sandbox::dashboards::TripTable; pub struct MinimapController; impl MinimapControls<App> for MinimapController { fn has_zorder(&self, app: &App) -> bool { app.opts.dev } fn has_layer(&self, app: &App) -> bool { app.primary.layer.is_some() } fn draw_extra(&self, g: &mut GfxCtx, app: &App) { if let Some(ref l) = app.primary.layer { l.draw_minimap(g); } let mut cache = app.primary.agents.borrow_mut(); cache.draw_unzoomed_agents(g, app); } fn make_unzoomed_panel(&self, ctx: &mut EventCtx, app: &App) -> Panel { let unzoomed_agents = &app.primary.agents.borrow().unzoomed_agents; let is_enabled = [ unzoomed_agents.cars(), unzoomed_agents.bikes(), unzoomed_agents.buses_and_trains(), unzoomed_agents.peds(), ]; Panel::new(Widget::row(vec![ make_tool_panel(ctx, app).align_right(), Widget::col(make_agent_toggles(ctx, app, is_enabled)) .bg(app.cs.panel_bg) .padding(16), ])) .aligned( HorizontalAlignment::Right, VerticalAlignment::BottomAboveOSD, ) .build_custom(ctx) } fn make_legend(&self, ctx: &mut EventCtx, app: &App) -> Widget { let unzoomed_agents = &app.primary.agents.borrow().unzoomed_agents; let is_enabled = [ unzoomed_agents.cars(), unzoomed_agents.bikes(), unzoomed_agents.buses_and_trains(), unzoomed_agents.peds(), ]; Widget::custom_row(make_agent_toggles(ctx, app, is_enabled)) .margin_left(26) } fn make_zoomed_side_panel(&self, ctx: &mut EventCtx, app: &App) -> Widget { make_tool_panel(ctx, app) } fn panel_clicked(&self, ctx: &mut EventCtx, app: &mut App, action: &str) -> Option<Transition> { match action { "search" => { return Some(Transition::Push(Navigator::new(ctx, app))); } "zoom out fully" => { return 
Some(Transition::Push(Warping::new( ctx, app.primary.map.get_bounds().get_rectangle().center(), Some(ctx.canvas.min_zoom()), None, &mut app.primary, ))); } "zoom in fully" => { return Some(Transition::Push(Warping::new( ctx, ctx.canvas.center_to_map_pt(), Some(10.0), None, &mut app.primary, ))); } "change layers" => { return Some(Transition::Push(PickLayer::pick(ctx, app))); } "more data" => { return Some(Transition::Push(Box::new(TripTable::new(ctx, app)))); } _ => unreachable!(), } } fn panel_changed(&self, _: &mut EventCtx, app: &mut App, panel: &Panel) { if panel.has_widget("Car") { app.primary .agents .borrow_mut() .unzoomed_agents .update(panel); } } } fn make_agent_toggles(ctx: &mut EventCtx, app: &App, is_enabled: [bool; 4]) -> Vec<Widget> { use widgetry::{include_labeled_bytes, Color, GeomBatchStack, RewriteColor, Toggle}; let [is_car_enabled, is_bike_enabled, is_bus_enabled, is_pedestrian_enabled] = is_enabled; pub fn colored_checkbox( ctx: &EventCtx, action: &str, is_enabled: bool, color: Color, icon: &str, label: &str, tooltip: Text, ) -> Widget { let buttons = ctx .style() .btn_plain .btn() .label_text(label) .padding(4.0) .tooltip(tooltip) .image_color(RewriteColor::NoOp, ControlState::Default); let icon_batch = Image::from_path(icon) .build_batch(ctx) .expect("invalid svg") .0; let false_btn = { let checkbox = Image::from_bytes(include_labeled_bytes!( "../../../
let counts = app.primary.sim.num_commuters_vehicles(); let pedestrian_details = { let tooltip = Text::from_multiline(vec![ Line("Pedestrians"), Line(format!( "Walking commuters: {}", prettyprint_usize(counts.walking_commuters) )) .secondary(), Line(format!( "To/from public transit: {}", prettyprint_usize(counts.walking_to_from_transit) )) .secondary(), Line(format!( "To/from a car: {}", prettyprint_usize(counts.walking_to_from_car) )) .secondary(), Line(format!( "To/from a bike: {}", prettyprint_usize(counts.walking_to_from_bike) )) .secondary(), ]); let count = prettyprint_usize( counts.walking_commuters + counts.walking_to_from_transit + counts.walking_to_from_car + counts.walking_to_from_bike, ); colored_checkbox( ctx, "Walk", is_pedestrian_enabled, app.cs.unzoomed_pedestrian, "system/assets/meters/pedestrian.svg", &count, tooltip, ) }; let bike_details = { let tooltip = Text::from_multiline(vec![ Line("Cyclists"), Line(prettyprint_usize(counts.cyclists)).secondary(), ]); colored_checkbox( ctx, "Bike", is_bike_enabled, app.cs.unzoomed_bike, "system/assets/meters/bike.svg", &prettyprint_usize(counts.cyclists), tooltip, ) }; let car_details = { let tooltip = Text::from_multiline(vec![ Line("Cars"), Line(format!( "Single-occupancy vehicles: {}", prettyprint_usize(counts.sov_drivers) )) .secondary(), ]); colored_checkbox( ctx, "Car", is_car_enabled, app.cs.unzoomed_car, "system/assets/meters/car.svg", &prettyprint_usize(counts.sov_drivers), tooltip, ) }; let bus_details = { let tooltip = Text::from_multiline(vec![ Line("Public transit"), Line(format!( "{} passengers on {} buses", prettyprint_usize(counts.bus_riders), prettyprint_usize(counts.buses) )) .secondary(), Line(format!( "{} passengers on {} trains", prettyprint_usize(counts.train_riders), prettyprint_usize(counts.trains) )) .secondary(), ]); colored_checkbox( ctx, "Bus", is_bus_enabled, app.cs.unzoomed_bus, "system/assets/meters/bus.svg", &prettyprint_usize(counts.bus_riders + counts.train_riders), tooltip, 
) }; vec![car_details, bike_details, bus_details, pedestrian_details] } fn make_tool_panel(ctx: &mut EventCtx, app: &App) -> Widget { let buttons = ctx .style() .btn_floating .btn() .image_dims(ScreenDims::square(20.0)) .bg_color(app.cs.inner_panel_bg, ControlState::Default) .padding(8); Widget::col(vec![ (if ctx.canvas.cam_zoom >= app.opts.min_zoom_for_detail { buttons .clone() .image_path("system/assets/minimap/zoom_out_fully.svg") .build_widget(ctx, "zoom out fully") } else { buttons .clone() .image_path("system/assets/minimap/zoom_in_fully.svg") .build_widget(ctx, "zoom in fully") }), buttons .clone() .image_path("system/assets/tools/layers.svg") .hotkey(Key::L) .build_widget(ctx, "change layers"), buttons .clone() .image_path("system/assets/tools/search.svg") .hotkey(Key::K) .build_widget(ctx, "search"), buttons .image_path("system/assets/meters/trip_histogram.svg") .hotkey(Key::Q) .build_widget(ctx, "more data"), ]) }
widgetry/icons/checkbox_no_border_unchecked.svg" )) .color(RewriteColor::Change(Color::BLACK, color.alpha(0.3))); let mut row = GeomBatchStack::horizontal(vec![ checkbox.build_batch(ctx).expect("invalid svg").0, icon_batch.clone(), ]); row.spacing(8.0); let row_batch = row.batch(); let bounds = row_batch.get_bounds(); buttons.clone().image_batch(row_batch, bounds) }; let true_btn = { let checkbox = Image::from_bytes(include_labeled_bytes!( "../../../widgetry/icons/checkbox_no_border_checked.svg" )) .color(RewriteColor::Change(Color::BLACK, color)); let mut row = GeomBatchStack::horizontal(vec![ checkbox.build_batch(ctx).expect("invalid svg").0, icon_batch, ]); row.spacing(8.0); let row_batch = row.batch(); let bounds = row_batch.get_bounds(); buttons.image_batch(row_batch, bounds) }; Toggle::new( is_enabled, false_btn.build(ctx, action), true_btn.build(ctx, action), ) .named(action) .container() .force_width(137.0) }
function_block-function_prefix_line
[ { "content": "fn make_btn(ctx: &EventCtx, label: &str, tooltip: &str, is_persisten_split: bool) -> Button {\n\n // If we want to make Dropdown configurable, pass in or expose its button builder?\n\n let builder = if is_persisten_split {\n\n // Quick hacks to make PersistentSplit's dropdown look a little better.\n\n // It's not ideal, but we only use one persistent split in the whole app\n\n // and it's front and center - we'll notice if something breaks.\n\n ctx.style()\n\n .btn_solid\n\n .dropdown()\n\n .padding(EdgeInsets {\n\n top: 15.0,\n\n bottom: 15.0,\n\n left: 8.0,\n\n right: 8.0,\n\n })\n\n .corner_rounding(CornerRounding::CornerRadii(CornerRadii {\n\n top_left: 0.0,\n\n bottom_left: 0.0,\n\n bottom_right: 2.0,\n\n top_right: 2.0,\n\n }))\n\n // override any outline element within persistent split\n\n .outline((0.0, Color::CLEAR), ControlState::Default)\n\n } else {\n\n ctx.style().btn_outline.dropdown().label_text(label)\n\n };\n\n\n\n builder.build(ctx, tooltip)\n\n}\n", "file_path": "widgetry/src/widgets/dropdown.rs", "rank": 0, "score": 603461.4304314916 }, { "content": "/// Creates the top row for any layer panel.\n\npub fn header(ctx: &mut EventCtx, name: &str) -> Widget {\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/layers.svg\")\n\n .into_widget(ctx)\n\n .centered_vert(),\n\n name.text_widget(ctx).centered_vert(),\n\n ctx.style().btn_close_widget(ctx),\n\n ])\n\n}\n\n\n\npub const PANEL_PLACEMENT: (HorizontalAlignment, VerticalAlignment) = (\n\n HorizontalAlignment::Percent(0.02),\n\n VerticalAlignment::Percent(0.2),\n\n);\n", "file_path": "game/src/layer/mod.rs", "rank": 2, "score": 484535.5374963428 }, { "content": "pub fn execute(ctx: &mut EventCtx, app: &mut App, id: ID, action: &str) -> Transition {\n\n let mut tut = app.session.tutorial.as_mut().unwrap();\n\n let response = match (id, action.as_ref()) {\n\n (ID::Car(c), \"draw WASH ME\") => {\n\n let is_parked = app\n\n .primary\n\n .sim\n\n 
.agent_to_trip(AgentID::Car(ESCORT))\n\n .is_none();\n\n if c == ESCORT {\n\n if is_parked {\n\n tut.prank_done = true;\n\n PopupMsg::new(\n\n ctx,\n\n \"Prank in progress\",\n\n vec![\"You quickly scribble on the window...\"],\n\n )\n\n } else {\n\n PopupMsg::new(\n\n ctx,\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 3, "score": 482174.79744546406 }, { "content": "pub fn execute(ctx: &mut EventCtx, app: &mut App, id: ID, action: &str) -> Transition {\n\n match (id, action.as_ref()) {\n\n (ID::Building(b), \"start a trip here\") => {\n\n Transition::Push(spawner::AgentSpawner::new(ctx, app, Some(b)))\n\n }\n\n (ID::Intersection(id), \"spawn agents here\") => {\n\n spawn_agents_around(id, app);\n\n Transition::Keep\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "game/src/sandbox/gameplay/freeform/mod.rs", "rank": 4, "score": 476802.3416461323 }, { "content": "fn link(ctx: &mut EventCtx, label: &str, url: &str) -> Widget {\n\n ctx.style()\n\n .btn_plain\n\n .text(label)\n\n .build_widget(ctx, format!(\"open {}\", url))\n\n}\n\n\n\nimpl SimpleState<App> for Credits {\n\n fn on_click(&mut self, _: &mut EventCtx, _: &mut App, x: &str, _: &Panel) -> Transition {\n\n match x {\n\n \"close\" | \"Back\" => Transition::Pop,\n\n x => {\n\n if let Some(url) = x.strip_prefix(\"open \") {\n\n open_browser(url);\n\n return Transition::Keep;\n\n }\n\n\n\n unreachable!()\n\n }\n\n }\n", "file_path": "santa/src/title.rs", "rank": 5, "score": 462785.12014108757 }, { "content": "pub fn custom_bar(ctx: &mut EventCtx, filled_color: Color, pct_full: f64, txt: Text) -> Widget {\n\n let total_width = 300.0;\n\n let height = 32.0;\n\n let radius = 4.0;\n\n\n\n let mut batch = GeomBatch::new();\n\n // Background\n\n batch.push(\n\n Color::hex(\"#666666\"),\n\n Polygon::rounded_rectangle(total_width, height, radius),\n\n );\n\n // Foreground\n\n if let Some(poly) = Polygon::maybe_rounded_rectangle(pct_full * total_width, height, radius) {\n\n 
batch.push(filled_color, poly);\n\n }\n\n // Text\n\n let label = txt.render_autocropped(ctx);\n\n let dims = label.get_dims();\n\n batch.append(label.translate(10.0, height / 2.0 - dims.height / 2.0));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "santa/src/meters.rs", "rank": 6, "score": 452913.2362091851 }, { "content": "pub fn warp_to_id(ctx: &mut EventCtx, app: &mut App, input: &str) -> Transition {\n\n if let Some(t) = inner_warp_to_id(ctx, app, input) {\n\n t\n\n } else {\n\n Transition::Replace(PopupMsg::new(\n\n ctx,\n\n \"Bad warp ID\",\n\n vec![format!(\"{} isn't a valid ID\", input)],\n\n ))\n\n }\n\n}\n\n\n", "file_path": "game/src/common/warp.rs", "rank": 7, "score": 448529.5497441493 }, { "content": "fn make_controls(ctx: &mut EventCtx, app: &App, opts: &Options, legend: Option<Widget>) -> Panel {\n\n let (total_ppl, ppl_in_bldg, ppl_off_map) = app.primary.sim.num_ppl();\n\n\n\n let mut col = vec![\n\n header(\n\n ctx,\n\n &format!(\"Population: {}\", prettyprint_usize(total_ppl)),\n\n ),\n\n Widget::row(vec![\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/home.svg\").into_widget(ctx),\n\n Line(prettyprint_usize(ppl_in_bldg))\n\n .small()\n\n .into_widget(ctx),\n\n ]),\n\n Line(format!(\"Off-map: {}\", prettyprint_usize(ppl_off_map)))\n\n .small()\n\n .into_widget(ctx),\n\n ])\n\n .centered(),\n", "file_path": "game/src/layer/population.rs", "rank": 8, "score": 446820.1565977798 }, { "content": "fn make_controls(ctx: &mut EventCtx, app: &App, opts: &Options, legend: Option<Widget>) -> Panel {\n\n let model = app.primary.sim.get_pandemic_model().unwrap();\n\n let pct = 100.0 / (model.count_total() as f64);\n\n\n\n let mut col = vec![\n\n header(ctx, \"Pandemic model\"),\n\n Text::from_multiline(vec![\n\n Line(format!(\n\n \"{} Sane ({:.1}%)\",\n\n prettyprint_usize(model.count_sane()),\n\n (model.count_sane() as f64) * pct\n\n )),\n\n Line(format!(\n\n \"{} Exposed ({:.1}%)\",\n\n prettyprint_usize(model.count_exposed()),\n\n 
(model.count_exposed() as f64) * pct\n\n )),\n\n Line(format!(\n\n \"{} Infected ({:.1}%)\",\n\n prettyprint_usize(model.count_infected()),\n", "file_path": "game/src/layer/pandemic.rs", "rank": 9, "score": 446820.1565977797 }, { "content": "fn inner_warp_to_id(ctx: &mut EventCtx, app: &mut App, line: &str) -> Option<Transition> {\n\n if line.is_empty() {\n\n return None;\n\n }\n\n if line == \"j\" {\n\n if let Some((pt, zoom)) = app.primary.last_warped_from {\n\n return Some(Transition::Replace(Warping::new(\n\n ctx,\n\n pt,\n\n Some(zoom),\n\n None,\n\n &mut app.primary,\n\n )));\n\n }\n\n return None;\n\n }\n\n\n\n let id = match usize::from_str_radix(&line[1..line.len()], 10) {\n\n Ok(idx) => match line.chars().next().unwrap() {\n\n 'r' => {\n", "file_path": "game/src/common/warp.rs", "rank": 11, "score": 444794.39760238404 }, { "content": "fn make_top_panel(ctx: &mut EventCtx, app: &App, can_undo: bool, can_redo: bool) -> Panel {\n\n let row = vec![\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(\"Finish\")\n\n .hotkey(Key::Enter)\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Preview\")\n\n .hotkey(lctrl(Key::P))\n\n .build_def(ctx),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/undo.svg\")\n\n .disabled(!can_undo)\n\n .hotkey(lctrl(Key::Z))\n\n .build_widget(ctx, \"undo\"),\n\n ctx.style()\n\n .btn_plain\n", "file_path": "game/src/edit/traffic_signals/mod.rs", "rank": 12, "score": 441655.1352582674 }, { "content": "pub fn area(ctx: &EventCtx, app: &App, _: &mut Details, id: AreaID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(id.to_string()).small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, area_body(ctx, app, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "game/src/info/debug.rs", "rank": 13, "score": 438798.65250469174 }, { "content": "fn make_vehicle_panel(ctx: &mut EventCtx, app: &App) -> Panel {\n\n let mut buttons = Vec::new();\n\n for name in 
&app.session.vehicles_unlocked {\n\n let vehicle = Vehicle::get(name);\n\n let batch = vehicle\n\n .animate(ctx.prerender, Time::START_OF_DAY)\n\n .scale(10.0);\n\n\n\n buttons.push(\n\n if name == &app.session.current_vehicle {\n\n batch\n\n .into_widget(ctx)\n\n .container()\n\n .padding(5)\n\n .outline((2.0, Color::WHITE))\n\n } else {\n\n let normal = batch.clone().color(RewriteColor::MakeGrayscale);\n\n let hovered = batch;\n\n ButtonBuilder::new()\n\n .custom_batch(normal, ControlState::Default)\n", "file_path": "santa/src/before_level.rs", "rank": 14, "score": 426894.82945828105 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgInfo(id)),\n\n info_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 15, "score": 424611.09545327805 }, { "content": "pub fn people(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::BldgPeople(id)),\n\n people_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 16, "score": 424611.09545327805 }, { "content": "fn make_panel(ctx: &mut EventCtx, app: &App) -> Panel {\n\n Panel::new(Widget::col(vec![\n\n Widget::row(vec![\n\n Line(\"Commute map by block\")\n\n .small_heading()\n\n .into_widget(ctx),\n\n ctx.style().btn_close_widget(ctx),\n\n ]),\n\n Toggle::choice(ctx, \"from / to this block\", \"from\", \"to\", Key::Space, true),\n\n Toggle::switch(ctx, \"include borders\", None, true),\n\n Widget::row(vec![\n\n \"Departing from:\".text_widget(ctx).margin_right(20),\n\n Slider::area(ctx, 0.15 * ctx.canvas.window_width, 0.0).named(\"depart from\"),\n\n ]),\n\n Widget::row(vec![\n\n \"Departing until:\".text_widget(ctx).margin_right(20),\n\n Slider::area(ctx, 0.15 * 
ctx.canvas.window_width, 1.0).named(\"depart until\"),\n\n ]),\n\n checkbox_per_mode(ctx, app, &TripMode::all().into_iter().collect()),\n\n ColorLegend::gradient(ctx, &app.cs.good_to_bad_red, vec![\"0\", \"0\"]).named(\"scale\"),\n\n \"None selected\".text_widget(ctx).named(\"current\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Right, VerticalAlignment::Top)\n\n .build(ctx)\n\n}\n", "file_path": "game/src/sandbox/dashboards/commuter.rs", "rank": 17, "score": 422025.46045816457 }, { "content": "pub fn stop(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusStopID) -> Widget {\n\n let header = Widget::row(vec![\n\n Line(\"Bus stop\").small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]);\n\n\n\n Widget::custom_col(vec![header, stop_body(ctx, app, details, id).tab_body(ctx)])\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 18, "score": 420008.65921408427 }, { "content": "pub fn bus_status(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n Widget::custom_col(vec![\n\n bus_header(ctx, app, details, id, Tab::BusStatus(id)),\n\n bus_status_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 19, "score": 420008.65921408427 }, { "content": "pub fn route(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusRouteID) -> Widget {\n\n let header = {\n\n let map = &app.primary.map;\n\n let route = map.get_br(id);\n\n\n\n Widget::row(vec![\n\n Line(format!(\"Route {}\", route.short_name))\n\n .small_heading()\n\n .into_widget(ctx),\n\n header_btns(ctx),\n\n ])\n\n };\n\n\n\n Widget::custom_col(vec![\n\n header,\n\n route_body(ctx, app, details, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 20, "score": 420008.65921408427 }, { "content": "pub fn info(ctx: &mut EventCtx, app: &App, details: &mut Details, id: ParkingLotID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, details, id, Tab::ParkingLot(id)),\n\n 
info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/parking_lot.rs", "rank": 21, "score": 415576.6176443477 }, { "content": "// TODO Kinda misnomer\n\npub fn tool_panel(ctx: &mut EventCtx) -> Panel {\n\n Panel::new(Widget::row(vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/home.svg\")\n\n .hotkey(Key::Escape)\n\n .build_widget(ctx, \"back\"),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/settings.svg\")\n\n .build_widget(ctx, \"settings\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Left, VerticalAlignment::BottomAboveOSD)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 22, "score": 415002.59811394813 }, { "content": "fn make_changelist(ctx: &mut EventCtx, app: &App) -> Panel {\n\n // TODO Support redo. Bit harder here to reset the redo_stack when the edits\n\n // change, because nested other places modify it too.\n\n let edits = app.primary.map.get_edits();\n\n let mut col = vec![\n\n Widget::row(vec![\n\n ctx.style()\n\n .btn_outline\n\n .popup(&edits.edits_name)\n\n .hotkey(lctrl(Key::P))\n\n .build_widget(ctx, \"manage proposals\"),\n\n \"autosaved\"\n\n .text_widget(ctx)\n\n .container()\n\n .padding(10)\n\n .bg(Color::hex(\"#5D9630\")),\n\n ]),\n\n ColorLegend::row(\n\n ctx,\n\n app.cs.edits_layer,\n", "file_path": "game/src/edit/mod.rs", "rank": 23, "score": 412389.20619312034 }, { "content": "fn make_topcenter(ctx: &mut EventCtx, app: &App) -> Panel {\n\n Panel::new(Widget::col(vec![\n\n Line(\"Editing map\")\n\n .small_heading()\n\n .into_widget(ctx)\n\n .centered_horiz(),\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(format!(\n\n \"Finish & resume from {}\",\n\n app.primary\n\n .suspended_sim\n\n .as_ref()\n\n .unwrap()\n\n .time()\n\n .ampm_tostring()\n\n ))\n\n .hotkey(Key::Escape)\n\n .build_widget(ctx, \"finish editing\"),\n\n ]))\n\n .aligned(HorizontalAlignment::Center, VerticalAlignment::Top)\n\n .build(ctx)\n\n}\n\n\n", "file_path": 
"game/src/edit/mod.rs", "rank": 24, "score": 412389.20619312034 }, { "content": "pub fn debug(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneDebug(id)),\n\n debug_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 25, "score": 410261.7383150162 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: IntersectionID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::IntersectionInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/intersection.rs", "rank": 26, "score": 410261.73831501626 }, { "content": "pub fn info(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID) -> Widget {\n\n Widget::custom_col(vec![\n\n header(ctx, app, details, id, Tab::LaneInfo(id)),\n\n info_body(ctx, app, id).tab_body(ctx),\n\n ])\n\n}\n\n\n", "file_path": "game/src/info/lane.rs", "rank": 27, "score": 410261.73831501626 }, { "content": "pub fn make_bar(ctx: &mut EventCtx, filled_color: Color, value: usize, max: usize) -> Widget {\n\n let pct_full = if max == 0 {\n\n 0.0\n\n } else {\n\n (value as f64) / (max as f64)\n\n };\n\n let txt = Text::from(format!(\n\n \"{} / {}\",\n\n prettyprint_usize(value),\n\n prettyprint_usize(max)\n\n ));\n\n custom_bar(ctx, filled_color, pct_full, txt)\n\n}\n", "file_path": "santa/src/meters.rs", "rank": 28, "score": 399472.90219235024 }, { "content": "fn mouseover_unzoomed_agent_circle(ctx: &mut EventCtx, app: &mut App) {\n\n let cursor = if let Some(pt) = ctx.canvas.get_cursor_in_map_space() {\n\n pt\n\n } else {\n\n return;\n\n };\n\n\n\n for (id, _, _) in app\n\n .primary\n\n .agents\n\n .borrow_mut()\n\n .calculate_unzoomed_agents(ctx, app)\n\n .query(\n\n Circle::new(cursor, Distance::meters(3.0))\n\n .get_bounds()\n\n .as_bbox(),\n\n )\n\n {\n\n if let Some(pt) = app\n\n .primary\n\n 
.sim\n\n .canonical_pt_for_agent(*id, &app.primary.map)\n\n {\n\n if Circle::new(pt, unzoomed_agent_radius(id.to_vehicle_type())).contains_pt(cursor) {\n\n app.primary.current_selection = Some(ID::from_agent(*id));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/mod.rs", "rank": 29, "score": 398868.5849263099 }, { "content": "fn make_upzone_panel(ctx: &mut EventCtx, app: &App, num_picked: usize) -> Panel {\n\n // Don't overwhelm players on the very first level.\n\n if app.session.upzones_unlocked == 0 {\n\n return Panel::new(\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(\"Start game\")\n\n .hotkey(Key::Enter)\n\n .build_def(ctx)\n\n .container(),\n\n )\n\n .aligned(\n\n HorizontalAlignment::RightInset,\n\n VerticalAlignment::BottomInset,\n\n )\n\n .build(ctx);\n\n }\n\n\n\n Panel::new(Widget::col(vec![\n\n Widget::row(vec![\n", "file_path": "santa/src/before_level.rs", "rank": 30, "score": 396069.86090458906 }, { "content": "pub fn apply_map_edits(ctx: &mut EventCtx, app: &mut App, edits: MapEdits) {\n\n let mut timer = Timer::new(\"apply map edits\");\n\n\n\n let (roads_changed, turns_deleted, turns_added, mut modified_intersections) =\n\n app.primary.map.must_apply_edits(edits);\n\n\n\n if !roads_changed.is_empty() || !modified_intersections.is_empty() {\n\n app.primary\n\n .draw_map\n\n .draw_all_unzoomed_roads_and_intersections =\n\n DrawMap::regenerate_unzoomed_layer(&app.primary.map, &app.cs, ctx, &mut timer);\n\n }\n\n\n\n for r in roads_changed {\n\n let road = app.primary.map.get_r(r);\n\n app.primary.draw_map.roads[r.0].clear_rendering();\n\n\n\n // An edit to one lane potentially affects markings in all lanes in the same road, because\n\n // of one-way markings, driving lines, etc.\n\n for l in road.all_lanes() {\n", "file_path": "game/src/edit/mod.rs", "rank": 31, "score": 390373.24608695426 }, { "content": "fn traffic_body(ctx: &mut EventCtx, app: &App, id: LaneID, opts: &DataOptions) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let 
map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n // Since this applies to the entire road, ignore lane type.\n\n let mut txt = Text::from(\"Traffic over entire road, not just this lane\");\n\n txt.add_line(format!(\n\n \"Since midnight: {} commuters and vehicles crossed\",\n\n prettyprint_usize(app.primary.sim.get_analytics().road_thruput.total_for(r.id))\n\n ));\n\n rows.push(txt.into_widget(ctx));\n\n\n\n rows.push(opts.to_controls(ctx, app));\n\n\n\n let r = map.get_l(id).parent;\n\n let time = if opts.show_end_of_day {\n\n app.primary.sim.get_end_of_day()\n", "file_path": "game/src/info/lane.rs", "rank": 32, "score": 389268.95621579635 }, { "content": "fn traffic_body(ctx: &mut EventCtx, app: &App, id: IntersectionID, opts: &DataOptions) -> Widget {\n\n let mut rows = vec![];\n\n let mut txt = Text::new();\n\n\n\n txt.add_line(format!(\n\n \"Since midnight: {} commuters and vehicles crossed\",\n\n prettyprint_usize(\n\n app.primary\n\n .sim\n\n .get_analytics()\n\n .intersection_thruput\n\n .total_for(id)\n\n )\n\n ));\n\n rows.push(txt.into_widget(ctx));\n\n\n\n rows.push(opts.to_controls(ctx, app));\n\n\n\n let time = if opts.show_end_of_day {\n\n app.primary.sim.get_end_of_day()\n", "file_path": "game/src/info/intersection.rs", "rank": 33, "score": 389268.95621579635 }, { "content": "fn challenge_header(ctx: &mut EventCtx, title: &str) -> Widget {\n\n Widget::row(vec![\n\n Line(title).small_heading().into_widget(ctx).centered_vert(),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/info.svg\")\n\n .build_widget(ctx, \"instructions\")\n\n .centered_vert(),\n\n Widget::vert_separator(ctx, 50.0),\n\n ctx.style()\n\n .btn_outline\n\n .icon_text(\"system/assets/tools/pencil.svg\", \"Edit map\")\n\n .hotkey(lctrl(Key::E))\n\n .build_widget(ctx, \"edit map\")\n\n .centered_vert(),\n\n ])\n\n .padding(5)\n\n}\n\n\n\npub struct FinalScore {\n", "file_path": "game/src/sandbox/gameplay/mod.rs", "rank": 34, 
"score": 381323.251692981 }, { "content": "fn scatter_plot(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n if app.has_prebaked().is_none() {\n\n return Widget::nothing();\n\n }\n\n\n\n let points = filter.get_trips(app);\n\n if points.is_empty() {\n\n return Widget::nothing();\n\n }\n\n\n\n Widget::col(vec![\n\n Line(\"Trip time before and after\")\n\n .small_heading()\n\n .into_widget(ctx),\n\n CompareTimes::new(\n\n ctx,\n\n format!(\n\n \"Trip time before \\\"{}\\\"\",\n\n app.primary.map.get_edits().edits_name\n\n ),\n", "file_path": "game/src/sandbox/dashboards/summaries.rs", "rank": 35, "score": 380969.43240500987 }, { "content": "fn contingency_table(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n if app.has_prebaked().is_none() {\n\n return Widget::nothing();\n\n }\n\n\n\n let total_width = 500.0;\n\n let total_height = 300.0;\n\n\n\n let points = filter.get_trips(app);\n\n if points.is_empty() {\n\n return Widget::nothing();\n\n }\n\n\n\n // bucket by trip duration _before_ changes\n\n let num_buckets = 10;\n\n let (_, endpts) = points\n\n .iter()\n\n .map(|(b, _a)| b)\n\n .max()\n\n .unwrap()\n", "file_path": "game/src/sandbox/dashboards/summaries.rs", "rank": 36, "score": 380969.43240500987 }, { "content": "fn summary_boxes(ctx: &mut EventCtx, app: &App, filter: &Filter) -> Widget {\n\n if app.has_prebaked().is_none() {\n\n return Widget::nothing();\n\n }\n\n\n\n let mut num_same = 0;\n\n let mut num_faster = 0;\n\n let mut num_slower = 0;\n\n let mut sum_faster = Duration::ZERO;\n\n let mut sum_slower = Duration::ZERO;\n\n for (_, b, a, mode) in app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .both_finished_trips(app.primary.sim.time(), app.prebaked())\n\n {\n\n if !filter.modes.contains(&mode) {\n\n continue;\n\n }\n\n let same = if let Some(pct) = filter.changes_pct {\n", "file_path": "game/src/sandbox/dashboards/summaries.rs", "rank": 37, "score": 380969.43240500987 }, { "content": "fn schedule_body(ctx: &mut 
EventCtx, app: &App, id: PersonID) -> Widget {\n\n let mut rows = vec![];\n\n let person = app.primary.sim.get_person(id);\n\n let mut rng = XorShiftRng::seed_from_u64(id.0 as u64);\n\n\n\n // TODO Proportional 24-hour timeline would be easier to understand\n\n let mut last_t = Time::START_OF_DAY;\n\n for t in &person.trips {\n\n let trip = app.primary.sim.trip_info(*t);\n\n let at = match trip.start {\n\n TripEndpoint::Bldg(b) => {\n\n let b = app.primary.map.get_b(b);\n\n if b.amenities.is_empty() {\n\n b.address.clone()\n\n } else {\n\n let list = b\n\n .amenities\n\n .iter()\n\n .map(|a| a.names.get(app.opts.language.as_ref()))\n\n .collect::<Vec<_>>();\n", "file_path": "game/src/info/person.rs", "rank": 38, "score": 380969.43240500987 }, { "content": "fn build_panel(ctx: &mut EventCtx, app: &App, start: &Building, isochrone: &Isochrone) -> Panel {\n\n let mut rows = Vec::new();\n\n\n\n rows.push(\n\n Line(\"15-minute neighborhood explorer\")\n\n .small_heading()\n\n .into_widget(ctx),\n\n );\n\n\n\n rows.push(\n\n ctx.style()\n\n .btn_popup_icon_text(\n\n \"system/assets/tools/map.svg\",\n\n nice_map_name(app.map.get_name()),\n\n )\n\n .hotkey(lctrl(Key::L))\n\n .build_widget(ctx, \"change map\"),\n\n );\n\n\n\n rows.push(\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 39, "score": 377810.1035092074 }, { "content": "fn traffic_signal_body(ctx: &mut EventCtx, app: &App, id: IntersectionID) -> Widget {\n\n let mut rows = vec![];\n\n // Slightly inaccurate -- the turn rendering may slightly exceed the intersection polygon --\n\n // but this is close enough.\n\n let bounds = app.primary.map.get_i(id).polygon.get_bounds();\n\n // Pick a zoom so that we fit a fixed width in pixels\n\n let zoom = 150.0 / bounds.width();\n\n let bbox = Polygon::rectangle(zoom * bounds.width(), zoom * bounds.height());\n\n\n\n let signal = app.primary.map.get_traffic_signal(id);\n\n {\n\n let mut txt = Text::new();\n\n txt.add_line(Line(format!(\"{} stages\", 
signal.stages.len())).small_heading());\n\n txt.add_line(format!(\"Signal offset: {}\", signal.offset));\n\n {\n\n let mut total = Duration::ZERO;\n\n for s in &signal.stages {\n\n total += s.stage_type.simple_duration();\n\n }\n\n // TODO Say \"normally\" or something?\n", "file_path": "game/src/info/intersection.rs", "rank": 40, "score": 376645.06013213407 }, { "content": "fn current_demand_body(ctx: &mut EventCtx, app: &App, id: IntersectionID) -> Widget {\n\n let mut rows = vec![];\n\n let mut total_demand = 0;\n\n let mut demand_per_movement: Vec<(&PolyLine, usize)> = Vec::new();\n\n for m in app.primary.map.get_traffic_signal(id).movements.values() {\n\n let demand = app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .demand\n\n .get(&m.id)\n\n .cloned()\n\n .unwrap_or(0);\n\n if demand > 0 {\n\n total_demand += demand;\n\n demand_per_movement.push((&m.geom, demand));\n\n }\n\n }\n\n\n\n let mut batch = GeomBatch::new();\n", "file_path": "game/src/info/intersection.rs", "rank": 41, "score": 376645.0601321341 }, { "content": "fn search_osm(filter: String, ctx: &mut EventCtx, app: &mut App) -> Transition {\n\n let mut num_matches = 0;\n\n let mut batch = GeomBatch::new();\n\n\n\n // TODO Case insensitive\n\n let map = &app.primary.map;\n\n let color = Color::RED.alpha(0.8);\n\n for r in map.all_roads() {\n\n if r.osm_tags\n\n .inner()\n\n .iter()\n\n .any(|(k, v)| format!(\"{} = {}\", k, v).contains(&filter))\n\n {\n\n num_matches += 1;\n\n batch.push(color, r.get_thick_polygon(map));\n\n }\n\n }\n\n for a in map.all_areas() {\n\n if a.osm_tags\n\n .inner()\n", "file_path": "game/src/debug/mod.rs", "rank": 42, "score": 374432.6973778194 }, { "content": "fn people_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n // Two caveats about these counts:\n\n // 1) A person might use multiple modes through the day, but this just picks a single category.\n\n // 2) Only people currently in the 
building currently are counted, whether or not that's their\n\n // home.\n\n let mut drivers = 0;\n\n let mut cyclists = 0;\n\n let mut others = 0;\n\n\n\n let mut ppl: Vec<(Time, Widget)> = Vec::new();\n\n for p in app.primary.sim.bldg_to_people(id) {\n\n let person = app.primary.sim.get_person(p);\n\n\n\n let mut has_car = false;\n\n let mut has_bike = false;\n\n for vehicle in &person.vehicles {\n\n if vehicle.vehicle_type == VehicleType::Car {\n\n has_car = true;\n", "file_path": "game/src/info/building.rs", "rank": 43, "score": 374263.3892757745 }, { "content": "fn bio_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: PersonID) -> Widget {\n\n let mut rows = vec![];\n\n let person = app.primary.sim.get_person(id);\n\n let mut rng = XorShiftRng::seed_from_u64(id.0 as u64);\n\n\n\n let mut svg_data = Vec::new();\n\n svg_face::generate_face(&mut svg_data, &mut rng).unwrap();\n\n let batch = GeomBatch::load_svg_bytes_uncached(&svg_data).autocrop();\n\n let dims = batch.get_dims();\n\n let batch = batch.scale((200.0 / dims.width).min(200.0 / dims.height));\n\n rows.push(batch.into_widget(ctx).centered_horiz());\n\n\n\n let nickname = petname::Petnames::default().generate(&mut rng, 2, \" \");\n\n let age = rng.gen_range(5..100);\n\n\n\n let mut table = vec![(\"Nickname\", nickname), (\"Age\", age.to_string())];\n\n if app.opts.dev {\n\n table.push((\"Debug ID\", format!(\"{:?}\", person.orig_id)));\n\n }\n\n rows.extend(make_table(ctx, table));\n", "file_path": "game/src/info/person.rs", "rank": 44, "score": 374263.3892757745 }, { "content": "fn info_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BuildingID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let b = app.primary.map.get_b(id);\n\n\n\n let mut kv = Vec::new();\n\n\n\n kv.push((\"Address\", b.address.clone()));\n\n if let Some(ref names) = b.name {\n\n kv.push((\"Name\", names.get(app.opts.language.as_ref()).to_string()));\n\n }\n\n if app.opts.dev {\n\n kv.push((\"OSM ID\", 
format!(\"{}\", b.orig_id.inner())));\n\n }\n\n\n\n let num_spots = b.num_parking_spots();\n\n if app.primary.sim.infinite_parking() {\n\n kv.push((\n\n \"Parking\",\n\n format!(\n", "file_path": "game/src/info/building.rs", "rank": 45, "score": 374263.3892757745 }, { "content": "fn info_body(ctx: &mut EventCtx, app: &App, id: ParkingLotID) -> Widget {\n\n let mut rows = vec![];\n\n let pl = app.primary.map.get_pl(id);\n\n let capacity = pl.capacity();\n\n\n\n rows.push(\n\n format!(\n\n \"{} / {} spots available\",\n\n prettyprint_usize(app.primary.sim.get_free_lot_spots(pl.id).len()),\n\n prettyprint_usize(capacity)\n\n )\n\n .text_widget(ctx),\n\n );\n\n\n\n let mut series = vec![Series {\n\n label: format!(\"After \\\"{}\\\"\", app.primary.map.get_edits().edits_name),\n\n color: app.cs.after_changes,\n\n pts: app.primary.sim.get_analytics().parking_lot_availability(\n\n app.primary.sim.time(),\n\n pl.id,\n", "file_path": "game/src/info/parking_lot.rs", "rank": 46, "score": 372496.91472054843 }, { "content": "fn parked_car_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n // TODO prev trips, next trips, etc\n\n let mut rows = vec![];\n\n\n\n let p = app.primary.sim.get_owner_of_car(id).unwrap();\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\"Owned by {}\", p))\n\n .build_def(ctx),\n\n );\n\n details.hyperlinks.insert(\n\n format!(\"Owned by {}\", p),\n\n Tab::PersonTrips(p, BTreeMap::new()),\n\n );\n\n\n\n if let Some(p) = app.primary.sim.lookup_parked_car(id) {\n\n match p.spot {\n\n ParkingSpot::Onstreet(_, _) | ParkingSpot::Lot(_, _) => {\n\n ctx.canvas.center_on_map_pt(\n", "file_path": "game/src/info/person.rs", "rank": 47, "score": 370421.4107116003 }, { "content": "fn stop_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusStopID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let bs = app.primary.map.get_bs(id);\n\n let sim = &app.primary.sim;\n\n\n\n 
rows.push(Line(&bs.name).into_widget(ctx));\n\n\n\n let all_arrivals = &sim.get_analytics().bus_arrivals;\n\n for r in app.primary.map.get_routes_serving_stop(id) {\n\n // Full names can overlap, so include the ID\n\n let label = format!(\"{} ({})\", r.full_name, r.id);\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\"Route {}\", r.short_name))\n\n .build_widget(ctx, &label),\n\n );\n\n details.hyperlinks.insert(label, Tab::BusRoute(r.id));\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 48, "score": 370421.4107116003 }, { "content": "fn route_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: BusRouteID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let route = map.get_br(id);\n\n rows.push(\n\n Text::from(&route.full_name)\n\n .wrap_to_pct(ctx, 20)\n\n .into_widget(ctx),\n\n );\n\n\n\n if app.opts.dev {\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Open OSM relation\")\n\n .build_widget(ctx, format!(\"open {}\", route.osm_rel_id)),\n\n );\n\n }\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 49, "score": 370421.41071160045 }, { "content": "fn bus_status_body(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let route = app\n\n .primary\n\n .map\n\n .get_br(app.primary.sim.bus_route_id(id).unwrap());\n\n\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\"Serves route {}\", route.short_name))\n\n .build_def(ctx),\n\n );\n\n details.hyperlinks.insert(\n\n format!(\"Serves route {}\", route.short_name),\n\n Tab::BusRoute(route.id),\n\n );\n\n\n\n rows.push(\n\n Line(format!(\n\n \"Currently has {} passengers\",\n\n app.primary.sim.num_transit_passengers(id),\n\n ))\n\n .into_widget(ctx),\n\n );\n\n\n\n Widget::col(rows)\n\n}\n\n\n", "file_path": "game/src/info/bus.rs", "rank": 50, "score": 370421.4107116003 }, { "content": "pub fn color_for_agent_type(app: &App, a: AgentType) -> Color {\n\n match a 
{\n\n AgentType::Pedestrian => app.cs.unzoomed_pedestrian,\n\n AgentType::Bike => app.cs.unzoomed_bike,\n\n AgentType::Bus | AgentType::Train => app.cs.unzoomed_bus,\n\n AgentType::TransitRider => app.cs.bus_trip,\n\n AgentType::Car => app.cs.unzoomed_car,\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 51, "score": 367523.54135702905 }, { "content": "/// Make it clear the map can't be interacted with right now.\n\npub fn grey_out_map(g: &mut GfxCtx, app: &dyn AppLike) {\n\n g.fork_screenspace();\n\n // TODO - OSD height\n\n g.draw_polygon(\n\n app.cs().fade_map_dark,\n\n Polygon::rectangle(g.canvas.window_width, g.canvas.window_height),\n\n );\n\n g.unfork();\n\n}\n\n\n", "file_path": "map_gui/src/tools/mod.rs", "rank": 52, "score": 367248.70413095935 }, { "content": "fn unlocked_level(ctx: &mut EventCtx, app: &App, level: &Level, idx: usize) -> Widget {\n\n let normal = level_btn(ctx, app, level, idx);\n\n let hovered = normal\n\n .clone()\n\n .color(RewriteColor::Change(Color::WHITE, Color::WHITE.alpha(0.6)));\n\n\n\n ButtonBuilder::new()\n\n .custom_batch(normal, ControlState::Default)\n\n .custom_batch(hovered, ControlState::Hovered)\n\n .build_widget(ctx, &level.title)\n\n}\n\n\n", "file_path": "santa/src/title.rs", "rank": 53, "score": 367038.16259443684 }, { "content": "// TODO Preview the map, add padding, add the linear gradient...\n\nfn locked_level(ctx: &mut EventCtx, app: &App, level: &Level, idx: usize) -> Widget {\n\n let mut batch = level_btn(ctx, app, level, idx);\n\n let hitbox = batch.get_bounds().get_rectangle();\n\n let center = hitbox.center();\n\n batch.push(app.cs.fade_map_dark, hitbox);\n\n batch.append(GeomBatch::load_svg(ctx, \"system/assets/tools/locked.svg\").centered_on(center));\n\n batch.into_widget(ctx)\n\n}\n\n\n", "file_path": "santa/src/title.rs", "rank": 54, "score": 367038.16259443684 }, { "content": "fn make_controls(ctx: &mut EventCtx, tabs: &mut TabController) -> Panel {\n\n Panel::new(Widget::col(vec![\n\n 
Text::from(Line(\"widgetry demo\").big_heading_styled()).into_widget(ctx),\n\n Widget::col(vec![\n\n Text::from(\n\n \"Click and drag the background to pan, use touchpad or scroll wheel to zoom\",\n\n )\n\n .into_widget(ctx),\n\n Widget::row(vec![\n\n ctx.style()\n\n .btn_outline\n\n .text(\"New faces\")\n\n .hotkey(Key::F)\n\n .build_widget(ctx, \"generate new faces\"),\n\n Toggle::switch(ctx, \"Draw scrollable canvas\", None, true),\n\n Toggle::switch(ctx, \"Show timeseries\", lctrl(Key::T), false),\n\n ]),\n\n \"Stopwatch: ...\"\n\n .text_widget(ctx)\n\n .named(\"stopwatch\")\n", "file_path": "widgetry_demo/src/lib.rs", "rank": 55, "score": 365467.4031844058 }, { "content": "fn bus_header(ctx: &mut EventCtx, app: &App, details: &mut Details, id: CarID, tab: Tab) -> Widget {\n\n let route = app.primary.sim.bus_route_id(id).unwrap();\n\n\n\n if let Some(pt) = app\n\n .primary\n\n .sim\n\n .canonical_pt_for_agent(AgentID::Car(id), &app.primary.map)\n\n {\n\n ctx.canvas.center_on_map_pt(pt);\n\n }\n\n\n\n let mut rows = vec![];\n\n rows.push(Widget::row(vec![\n\n Line(format!(\n\n \"{} (route {})\",\n\n id,\n\n app.primary.map.get_br(route).short_name\n\n ))\n\n .small_heading()\n\n .into_widget(ctx),\n", "file_path": "game/src/info/bus.rs", "rank": 56, "score": 355623.6923989471 }, { "content": "fn export_for_leaflet(ctx: &mut EventCtx, app: &App) {\n\n let name = app.primary.map.get_name();\n\n let bounds = app.primary.map.get_bounds();\n\n let map_length = bounds.width().max(bounds.height());\n\n\n\n // At zoom level N, the entire map fits into (N + 1) * (N + 1) tiles\n\n for zoom_level in 0..=25 {\n\n let num_tiles = zoom_level + 1;\n\n // How do we fit the entire map_length into this many tiles?\n\n let zoom = 256.0 * (num_tiles as f64) / map_length;\n\n ctx.request_update(UpdateType::ScreenCaptureEverything {\n\n dir: format!(\n\n \"screenshots/{}/{}/{}/{}\",\n\n name.city.country, name.city.city, name.map, zoom_level\n\n ),\n\n zoom,\n\n dims: 
ScreenDims::new(256.0, 256.0),\n\n leaflet_naming: true,\n\n });\n\n }\n\n}\n\n\n", "file_path": "game/src/debug/mod.rs", "rank": 57, "score": 354596.1110939493 }, { "content": "fn make_elevation(ctx: &EventCtx, color: Color, walking: bool, path: &Path, map: &Map) -> Widget {\n\n let mut pts: Vec<(Distance, Distance)> = Vec::new();\n\n let mut dist = Distance::ZERO;\n\n for step in path.get_steps() {\n\n if let PathStep::Turn(t) = step {\n\n pts.push((dist, map.get_i(t.parent).elevation));\n\n }\n\n dist += step.as_traversable().length(map);\n\n }\n\n // TODO Show roughly where we are in the trip; use distance covered by current path for this\n\n LinePlot::new(\n\n ctx,\n\n vec![Series {\n\n label: if walking {\n\n \"Elevation for walking\"\n\n } else {\n\n \"Elevation for biking\"\n\n }\n\n .to_string(),\n\n color,\n\n pts,\n\n }],\n\n PlotOptions::fixed(),\n\n )\n\n}\n\n\n", "file_path": "game/src/info/trip.rs", "rank": 58, "score": 353824.65380335005 }, { "content": "fn make_btn(ctx: &mut EventCtx, num: usize) -> Widget {\n\n let title = match num {\n\n 0 => \"Record 0 intersections\".to_string(),\n\n 1 => \"Record 1 intersection\".to_string(),\n\n _ => format!(\"Record {} intersections\", num),\n\n };\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(title)\n\n .disabled(num == 0)\n\n .hotkey(Key::Enter)\n\n .build_widget(ctx, \"record\")\n\n}\n", "file_path": "game/src/sandbox/misc_tools.rs", "rank": 59, "score": 351688.18851614534 }, { "content": "fn make_panel(ctx: &mut EventCtx, story: &StoryMap, mode: &Mode, dirty: bool) -> Panel {\n\n Panel::new(Widget::col(vec![\n\n Widget::row(vec![\n\n Line(\"Story map editor\").small_heading().into_widget(ctx),\n\n Widget::vert_separator(ctx, 30.0),\n\n ctx.style()\n\n .btn_outline\n\n .popup(&story.name)\n\n .hotkey(lctrl(Key::L))\n\n .build_widget(ctx, \"load\"),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/tools/save.svg\")\n\n .hotkey(lctrl(Key::S))\n\n .disabled(!dirty)\n\n .build_widget(ctx, 
\"save\"),\n\n ctx.style().btn_close_widget(ctx),\n\n ]),\n\n Widget::row(vec![\n\n ctx.style()\n", "file_path": "game/src/devtools/story.rs", "rank": 60, "score": 348070.0434584499 }, { "content": "fn make_pagination(ctx: &mut EventCtx, total: usize, skip: usize) -> Widget {\n\n let next = ctx\n\n .style()\n\n .btn_next()\n\n .disabled(skip + 1 + ROWS >= total)\n\n .hotkey(Key::RightArrow);\n\n let prev = ctx\n\n .style()\n\n .btn_prev()\n\n .disabled(skip == 0)\n\n .hotkey(Key::LeftArrow);\n\n\n\n Widget::row(vec![\n\n prev.build_widget(ctx, \"previous\"),\n\n format!(\n\n \"{}-{} of {}\",\n\n if total > 0 {\n\n prettyprint_usize(skip + 1)\n\n } else {\n\n \"0\".to_string()\n\n },\n\n prettyprint_usize((skip + 1 + ROWS).min(total)),\n\n prettyprint_usize(total)\n\n )\n\n .text_widget(ctx)\n\n .centered_vert(),\n\n next.build_widget(ctx, \"next\"),\n\n ])\n\n}\n\n\n", "file_path": "widgetry/src/widgets/table.rs", "rank": 61, "score": 346931.30769062263 }, { "content": "fn make_btn(ctx: &mut EventCtx, num: usize) -> Widget {\n\n let title = match num {\n\n 0 => \"Edit 0 signals\".to_string(),\n\n 1 => \"Edit 1 signal\".to_string(),\n\n _ => format!(\"Edit {} signals\", num),\n\n };\n\n ctx.style()\n\n .btn_solid_primary\n\n .text(title)\n\n .disabled(num == 0)\n\n .hotkey(hotkeys(vec![Key::Enter, Key::E]))\n\n .build_widget(ctx, \"edit\")\n\n}\n", "file_path": "game/src/edit/traffic_signals/picker.rs", "rank": 62, "score": 346922.06727707945 }, { "content": "fn header(ctx: &EventCtx, app: &App, details: &mut Details, id: BuildingID, tab: Tab) -> Widget {\n\n let mut rows = vec![];\n\n\n\n rows.push(Widget::row(vec![\n\n Line(id.to_string()).small_heading().into_widget(ctx),\n\n header_btns(ctx),\n\n ]));\n\n\n\n rows.push(make_tabs(\n\n ctx,\n\n &mut details.hyperlinks,\n\n tab,\n\n vec![(\"Info\", Tab::BldgInfo(id)), (\"People\", Tab::BldgPeople(id))],\n\n ));\n\n\n\n draw_occupants(details, app, id, None);\n\n // TODO Draw cars parked inside?\n\n\n\n 
Widget::custom_col(rows)\n\n}\n\n\n", "file_path": "game/src/info/building.rs", "rank": 63, "score": 342823.9876023629 }, { "content": "fn header(ctx: &EventCtx, app: &App, details: &mut Details, id: LaneID, tab: Tab) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n let label = if l.is_shoulder() {\n\n \"Shoulder\"\n\n } else if l.is_sidewalk() {\n\n \"Sidewalk\"\n\n } else {\n\n \"Lane\"\n\n };\n\n\n\n // Navbar\n\n rows.push(Widget::row(vec![\n\n Line(format!(\"{} #{}\", label, id.0))\n\n .small_heading()\n\n .into_widget(ctx),\n", "file_path": "game/src/info/lane.rs", "rank": 64, "score": 342823.98760236293 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct Label {\n\n text: Option<String>,\n\n color: Option<Color>,\n\n styled_text: Option<Text>,\n\n font_size: Option<usize>,\n\n font: Option<Font>,\n\n}\n\n\n\n// Like an image map from the old HTML days\n\npub struct MultiButton {\n\n draw: Drawable,\n\n hitboxes: Vec<(Polygon, String)>,\n\n hovering: Option<usize>,\n\n\n\n top_left: ScreenPt,\n\n dims: ScreenDims,\n\n}\n\n\n\nimpl MultiButton {\n\n pub fn new(ctx: &EventCtx, batch: GeomBatch, hitboxes: Vec<(Polygon, String)>) -> Widget {\n", "file_path": "widgetry/src/widgets/button.rs", "rank": 65, "score": 339791.5621105155 }, { "content": "#[allow(non_snake_case)]\n\npub fn Line<S: Into<String>>(text: S) -> TextSpan {\n\n TextSpan {\n\n text: text.into(),\n\n fg_color: None,\n\n size: DEFAULT_FONT_SIZE,\n\n font: DEFAULT_FONT,\n\n underlined: false,\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Text {\n\n // The bg_color will cover the entire block, but some lines can have extra highlighting.\n\n lines: Vec<(Option<Color>, Vec<TextSpan>)>,\n\n // TODO Stop using this as much as possible.\n\n bg_color: Option<Color>,\n\n}\n\n\n\nimpl From<TextSpan> for Text {\n\n fn from(line: TextSpan) -> Text {\n", "file_path": "widgetry/src/text.rs", "rank": 
66, "score": 339248.328135212 }, { "content": "fn level_btn(ctx: &mut EventCtx, app: &App, level: &Level, idx: usize) -> GeomBatch {\n\n let mut txt = Text::new();\n\n txt.add_line(Line(format!(\"LEVEL {}\", idx + 1)).small_heading());\n\n txt.add_line(Line(&level.title).small_heading());\n\n txt.add_line(&level.description);\n\n let batch = txt.wrap_to_pct(ctx, 15).render_autocropped(ctx);\n\n\n\n // Add padding\n\n let (mut batch, hitbox) = batch\n\n .batch()\n\n .container()\n\n .padding(EdgeInsets {\n\n top: 20.0,\n\n bottom: 20.0,\n\n left: 10.0,\n\n right: 10.0,\n\n })\n\n .to_geom(ctx, None);\n\n batch.unshift(app.cs.unzoomed_bike, hitbox);\n\n batch\n\n}\n\n\n", "file_path": "santa/src/title.rs", "rank": 67, "score": 338624.3601237482 }, { "content": "fn draw_banned_turns(ctx: &mut EventCtx, app: &App) -> Drawable {\n\n let mut batch = GeomBatch::new();\n\n let map = &app.primary.map;\n\n for i in map.all_intersections() {\n\n let mut pairs: HashSet<(RoadID, RoadID)> = HashSet::new();\n\n // Don't call out one-ways, so use incoming/outgoing roads, and just for cars.\n\n for l1 in i.get_incoming_lanes(map, PathConstraints::Car) {\n\n for l2 in i.get_outgoing_lanes(map, PathConstraints::Car) {\n\n pairs.insert((map.get_l(l1).parent, map.get_l(l2).parent));\n\n }\n\n }\n\n for t in &i.turns {\n\n let r1 = map.get_l(t.src).parent;\n\n let r2 = map.get_l(t.dst).parent;\n\n pairs.remove(&(r1, r2));\n\n }\n\n\n\n for (r1, r2) in pairs {\n\n if let Ok(pl) = PolyLine::new(vec![\n\n map.get_r(r1).center_pts.middle(),\n", "file_path": "game/src/debug/mod.rs", "rank": 68, "score": 338528.7670212858 }, { "content": "// Convenience\n\nfn hex(x: &str) -> Color {\n\n Color::hex(x)\n\n}\n", "file_path": "widgetry/src/style/mod.rs", "rank": 69, "score": 337236.95340937295 }, { "content": "fn calc_all_routes(ctx: &EventCtx, app: &mut App) -> (usize, Drawable) {\n\n let agents = app.primary.sim.active_agents();\n\n let mut batch = GeomBatch::new();\n\n let mut cnt = 0;\n\n let 
sim = &app.primary.sim;\n\n let map = &app.primary.map;\n\n for maybe_trace in Timer::new(\"calculate all routes\").parallelize(\n\n \"route to geometry\",\n\n Parallelism::Fastest,\n\n agents,\n\n |id| {\n\n sim.trace_route(id, map)\n\n .map(|trace| trace.make_polygons(NORMAL_LANE_THICKNESS))\n\n },\n\n ) {\n\n if let Some(t) = maybe_trace {\n\n cnt += 1;\n\n batch.push(app.cs.route, t);\n\n }\n\n }\n\n (cnt, ctx.upload(batch))\n\n}\n\n\n", "file_path": "game/src/debug/mod.rs", "rank": 70, "score": 331250.90714701865 }, { "content": "fn debug_body(ctx: &EventCtx, app: &App, id: LaneID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n let mut kv = Vec::new();\n\n\n\n kv.push((\"Parent\".to_string(), r.id.to_string()));\n\n\n\n if l.lane_type.is_for_moving_vehicles() {\n\n kv.push((\n\n \"Driving blackhole\".to_string(),\n\n l.driving_blackhole.to_string(),\n\n ));\n\n kv.push((\n\n \"Biking blackhole\".to_string(),\n\n l.biking_blackhole.to_string(),\n\n ));\n", "file_path": "game/src/info/lane.rs", "rank": 71, "score": 327902.83143854915 }, { "content": "fn info_body(ctx: &EventCtx, app: &App, id: LaneID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let map = &app.primary.map;\n\n let l = map.get_l(id);\n\n let r = map.get_r(l.parent);\n\n\n\n let mut kv = Vec::new();\n\n\n\n if !l.is_walkable() {\n\n kv.push((\"Type\", l.lane_type.describe().to_string()));\n\n }\n\n if r.is_private() {\n\n let mut ban = Vec::new();\n\n for p in PathConstraints::all() {\n\n if !r.access_restrictions.allow_through_traffic.contains(p) {\n\n ban.push(format!(\"{:?}\", p).to_ascii_lowercase());\n\n }\n\n }\n\n if !ban.is_empty() {\n", "file_path": "game/src/info/lane.rs", "rank": 72, "score": 327902.83143854915 }, { "content": "fn info_body(ctx: &EventCtx, app: &App, id: IntersectionID) -> Widget {\n\n let mut rows = vec![];\n\n\n\n let i = app.primary.map.get_i(id);\n\n\n\n let mut txt = 
Text::from(\"Connecting\");\n\n let mut road_names = BTreeSet::new();\n\n for r in &i.roads {\n\n road_names.insert(\n\n app.primary\n\n .map\n\n .get_r(*r)\n\n .get_name(app.opts.language.as_ref()),\n\n );\n\n }\n\n for r in road_names {\n\n txt.add_line(format!(\" {}\", r));\n\n }\n\n rows.push(txt.into_widget(ctx));\n\n\n", "file_path": "game/src/info/intersection.rs", "rank": 73, "score": 327902.83143854915 }, { "content": "fn area_body(ctx: &EventCtx, app: &App, id: AreaID) -> Widget {\n\n let mut rows = vec![];\n\n let area = app.primary.map.get_a(id);\n\n\n\n if let Some(osm_id) = area.osm_id {\n\n rows.push(\n\n ctx.style()\n\n .btn_outline\n\n .text(\"Open in OSM\")\n\n .build_widget(ctx, format!(\"open {}\", osm_id)),\n\n );\n\n }\n\n\n\n rows.extend(make_table(\n\n ctx,\n\n area.osm_tags\n\n .inner()\n\n .iter()\n\n .map(|(k, v)| (k.to_string(), v.to_string()))\n\n .collect(),\n\n ));\n\n\n\n Widget::col(rows)\n\n}\n", "file_path": "game/src/info/debug.rs", "rank": 74, "score": 327902.83143854915 }, { "content": "pub fn spawn_agents_around(i: IntersectionID, app: &mut App) {\n\n let map = &app.primary.map;\n\n let mut rng = app.primary.current_flags.sim_flags.make_rng();\n\n let mut scenario = Scenario::empty(map, \"one-shot\");\n\n\n\n if map.all_buildings().is_empty() {\n\n println!(\"No buildings, can't pick destinations\");\n\n return;\n\n }\n\n\n\n let mut timer = Timer::new(format!(\n\n \"spawning agents around {} (rng seed {:?})\",\n\n i, app.primary.current_flags.sim_flags.rng_seed\n\n ));\n\n\n\n for l in &map.get_i(i).incoming_lanes {\n\n let lane = map.get_l(*l);\n\n if lane.is_driving() || lane.is_biking() {\n\n for _ in 0..10 {\n\n let mode = if rng.gen_bool(0.7) && lane.is_driving() {\n", "file_path": "game/src/sandbox/gameplay/freeform/mod.rs", "rank": 75, "score": 327828.92757585685 }, { "content": "// TODO Can we automatically transform text and SVG colors?\n\nfn cutscene_pt1_task(ctx: &mut EventCtx) -> Widget {\n\n let icon_builder = 
Image::empty().color(Color::BLACK).dims(50.0);\n\n Widget::custom_col(vec![\n\n Text::from_multiline(vec![\n\n Line(format!(\n\n \"Don't let anyone be delayed by one traffic signal more than {}!\",\n\n THRESHOLD\n\n ))\n\n .fg(Color::BLACK),\n\n Line(\"Survive as long as possible through 24 hours of a busy weekday.\")\n\n .fg(Color::BLACK),\n\n ])\n\n .into_widget(ctx)\n\n .margin_below(30),\n\n Widget::custom_row(vec![\n\n Widget::col(vec![\n\n Line(\"Time\").fg(Color::BLACK).into_widget(ctx),\n\n icon_builder\n\n .clone()\n\n .source_path(\"system/assets/tools/time.svg\")\n", "file_path": "game/src/sandbox/gameplay/fix_traffic_signals.rs", "rank": 76, "score": 326083.9713648015 }, { "content": "fn make_select_panel(ctx: &mut EventCtx, selector: &RoadSelector) -> Panel {\n\n Panel::new(Widget::col(vec![\n\n Line(\"Edit many roads\").small_heading().into_widget(ctx),\n\n selector.make_controls(ctx),\n\n Widget::row(vec![\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\"Edit {} roads\", selector.roads.len()))\n\n .disabled(selector.roads.is_empty())\n\n .hotkey(hotkeys(vec![Key::E, Key::Enter]))\n\n .build_widget(ctx, \"edit roads\"),\n\n ctx.style()\n\n .btn_outline\n\n .text(format!(\n\n \"Export {} roads to shared-row\",\n\n selector.roads.len()\n\n ))\n\n .build_widget(ctx, \"export roads to shared-row\"),\n\n ctx.style()\n\n .btn_outline\n", "file_path": "game/src/edit/bulk.rs", "rank": 77, "score": 323875.58290380903 }, { "content": "pub fn maybe_exit_sandbox(ctx: &mut EventCtx) -> Transition {\n\n Transition::Push(ChooseSomething::new(\n\n ctx,\n\n \"Are you ready to leave this mode?\",\n\n vec![\n\n Choice::string(\"keep playing\"),\n\n Choice::string(\"quit to main screen\").key(Key::Q),\n\n ],\n\n Box::new(|resp, ctx, app| {\n\n if resp == \"keep playing\" {\n\n return Transition::Pop;\n\n }\n\n\n\n if app.primary.map.unsaved_edits() {\n\n return Transition::Multi(vec![\n\n Transition::Push(Box::new(BackToMainMenu)),\n\n 
Transition::Push(SaveEdits::new(\n\n ctx,\n\n app,\n\n \"Do you want to save your proposal first?\",\n", "file_path": "game/src/sandbox/mod.rs", "rank": 78, "score": 323684.2864362633 }, { "content": "pub fn actions(_: &App, id: ID) -> Vec<(Key, String)> {\n\n match id {\n\n ID::Building(_) => vec![(Key::Z, \"start a trip here\".to_string())],\n\n ID::Intersection(_) => vec![(Key::Z, \"spawn agents here\".to_string())],\n\n _ => Vec::new(),\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/freeform/mod.rs", "rank": 79, "score": 323550.90418182366 }, { "content": "pub fn actions(app: &App, id: ID) -> Vec<(Key, String)> {\n\n match (app.session.tutorial.as_ref().unwrap().interaction(), id) {\n\n (Task::LowParking, ID::Lane(_)) => {\n\n vec![(Key::C, \"check the parking occupancy\".to_string())]\n\n }\n\n (Task::Escort, ID::Car(_)) => vec![(Key::C, \"draw WASH ME\".to_string())],\n\n _ => Vec::new(),\n\n }\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 80, "score": 322603.97827369935 }, { "content": "fn draw_unwalkable_roads(ctx: &mut EventCtx, app: &App, opts: &Options) -> Drawable {\n\n let allow_shoulders = match opts {\n\n Options::Walking(ref opts) => opts.allow_shoulders,\n\n Options::Biking => {\n\n return Drawable::empty(ctx);\n\n }\n\n };\n\n\n\n let mut batch = GeomBatch::new();\n\n 'ROADS: for road in app.map.all_roads() {\n\n if road.is_light_rail() {\n\n continue;\n\n }\n\n for (_, _, lt) in road.lanes_ltr() {\n\n if lt == LaneType::Sidewalk || (lt == LaneType::Shoulder && allow_shoulders) {\n\n continue 'ROADS;\n\n }\n\n }\n\n // TODO Skip highways\n\n batch.push(Color::BLUE.alpha(0.5), road.get_thick_polygon(&app.map));\n\n }\n\n ctx.upload(batch)\n\n}\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 81, "score": 317290.99239793303 }, { "content": "fn launch(ctx: &mut EventCtx, app: &App, edits: PermanentMapEdits) -> Transition {\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n {\n\n if 
!abstio::file_exists(edits.map_name.path()) {\n\n return map_gui::tools::prompt_to_download_missing_data(ctx, edits.map_name.clone());\n\n }\n\n }\n\n\n\n Transition::Push(MapLoader::new(\n\n ctx,\n\n app,\n\n edits.map_name.clone(),\n\n Box::new(move |ctx, app| {\n\n // Apply edits before setting up the sandbox, for simplicity\n\n let maybe_err = ctx.loading_screen(\"apply edits\", |ctx, mut timer| {\n\n match edits.to_edits(&app.primary.map) {\n\n Ok(edits) => {\n\n apply_map_edits(ctx, app, edits);\n\n app.primary\n\n .map\n", "file_path": "game/src/pregame/proposals.rs", "rank": 82, "score": 317290.9923979331 }, { "content": "pub fn color_for_mode(app: &App, m: TripMode) -> Color {\n\n match m {\n\n TripMode::Walk => app.cs.unzoomed_pedestrian,\n\n TripMode::Bike => app.cs.unzoomed_bike,\n\n TripMode::Transit => app.cs.unzoomed_bus,\n\n TripMode::Drive => app.cs.unzoomed_car,\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 83, "score": 316988.48625427607 }, { "content": "fn options_to_controls(ctx: &mut EventCtx, opts: &Options) -> Widget {\n\n let mut rows = vec![Toggle::choice(\n\n ctx,\n\n \"walking / biking\",\n\n \"walking\",\n\n \"biking\",\n\n None,\n\n match opts {\n\n Options::Walking(_) => true,\n\n Options::Biking => false,\n\n },\n\n )];\n\n match opts {\n\n Options::Walking(ref opts) => {\n\n rows.push(Toggle::switch(\n\n ctx,\n\n \"Allow walking on the shoulder of the road without a sidewalk\",\n\n None,\n\n opts.allow_shoulders,\n\n ));\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 84, "score": 316558.94709967106 }, { "content": "fn button_builder<'a, 'c>(ctx: &EventCtx) -> ButtonBuilder<'a, 'c> {\n\n ctx.style()\n\n .btn_plain\n\n .btn()\n\n .outline((0.0, Color::CLEAR), ControlState::Default)\n\n}\n\n\n\nimpl<T: 'static + PartialEq + Clone> PersistentSplit<T> {\n\n pub fn current_value(&self) -> T {\n\n self.current_value.clone()\n\n }\n\n}\n\n\n\nimpl<T: 'static + Clone + PartialEq> WidgetImpl for PersistentSplit<T> 
{\n\n fn get_dims(&self) -> ScreenDims {\n\n let dims1 = self.btn.get_dims();\n\n let dims2 = self.spacer.get_dims();\n\n let dims3 = self.dropdown.get_dims();\n\n ScreenDims::new(\n\n dims1.width + dims2.width + dims3.width,\n", "file_path": "widgetry/src/widgets/persistent_split.rs", "rank": 85, "score": 312825.1362806635 }, { "content": "pub fn color_for_trip_phase(app: &App, tpt: TripPhaseType) -> Color {\n\n match tpt {\n\n TripPhaseType::Driving => app.cs.unzoomed_car,\n\n TripPhaseType::Walking => app.cs.unzoomed_pedestrian,\n\n TripPhaseType::Biking => app.cs.bike_trip,\n\n TripPhaseType::Parking => app.cs.parking_trip,\n\n TripPhaseType::WaitingForBus(_, _) => app.cs.bus_layer,\n\n TripPhaseType::RidingBus(_, _, _) => app.cs.bus_trip,\n\n TripPhaseType::Cancelled | TripPhaseType::Finished => unreachable!(),\n\n TripPhaseType::DelayedStart => Color::YELLOW,\n\n }\n\n}\n\n\n", "file_path": "game/src/common/mod.rs", "rank": 86, "score": 305227.0033750355 }, { "content": "/// Highlights intersections which were \"slow\" on the map\n\nfn highlight_slow_intersections(ctx: &EventCtx, app: &App, details: &mut Details, id: TripID) {\n\n if let Some(delays) = app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .trip_intersection_delays\n\n .get(&id)\n\n {\n\n for (id, time) in delays {\n\n let intersection = app.primary.map.get_i(id.parent);\n\n let (normal_delay_time, slow_delay_time) = if intersection.is_traffic_signal() {\n\n (30, 120)\n\n } else {\n\n (5, 30)\n\n };\n\n let (fg_color, bg_color) = if *time < normal_delay_time {\n\n (Color::WHITE, app.cs.normal_slow_intersection)\n\n } else if *time < slow_delay_time {\n\n (Color::BLACK, app.cs.slow_intersection)\n\n } else {\n", "file_path": "game/src/info/trip.rs", "rank": 87, "score": 304490.7903990877 }, { "content": "/// Highlights lanes which were \"slow\" on the map\n\nfn highlight_slow_lanes(ctx: &EventCtx, app: &App, details: &mut Details, id: TripID) {\n\n if let Some(lane_speeds) = app\n\n .primary\n\n 
.sim\n\n .get_analytics()\n\n .lane_speed_percentage\n\n .get(&id)\n\n {\n\n for (id, speed_percent) in lane_speeds.iter() {\n\n let lane = app.primary.map.get_l(*id);\n\n let (fg_color, bg_color) = if speed_percent > &95 {\n\n (Color::WHITE, app.cs.normal_slow_intersection)\n\n } else if speed_percent > &60 {\n\n (Color::BLACK, app.cs.slow_intersection)\n\n } else {\n\n (Color::WHITE, app.cs.very_slow_intersection)\n\n };\n\n details.unzoomed.push(\n\n bg_color,\n\n lane.lane_center_pts.make_polygons(Distance::meters(10.0)),\n", "file_path": "game/src/info/trip.rs", "rank": 88, "score": 304490.7903990877 }, { "content": "// This prepares a bunch of geometry (colored polygons) and uploads it to the GPU once. Then it can\n\n// be redrawn cheaply later.\n\nfn setup_scrollable_canvas(ctx: &mut EventCtx) -> Drawable {\n\n let mut batch = GeomBatch::new();\n\n batch.push(\n\n Color::hex(\"#4E30A6\"),\n\n Polygon::rounded_rectangle(5000.0, 5000.0, 25.0),\n\n );\n\n // SVG support using lyon and usvg. 
Map-space means don't scale for high DPI monitors.\n\n batch\n\n .append(GeomBatch::load_svg(ctx, \"system/assets/pregame/logo.svg\").translate(300.0, 300.0));\n\n // Text rendering also goes through lyon and usvg.\n\n batch.append(\n\n Text::from(Line(\"Awesome vector text thanks to usvg and lyon\").fg(Color::hex(\"#DF8C3D\")))\n\n .render_autocropped(ctx)\n\n .scale(2.0)\n\n .centered_on(Pt2D::new(600.0, 500.0))\n\n .rotate(Angle::degrees(-30.0)),\n\n );\n\n\n\n let mut rng = if cfg!(target_arch = \"wasm32\") {\n\n XorShiftRng::seed_from_u64(0)\n", "file_path": "widgetry_demo/src/lib.rs", "rank": 89, "score": 304476.20208766055 }, { "content": "fn make_tabs(ctx: &mut EventCtx) -> TabController {\n\n let style = ctx.style();\n\n\n\n let mut tabs = TabController::new(\"demo_tabs\");\n\n\n\n let gallery_bar_item = style.btn_tab.text(\"Component Gallery\");\n\n let gallery_content = Widget::col(vec![\n\n Text::from(Line(\"Text\").big_heading_styled().size(18)).into_widget(ctx),\n\n Text::from_all(vec![\n\n Line(\"You can \"),\n\n Line(\"change fonts \").big_heading_plain(),\n\n Line(\"on the same \").small().fg(Color::BLUE),\n\n Line(\"line!\").small_heading(),\n\n ])\n\n .bg(Color::GREEN)\n\n .into_widget(ctx),\n\n // Button Style Gallery\n\n Text::from(Line(\"Buttons\").big_heading_styled().size(18)).into_widget(ctx),\n\n Widget::row(vec![\n\n style\n", "file_path": "widgetry_demo/src/lib.rs", "rank": 90, "score": 304470.44077667047 }, { "content": "pub fn lctrl(key: Key) -> MultiKey {\n\n MultiKey::LCtrl(key)\n\n}\n\n\n", "file_path": "widgetry/src/event.rs", "rank": 91, "score": 303648.1060989375 }, { "content": "pub fn can_edit_lane(mode: &GameplayMode, l: LaneID, app: &App) -> bool {\n\n let l = app.primary.map.get_l(l);\n\n mode.can_edit_lanes()\n\n && !l.is_walkable()\n\n && l.lane_type != LaneType::SharedLeftTurn\n\n && !l.is_light_rail()\n\n && !app.primary.map.get_parent(l.id).is_service()\n\n}\n\n\n", "file_path": "game/src/edit/mod.rs", "rank": 92, 
"score": 302913.15924917755 }, { "content": "fn preview_route(g: &mut GfxCtx, app: &App, id: TripID, batch: &mut GeomBatch) {\n\n for p in app\n\n .primary\n\n .sim\n\n .get_analytics()\n\n .get_trip_phases(id, &app.primary.map)\n\n {\n\n if let Some(path) = &p.path {\n\n if let Some(trace) = path.trace(&app.primary.map) {\n\n batch.push(\n\n color_for_trip_phase(app, p.phase_type),\n\n trace.make_polygons(Distance::meters(20.0)),\n\n );\n\n }\n\n }\n\n }\n\n\n\n let trip = app.primary.sim.trip_info(id);\n\n batch.append(map_gui::tools::start_marker(\n\n g,\n", "file_path": "game/src/sandbox/dashboards/generic_trip_table.rs", "rank": 93, "score": 301734.4552875174 }, { "content": "fn intro_story(ctx: &mut EventCtx) -> Box<dyn State<App>> {\n\n CutsceneBuilder::new(\"Introduction\")\n\n .boss(\n\n \"Argh, the mayor's on my case again about the West Seattle bridge. This day couldn't \\\n\n get any worse.\",\n\n )\n\n .player(\"Er, hello? Boss? I'm --\")\n\n .boss(\"Yet somehow it did.. You're the new recruit. Yeah, yeah. Come in.\")\n\n .boss(\n\n \"Due to budget cuts, we couldn't hire a real traffic engineer, so we just called some \\\n\n know-it-all from Reddit who seems to think they can fix Seattle traffic.\",\n\n )\n\n .player(\"Yes, hi, my name is --\")\n\n .boss(\"We can't afford name-tags, didn't you hear, budget cuts? Your name doesn't matter.\")\n\n .player(\"What about my Insta handle?\")\n\n .boss(\"-glare-\")\n\n .boss(\n\n \"Look, you think fixing traffic is easy? Hah! 
You can't fix one intersection without \\\n\n breaking ten more.\",\n\n )\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 94, "score": 301377.3839086869 }, { "content": "pub fn angle_from_arrow_keys(ctx: &EventCtx) -> Option<Angle> {\n\n let mut x: f64 = 0.0;\n\n let mut y: f64 = 0.0;\n\n if ctx.is_key_down(Key::LeftArrow) || ctx.is_key_down(Key::A) {\n\n x -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::RightArrow) || ctx.is_key_down(Key::D) {\n\n x += 1.0;\n\n }\n\n if ctx.is_key_down(Key::UpArrow) || ctx.is_key_down(Key::W) {\n\n y -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::DownArrow) || ctx.is_key_down(Key::S) {\n\n y += 1.0;\n\n }\n\n\n\n if x == 0.0 && y == 0.0 {\n\n return None;\n\n }\n\n Some(Angle::new_rads(y.atan2(x)))\n\n}\n", "file_path": "santa/src/controls.rs", "rank": 95, "score": 297307.3964797894 }, { "content": "pub fn load_svg(prerender: &Prerender, filename: &str) -> (GeomBatch, Bounds) {\n\n let cache_key = format!(\"file://{}\", filename);\n\n if let Some(pair) = prerender.assets.get_cached_svg(&cache_key) {\n\n return pair;\n\n }\n\n\n\n let bytes = (prerender.assets.read_svg)(filename);\n\n load_svg_from_bytes_uncached(&bytes)\n\n .map(|(batch, bounds)| {\n\n prerender\n\n .assets\n\n .cache_svg(cache_key, batch.clone(), bounds.clone());\n\n (batch, bounds)\n\n })\n\n .expect(&format!(\"error loading svg: {}\", filename))\n\n}\n\n\n", "file_path": "widgetry/src/svg.rs", "rank": 96, "score": 292742.017611243 }, { "content": "fn make_instructions(ctx: &mut EventCtx, allow_through_traffic: &BTreeSet<TripMode>) -> Widget {\n\n if allow_through_traffic == &TripMode::all().into_iter().collect() {\n\n Text::from(\n\n \"Through-traffic is allowed for everyone, meaning this is just a normal public road. 
\\\n\n Would you like to restrict it?\",\n\n )\n\n .wrap_to_pct(ctx, 30)\n\n .into_widget(ctx)\n\n } else {\n\n Line(\"Trips may start or end in this zone, but through-traffic is only allowed for:\")\n\n .into_widget(ctx)\n\n }\n\n}\n", "file_path": "game/src/edit/zones.rs", "rank": 97, "score": 291050.75563913514 }, { "content": "fn params_to_controls(ctx: &mut EventCtx, mode: TripMode, params: &RoutingParams) -> Widget {\n\n let mut rows = vec![Widget::custom_row(vec![\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/meters/bike.svg\")\n\n .disabled(mode == TripMode::Bike)\n\n .build_widget(ctx, \"bikes\"),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/meters/car.svg\")\n\n .disabled(mode == TripMode::Drive)\n\n .build_widget(ctx, \"cars\"),\n\n ctx.style()\n\n .btn_plain\n\n .icon(\"system/assets/meters/pedestrian.svg\")\n\n .disabled(mode == TripMode::Walk)\n\n .build_widget(ctx, \"pedestrians\"),\n\n ])\n\n .evenly_spaced()];\n\n if mode == TripMode::Drive || mode == TripMode::Bike {\n", "file_path": "game/src/debug/routes.rs", "rank": 98, "score": 287770.3449160521 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct ButtonStateStyle<'a, 'c> {\n\n image: Option<Image<'a, 'c>>,\n\n label: Option<Label>,\n\n outline: Option<OutlineStyle>,\n\n bg_color: Option<Color>,\n\n custom_batch: Option<GeomBatch>,\n\n}\n\n\n\n// can we take 'b out? and make the func that uses it generic?\n\nimpl<'b, 'a: 'b, 'c> ButtonBuilder<'a, 'c> {\n\n pub fn new() -> Self {\n\n ButtonBuilder {\n\n padding: EdgeInsets {\n\n top: 8.0,\n\n bottom: 8.0,\n\n left: 16.0,\n\n right: 16.0,\n\n },\n\n stack_spacing: 10.0,\n\n ..Default::default()\n", "file_path": "widgetry/src/widgets/button.rs", "rank": 99, "score": 287534.518858129 } ]
Rust
crates/sipmsg/src/headers/header.rs
armatusmiles/sipcore
7e0bd478d47a53082467bb231655b6f3f5733cb2
use crate::{ common::{bnfcore::*, errorparse::SipParseError, nom_wrappers::from_utf8_nom, take_sws_token}, headers::{ parsers::ExtensionParser, traits::{HeaderValueParserFn, SipHeaderParser}, GenericParams, SipRFCHeader, SipUri, }, }; use alloc::collections::{BTreeMap, VecDeque}; use core::str; use nom::{bytes::complete::take_while1, character::complete}; use unicase::Ascii; #[derive(PartialEq, Debug)] pub enum HeaderValueType { EmptyValue, TokenValue, Digit, AbsoluteURI, QuotedValue, AuthentificationInfo, CSeq, DateString, Utf8Text, Version, AuthorizationDigest, CallID, CallInfo, NameAddr, Timestamp, RetryAfter, UserAgent, Via, Warning, ExtensionHeader, } #[derive(PartialEq, Debug, Eq, PartialOrd, Ord)] pub enum HeaderTagType { PureValue, AinfoType, AinfoValue, AbsoluteURI, AuthSchema, Username, Domain, Realm, Nonce, DigestUri, Dresponse, Algorithm, Cnonce, Opaque, Stale, QopValue, NonceCount, Number, Method, ID, Host, Port, Star, DisplayName, Seconds, Comment, Major, Minor, TimveVal, Delay, ProtocolName, ProtocolVersion, ProtocolTransport, WarnCode, WarnAgent, WarnText, } pub type HeaderTags<'a> = BTreeMap<HeaderTagType, &'a [u8]>; #[derive(PartialEq, Debug)] pub struct HeaderValue<'a> { pub vstr: &'a str, pub vtype: HeaderValueType, vtags: Option<HeaderTags<'a>>, sip_uri: Option<SipUri<'a>>, } impl<'a> HeaderValue<'a> { pub fn create_empty_value() -> HeaderValue<'a> { HeaderValue { vstr: "", vtype: HeaderValueType::EmptyValue, vtags: None, sip_uri: None, } } pub fn new( val: &'a [u8], vtype: HeaderValueType, vtags: Option<HeaderTags<'a>>, sip_uri: Option<SipUri<'a>>, ) -> nom::IResult<&'a [u8], HeaderValue<'a>, SipParseError<'a>> { let (_, vstr) = from_utf8_nom(val)?; Ok(( val, HeaderValue { vstr: vstr, vtype: vtype, vtags: vtags, sip_uri: sip_uri, }, )) } pub fn tags(&self) -> Option<&HeaderTags<'a>> { self.vtags.as_ref() } pub fn sip_uri(&self) -> Option<&SipUri<'a>> { self.sip_uri.as_ref() } } #[derive(PartialEq, Debug)] pub struct Header<'a> { pub name: 
Ascii<&'a str>, pub value: HeaderValue<'a>, parameters: Option<GenericParams<'a>>, pub raw_value_param: &'a[u8] } impl<'a> Header<'a> { pub fn new( name: &'a str, value: HeaderValue<'a>, parameters: Option<GenericParams<'a>>, raw_value_param: &'a[u8], ) -> Header<'a> { Header { name: { Ascii::new(name) }, value: value, parameters: parameters, raw_value_param: raw_value_param } } pub fn params(&self) -> Option<&GenericParams<'a>> { self.parameters.as_ref() } pub fn find_parser(header_name: &'a str) -> (Option<SipRFCHeader>, HeaderValueParserFn) { match SipRFCHeader::from_str(&header_name) { Some(rfc_header) => (Some(rfc_header), rfc_header.get_parser()), None => (None, ExtensionParser::take_value), } } pub fn take_name(source_input: &'a [u8]) -> nom::IResult<&[u8], &'a str, SipParseError> { let (input, header_name) = take_while1(is_token_char)(source_input)?; let (input, _) = take_sws_token::colon(input)?; match str::from_utf8(header_name) { Ok(hdr_str) => Ok((input, hdr_str)), Err(_) => sip_parse_error!(1, "Bad header name"), } } pub fn take_value( input: &'a [u8], parser: HeaderValueParserFn, ) -> nom::IResult<&'a [u8], (HeaderValue<'a>, Option<GenericParams<'a>>), SipParseError<'a>> { if is_crlf(input) { return Ok((input, (HeaderValue::create_empty_value(), None))); } let (inp, value) = parser(input)?; let (inp, _) = complete::space0(inp)?; if inp.is_empty() { return sip_parse_error!(1, "Error parse header value"); } if inp[0] != b',' && inp[0] != b';' && inp[0] != b' ' && !is_crlf(inp) { return sip_parse_error!(2, "Error parse header value"); } if inp[0] == b';' { let (inp, params) = Header::try_take_parameters(inp)?; return Ok((inp, (value, params))); } Ok((inp, (value, None))) } fn try_take_parameters( input: &'a [u8], ) -> nom::IResult<&'a [u8], Option<GenericParams<'a>>, SipParseError<'a>> { if input.is_empty() || input[0] != b';' { return Ok((input, None)); } let (input, parameters) = GenericParams::parse(input)?; Ok((input, Some(parameters))) } pub fn 
parse( input: &'a [u8], ) -> nom::IResult<&[u8], (Option<SipRFCHeader>, VecDeque<Header<'a>>), SipParseError> { let mut headers = VecDeque::new(); let (input, header_name) = Header::take_name(input)?; let (rfc_type, value_parser) = Header::find_parser(header_name); let mut inp = input; loop { let (input, (value, params)) = Header::take_value(inp, value_parser)?; headers.push_back(Header::new(header_name, value, params, &inp[..inp.len() - input.len()])); if input[0] == b',' { let (input, _) = take_sws_token::comma(input)?; inp = input; continue; } inp = input; break; } Ok((inp, (rfc_type, headers))) } }
use crate::{ common::{bnfcore::*, errorparse::SipParseError, nom_wrappers::from_utf8_nom, take_sws_token}, headers::{ parsers::ExtensionParser, traits::{HeaderValueParserFn, SipHeaderParser}, GenericParams, SipRFCHeader, SipUri, }, }; use alloc::collections::{BTreeMap, VecDeque}; use core::str; use nom::{bytes::complete::take_while1, character::complete}; use unicase::Ascii; #[derive(PartialEq, Debug)] pub enum HeaderValueType { EmptyValue, TokenValue, Digit, AbsoluteURI, QuotedValue, AuthentificationInfo, CSeq, DateString, Utf8Text, Version, AuthorizationDigest, CallID, CallInfo, NameAddr, Timestamp, RetryAfter, UserAgent, Via, Warning, ExtensionHeader, } #[derive(PartialEq, Debug, Eq, PartialOrd, Ord)] pub enum HeaderTagType { PureValue, AinfoType, AinfoValue, AbsoluteURI, AuthSchema, Username, Doma
nput[0] != b';' { return Ok((input, None)); } let (input, parameters) = GenericParams::parse(input)?; Ok((input, Some(parameters))) } pub fn parse( input: &'a [u8], ) -> nom::IResult<&[u8], (Option<SipRFCHeader>, VecDeque<Header<'a>>), SipParseError> { let mut headers = VecDeque::new(); let (input, header_name) = Header::take_name(input)?; let (rfc_type, value_parser) = Header::find_parser(header_name); let mut inp = input; loop { let (input, (value, params)) = Header::take_value(inp, value_parser)?; headers.push_back(Header::new(header_name, value, params, &inp[..inp.len() - input.len()])); if input[0] == b',' { let (input, _) = take_sws_token::comma(input)?; inp = input; continue; } inp = input; break; } Ok((inp, (rfc_type, headers))) } }
in, Realm, Nonce, DigestUri, Dresponse, Algorithm, Cnonce, Opaque, Stale, QopValue, NonceCount, Number, Method, ID, Host, Port, Star, DisplayName, Seconds, Comment, Major, Minor, TimveVal, Delay, ProtocolName, ProtocolVersion, ProtocolTransport, WarnCode, WarnAgent, WarnText, } pub type HeaderTags<'a> = BTreeMap<HeaderTagType, &'a [u8]>; #[derive(PartialEq, Debug)] pub struct HeaderValue<'a> { pub vstr: &'a str, pub vtype: HeaderValueType, vtags: Option<HeaderTags<'a>>, sip_uri: Option<SipUri<'a>>, } impl<'a> HeaderValue<'a> { pub fn create_empty_value() -> HeaderValue<'a> { HeaderValue { vstr: "", vtype: HeaderValueType::EmptyValue, vtags: None, sip_uri: None, } } pub fn new( val: &'a [u8], vtype: HeaderValueType, vtags: Option<HeaderTags<'a>>, sip_uri: Option<SipUri<'a>>, ) -> nom::IResult<&'a [u8], HeaderValue<'a>, SipParseError<'a>> { let (_, vstr) = from_utf8_nom(val)?; Ok(( val, HeaderValue { vstr: vstr, vtype: vtype, vtags: vtags, sip_uri: sip_uri, }, )) } pub fn tags(&self) -> Option<&HeaderTags<'a>> { self.vtags.as_ref() } pub fn sip_uri(&self) -> Option<&SipUri<'a>> { self.sip_uri.as_ref() } } #[derive(PartialEq, Debug)] pub struct Header<'a> { pub name: Ascii<&'a str>, pub value: HeaderValue<'a>, parameters: Option<GenericParams<'a>>, pub raw_value_param: &'a[u8] } impl<'a> Header<'a> { pub fn new( name: &'a str, value: HeaderValue<'a>, parameters: Option<GenericParams<'a>>, raw_value_param: &'a[u8], ) -> Header<'a> { Header { name: { Ascii::new(name) }, value: value, parameters: parameters, raw_value_param: raw_value_param } } pub fn params(&self) -> Option<&GenericParams<'a>> { self.parameters.as_ref() } pub fn find_parser(header_name: &'a str) -> (Option<SipRFCHeader>, HeaderValueParserFn) { match SipRFCHeader::from_str(&header_name) { Some(rfc_header) => (Some(rfc_header), rfc_header.get_parser()), None => (None, ExtensionParser::take_value), } } pub fn take_name(source_input: &'a [u8]) -> nom::IResult<&[u8], &'a str, SipParseError> { let (input, 
header_name) = take_while1(is_token_char)(source_input)?; let (input, _) = take_sws_token::colon(input)?; match str::from_utf8(header_name) { Ok(hdr_str) => Ok((input, hdr_str)), Err(_) => sip_parse_error!(1, "Bad header name"), } } pub fn take_value( input: &'a [u8], parser: HeaderValueParserFn, ) -> nom::IResult<&'a [u8], (HeaderValue<'a>, Option<GenericParams<'a>>), SipParseError<'a>> { if is_crlf(input) { return Ok((input, (HeaderValue::create_empty_value(), None))); } let (inp, value) = parser(input)?; let (inp, _) = complete::space0(inp)?; if inp.is_empty() { return sip_parse_error!(1, "Error parse header value"); } if inp[0] != b',' && inp[0] != b';' && inp[0] != b' ' && !is_crlf(inp) { return sip_parse_error!(2, "Error parse header value"); } if inp[0] == b';' { let (inp, params) = Header::try_take_parameters(inp)?; return Ok((inp, (value, params))); } Ok((inp, (value, None))) } fn try_take_parameters( input: &'a [u8], ) -> nom::IResult<&'a [u8], Option<GenericParams<'a>>, SipParseError<'a>> { if input.is_empty() || i
random
[ { "content": "pub fn take(input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let (inp, res_val) = take_while1(is_digit)(input)?;\n\n let (_, hdr_val) = HeaderValue::new(res_val, HeaderValueType::Digit, None, None)?;\n\n Ok((inp, hdr_val))\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/digit_header.rs", "rank": 0, "score": 104053.29358543671 }, { "content": "pub trait SipHeaderParser {\n\n // It should returns COMMA in first parameter if it header with multiple value\n\n // or SEMI if it contains perameters\n\n fn take_value(input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError>;\n\n}\n", "file_path": "crates/sipmsg/src/headers/traits.rs", "rank": 1, "score": 95889.0240832187 }, { "content": "pub fn take<'a>(\n\n source_input: &'a [u8],\n\n) -> nom::IResult<&'a [u8], (&[u8], HeaderTags<'a>, Option<SipUri>), SipParseError<'a>> {\n\n if source_input.len() < 5 {\n\n return sip_parse_error!(2, \"name-addr header value is too short\");\n\n }\n\n let mut tags = HeaderTags::new();\n\n let next_value_type = predict_value_type(source_input);\n\n let input = if next_value_type == NameAddrValueType::QuotedDisplayName\n\n || next_value_type == NameAddrValueType::TokenDisplayName\n\n {\n\n let (input, display_name) = take_display_name(source_input, next_value_type)?;\n\n tags.insert(HeaderTagType::DisplayName, display_name);\n\n input\n\n } else {\n\n source_input\n\n };\n\n\n\n if input.is_empty() {\n\n return sip_parse_error!(3, \"Contact header value is invalid\");\n", "file_path": "crates/sipmsg/src/headers/name_addr.rs", "rank": 2, "score": 88049.20475688708 }, { "content": "pub fn take<'a>(\n\n source_input: &'a [u8],\n\n) -> nom::IResult<&[u8], (&[u8] /*vstr*/, HeaderTags<'a>), SipParseError> {\n\n let (input, auth_schema) = take_while1(is_token_char)(source_input)?;\n\n let mut tags = HeaderTags::new();\n\n tags.insert(HeaderTagType::AuthSchema, auth_schema);\n\n let (input, _) = take_sws(input)?; // LWS\n\n let mut input_tmp = 
input;\n\n // I use this value in end of fucntion. But compiler throw warning:\n\n // \"value assigned to `count_wsps_after_last_value` is never read\"\n\n // So, lets name it _* do supress warning\n\n let mut _count_wsps_after_last_value = 0;\n\n loop {\n\n let (input, param_name) = take_while1(is_token_char)(input_tmp)?;\n\n let (input, _) = take_sws_token::equal(input)?;\n\n\n\n let (input, (param_name, param_value)) = if input[0] == b'\"' {\n\n let (input, (_, param_value, wsps)) = nom_wrappers::take_quoted_string(input)?;\n\n _count_wsps_after_last_value = wsps.len();\n\n (input, (param_name, param_value))\n", "file_path": "crates/sipmsg/src/headers/auth_params.rs", "rank": 3, "score": 88049.20475688708 }, { "content": "#[inline]\n\npub fn is_digit(c: u8) -> bool {\n\n c >= 0x30 && c <= 0x39\n\n}\n\n\n\n/// DQUOTE = %x22\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 4, "score": 87931.79166619579 }, { "content": "pub fn param_name_to_tag(value: &[u8]) -> Option<HeaderTagType> {\n\n let val = from_utf8(value).unwrap();\n\n\n\n let aval = Ascii::new(val);\n\n macro_rules! 
match_str {\n\n ($input_str:expr, $enum_result:expr) => {\n\n if aval == $input_str {\n\n return Some($enum_result);\n\n }\n\n };\n\n }\n\n match_str!(\"username\", HeaderTagType::Username);\n\n match_str!(\"realm\", HeaderTagType::Realm);\n\n match_str!(\"nonce\", HeaderTagType::Nonce);\n\n match_str!(\"uri\", HeaderTagType::DigestUri);\n\n match_str!(\"response\", HeaderTagType::Dresponse);\n\n match_str!(\"algorithm\", HeaderTagType::Algorithm);\n\n match_str!(\"cnonce\", HeaderTagType::Cnonce);\n\n match_str!(\"opaque\", HeaderTagType::Opaque);\n\n match_str!(\"qop\", HeaderTagType::QopValue);\n\n match_str!(\"nc\", HeaderTagType::NonceCount);\n\n match_str!(\"domain\", HeaderTagType::Domain);\n\n match_str!(\"stale\", HeaderTagType::Stale);\n\n None\n\n}\n\n\n", "file_path": "crates/sipmsg/src/headers/auth_params.rs", "rank": 5, "score": 80938.33921734459 }, { "content": "pub fn take(input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let (inp, res_val) = take_while1(is_token_char)(input)?;\n\n let (_, hdr_val) = HeaderValue::new(res_val, HeaderValueType::TokenValue, None, None)?;\n\n Ok((inp, hdr_val))\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/token_header.rs", "rank": 6, "score": 79911.51278460019 }, { "content": "use crate::{\n\n common::{bnfcore::is_digit, errorparse::SipParseError},\n\n headers::header::{HeaderValue, HeaderValueType},\n\n};\n\nuse nom::bytes::complete::take_while1;\n\n\n", "file_path": "crates/sipmsg/src/headers/parsers/digit_header.rs", "rank": 7, "score": 77857.12058009698 }, { "content": "pub fn take(source_input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let mut tmp_input = source_input;\n\n loop {\n\n let (input, _) = take_until(\"\\r\\n\")(tmp_input)?;\n\n if input.len() > 3 && is_wsp(input[2]) {\n\n let (input, _) = take_sws(input)?;\n\n tmp_input = input;\n\n continue;\n\n }\n\n tmp_input = input;\n\n break;\n\n }\n\n let (_, hdr_val) = HeaderValue::new(\n\n 
&source_input[..source_input.len() - tmp_input.len()],\n\n HeaderValueType::Utf8Text,\n\n None,\n\n None,\n\n )?;\n\n Ok((tmp_input, hdr_val))\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/utf8_trim_header.rs", "rank": 8, "score": 77809.22223246722 }, { "content": "use crate::{\n\n common::bnfcore::is_unreserved, common::hostport::HostPort,\n\n common::nom_wrappers::from_utf8_nom, common::nom_wrappers::take_while_with_escaped,\n\n errorparse::SipParseError, headers::GenericParams, userinfo::UserInfo,\n\n};\n\nuse alloc::collections::btree_map::BTreeMap;\n\nuse nom::bytes::complete::{take, take_till, take_until};\n\n\n\nuse core::str;\n\n\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub enum RequestUriScheme {\n\n SIP,\n\n SIPS,\n\n}\n\n\n\nimpl RequestUriScheme {\n\n pub fn from_bytes(s: &[u8]) -> Result<RequestUriScheme, nom::Err<SipParseError>> {\n\n match s {\n\n b\"sip\" => Ok(Self::SIP),\n\n b\"sips\" => Ok(Self::SIPS),\n\n _ => sip_parse_error!(101, \"Can't parse sipuri scheme\"),\n\n }\n\n }\n\n}\n\n\n\n/// hnv-unreserved = \"[\" / \"]\" / \"/\" / \"?\" / \":\" / \"+\" / \"$\"\n\n#[inline]\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 9, "score": 73945.00902051333 }, { "content": " Ok((inp2, result))\n\n }\n\n \n\n}\n\n\n\n// URI = SIP-URI / SIPS-URI\n\n// SIP-URI = \"sip:\" [ userinfo ] hostport\n\n// uri-parameters [ headers ]\n\n// SIPS-URI = \"sips:\" [ userinfo ] hostport\n\n// uri-parameters [ headers ]\n\n// userinfo = ( user / telephone-subscriber ) [ \":\" password ] \"@\"\n\n// hostport = host [ \":\" port ]\n\n/// Its general form, in the case of a SIP URI, is: sip:user:password@host:port;uri-parameters?headers\n\n#[derive(PartialEq, Debug)]\n\npub struct SipUri<'a> {\n\n pub scheme: RequestUriScheme,\n\n user_info: Option<UserInfo<'a>>,\n\n pub hostport: HostPort<'a>,\n\n // Temporary use parsing from generic-parameters.rs\n\n // TODO make according RFC\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 
10, "score": 73943.18254190481 }, { "content": " }\n\n\n\n pub fn parse(input: &'a [u8]) -> nom::IResult<&[u8], SipUri<'a>, SipParseError> {\n\n SipUri::parse_ext(input, true)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_sip_uri_parse() {\n\n let (rest, sip_uri) =\n\n SipUri::parse_ext(\"sip:192.0.2.254:5061>\\r\\nblablabla@somm\".as_bytes(), true).unwrap();\n\n assert_eq!(rest, \">\\r\\nblablabla@somm\".as_bytes());\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n assert_eq!(sip_uri.hostport.host, \"192.0.2.254\");\n\n assert_eq!(sip_uri.hostport.port, Some(5061));\n\n /************************************************/\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 11, "score": 73938.87000673203 }, { "content": " assert_eq!(\n\n sip_uri.headers().unwrap().get(&\"subject\"),\n\n Some(&\"project%20x\")\n\n );\n\n assert_eq!(sip_uri.headers().unwrap().get(&\"priority\"), Some(&\"urgent\"));\n\n assert_eq!(sip_uri.user_info().unwrap().value, \"alice\");\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIPS);\n\n assert_eq!(sip_uri.hostport.host, \"atlanta.com\");\n\n\n\n assert_eq!(sip_uri.params(), None);\n\n\n\n assert_eq!(rest, b\" ;transport=tcp\");\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 12, "score": 73938.62950943843 }, { "content": " parameters: Option<GenericParams<'a>>,\n\n headers: Option<BTreeMap<&'a str, &'a str>>,\n\n}\n\n\n\nimpl<'a> SipUri<'a> {\n\n pub fn user_info(&self) -> Option<&UserInfo<'a>> {\n\n self.user_info.as_ref()\n\n }\n\n\n\n pub fn params(&self) -> Option<&GenericParams<'a>> {\n\n self.parameters.as_ref()\n\n }\n\n\n\n pub fn headers(&self) -> Option<&BTreeMap<&'a str, &'a str>> {\n\n self.headers.as_ref()\n\n }\n\n\n\n fn try_parse_params(\n\n input: &'a [u8],\n\n ) -> nom::IResult<&[u8], Option<GenericParams<'a>>, SipParseError> {\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 13, "score": 73938.48474799712 }, { 
"content": "\n\n let (rest, sip_uri) = SipUri::parse_ext(\n\n \"sips:alice@atlanta.com?subject=project%20x&priority=urgent\".as_bytes(),\n\n true,\n\n )\n\n .unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(\n\n sip_uri.headers().unwrap().get(&\"subject\"),\n\n Some(&\"project%20x\")\n\n );\n\n assert_eq!(sip_uri.headers().unwrap().get(&\"priority\"), Some(&\"urgent\"));\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIPS);\n\n assert_eq!(sip_uri.user_info().unwrap().value, \"alice\");\n\n assert_eq!(sip_uri.hostport.host, \"atlanta.com\");\n\n\n\n let (rest, sip_uri) = SipUri::parse_ext(\n\n \"sip:atlanta.com;method=REGISTER?to=alice%40atlanta.com\".as_bytes(),\n\n true,\n\n )\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 14, "score": 73937.71027913412 }, { "content": " .unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(\n\n sip_uri.headers().unwrap().get(&\"to\"),\n\n Some(&\"alice%40atlanta.com\")\n\n );\n\n assert_eq!(\n\n sip_uri.params().unwrap().get(&\"method\"),\n\n Some(&Some(\"REGISTER\"))\n\n );\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n assert_eq!(sip_uri.hostport.host, \"atlanta.com\");\n\n assert_eq!(sip_uri.user_info(), None);\n\n\n\n let (rest, sip_uri) = SipUri::parse_ext(\n\n \"sips:alice@atlanta.com?subject=project%20x&priority=urgent ;transport=tcp\".as_bytes(),\n\n false,\n\n )\n\n .unwrap();\n\n // assert_eq!(rest.len(), 0);\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 15, "score": 73937.31145557376 }, { "content": " Ok((input, headers)) => {\n\n return Ok((input, Some(headers)));\n\n }\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n }\n\n }\n\n\n\n // This function written not well. 
So, if you want, you can refactor this function\n\n pub fn parse_ext(\n\n input: &'a [u8],\n\n parse_with_parameters: bool,\n\n ) -> nom::IResult<&[u8], SipUri<'a>, SipParseError> {\n\n let (input, uri_scheme) = take_until(\":\")(input)?;\n\n let (input_after_scheme, _) = take(1usize)(input)?; // skip ':'\n\n let scheme = RequestUriScheme::from_bytes(uri_scheme)?;\n\n\n\n let (right_with_ampersat, before_ampersat) =\n\n take_till(|c| c == b'@' || c == b'\\n' || c == b',')(input_after_scheme)?;\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 16, "score": 73936.37154233143 }, { "content": " assert_eq!(sip_uri.hostport.host, \"atlanta.com\");\n\n assert_eq!(sip_uri.hostport.port, None);\n\n assert_eq!(\n\n sip_uri.params().unwrap().get(&\"transport\"),\n\n Some(&Some(\"tcp\"))\n\n );\n\n\n\n let (rest, sip_uri) = SipUri::parse_ext(\n\n \"sip:+1-212-555-1212:1234@gateway.com;user=phone\".as_bytes(),\n\n true,\n\n )\n\n .unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n assert_eq!(sip_uri.user_info().unwrap().value, \"+1-212-555-1212\");\n\n assert_eq!(sip_uri.user_info().unwrap().password, Some(\"1234\"));\n\n assert_eq!(sip_uri.hostport.host, \"gateway.com\");\n\n assert_eq!(sip_uri.hostport.port, None);\n\n assert_eq!(sip_uri.params().unwrap().get(&\"user\"), Some(&Some(\"phone\")));\n\n\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 17, "score": 73935.80078812332 }, { "content": " let (rest, sip_uri) = SipUri::parse_ext(\"sips:1212@gateway.com\".as_bytes(), true).unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIPS);\n\n assert_eq!(sip_uri.user_info().unwrap().value, \"1212\");\n\n assert_eq!(sip_uri.hostport.host, \"gateway.com\");\n\n\n\n let (rest, sip_uri) =\n\n SipUri::parse_ext(\"sip:alice@192.0.2.4:8888\".as_bytes(), true).unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n 
assert_eq!(sip_uri.user_info().unwrap().value, \"alice\");\n\n assert_eq!(sip_uri.hostport.host, \"192.0.2.4\");\n\n assert_eq!(sip_uri.hostport.port, Some(8888));\n\n\n\n let (rest, sip_uri) =\n\n SipUri::parse_ext(\"sip:alice;day=tuesday@atlanta.com\".as_bytes(), true).unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n assert_eq!(sip_uri.user_info().unwrap().value, \"alice;day=tuesday\");\n\n assert_eq!(sip_uri.hostport.host, \"atlanta.com\");\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 18, "score": 73935.72031271642 }, { "content": " let (rest, sip_uri) = SipUri::parse_ext(\"sip:atlanta.com\".as_bytes(), true).unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n assert_eq!(sip_uri.hostport.host, \"atlanta.com\");\n\n\n\n let (rest, sip_uri) = SipUri::parse_ext(\"sip:alice@atlanta.com\".as_bytes(), true).unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n assert_eq!(sip_uri.user_info().unwrap().value, \"alice\");\n\n assert_eq!(sip_uri.hostport.host, \"atlanta.com\");\n\n\n\n let (rest, sip_uri) = SipUri::parse_ext(\n\n \"sip:alice:secretword@atlanta.com;transport=tcp\".as_bytes(),\n\n true,\n\n )\n\n .unwrap();\n\n assert_eq!(rest.len(), 0);\n\n assert_eq!(sip_uri.scheme, RequestUriScheme::SIP);\n\n assert_eq!(sip_uri.user_info().unwrap().value, \"alice\");\n\n assert_eq!(sip_uri.user_info().unwrap().password, Some(\"secretword\"));\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 19, "score": 73935.62856883045 }, { "content": " } else {\n\n SipUri::try_parse_params(input)?\n\n };\n\n\n\n let (input, headers) = if input.is_empty() {\n\n (input, None)\n\n } else {\n\n SipUri::try_parse_headers(input)?\n\n };\n\n\n\n Ok((\n\n input,\n\n SipUri {\n\n scheme: scheme,\n\n user_info: userinfo,\n\n hostport: hostport,\n\n parameters: params,\n\n headers: headers,\n\n },\n\n ))\n", "file_path": 
"crates/sipmsg/src/headers/sipuri.rs", "rank": 20, "score": 73935.50171859302 }, { "content": " let (input, headers) = if input.is_empty() {\n\n (input, None)\n\n } else {\n\n SipUri::try_parse_headers(input)?\n\n };\n\n\n\n return Ok((\n\n input,\n\n SipUri {\n\n scheme: scheme,\n\n user_info: userinfo,\n\n hostport: hostport,\n\n parameters: None,\n\n headers: headers,\n\n },\n\n ));\n\n }\n\n\n\n let (input, params) = if input.is_empty() {\n\n (input, None)\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 21, "score": 73935.48249062405 }, { "content": "\n\n fn parse(input: &'a [u8]) -> nom::IResult<&[u8], BTreeMap<&'a str, &'a str>, SipParseError> {\n\n let (input, c) = take(1usize)(input)?;\n\n if c[0] != b'?' {\n\n return sip_parse_error!(1, \"The first character of headers must be '?'\");\n\n }\n\n\n\n let mut result = BTreeMap::new();\n\n let mut inp2 = input;\n\n loop {\n\n let (input, sip_uri_header) = SipUriHeader::parse_header(inp2)?;\n\n result.insert(sip_uri_header.name, sip_uri_header.value);\n\n if input.len() == 0 || input[0] != b'&' {\n\n inp2 = input;\n\n break;\n\n }\n\n let (input, _) = take(1usize)(input)?;\n\n inp2 = input;\n\n }\n\n\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 22, "score": 73935.32455384823 }, { "content": " SipUriHeader {\n\n name: hname_str,\n\n value: \"\",\n\n },\n\n ));\n\n }\n\n\n\n let (input, _) = take(1usize)(input)?; // skip =\n\n\n\n let (input, hvalue) = take_while_with_escaped(input, is_hnv_char)?;\n\n let (_, hname_str) = from_utf8_nom(hname)?;\n\n let (_, hvalue_str) = from_utf8_nom(hvalue)?;\n\n Ok((\n\n input,\n\n SipUriHeader {\n\n name: hname_str,\n\n value: hvalue_str,\n\n },\n\n ))\n\n }\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 23, "score": 73934.58995257334 }, { "content": " if input[0] != b';' {\n\n return Ok((input, None));\n\n }\n\n match GenericParams::parse(input) {\n\n Ok((input, params)) => {\n\n return Ok((input, Some(params)));\n\n }\n\n 
Err(e) => {\n\n return Err(e);\n\n }\n\n }\n\n }\n\n\n\n fn try_parse_headers(\n\n input: &'a [u8],\n\n ) -> nom::IResult<&[u8], Option<BTreeMap<&'a str, &'a str>>, SipParseError> {\n\n if input[0] != b'?' {\n\n return Ok((input, None));\n\n }\n\n match SipUriHeader::parse(input) {\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 24, "score": 73934.5225637998 }, { "content": " let is_user_info_present = right_with_ampersat.is_empty()\n\n || right_with_ampersat[0] == b'\\n'\n\n || right_with_ampersat[0] == b',';\n\n // If right_with_apersat reach '\\n' is empty there is no user info\n\n let userinfo = if is_user_info_present {\n\n None\n\n } else {\n\n Some(UserInfo::from_bytes(before_ampersat)?)\n\n };\n\n // if: right_with_apersat is empty we take whole string to further parsing\n\n // else: otherwise need to skip userinfo part\n\n let input = if is_user_info_present {\n\n input_after_scheme\n\n } else {\n\n &right_with_ampersat[1..] /* skip '@' */\n\n };\n\n\n\n let (input, hostport) = HostPort::parse(input)?;\n\n\n\n if !parse_with_parameters {\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 25, "score": 73931.52121869437 }, { "content": "#[test]\n\nfn callinfo_test() {\n\n let res = SipHeader::parse(\n\n \"Call-Info: <http://wwww.example.com/alice/photo.jpg> ;purpose=icon,\\r\\n \\\n\n <http://www.example.com/alice/> ;purpose=info\\r\\n\"\n\n .as_bytes(),\n\n );\n\n let (input, (_, hdrs)) = res.unwrap();\n\n assert_eq!(\n\n hdrs[0].value.vstr,\n\n \"<http://wwww.example.com/alice/photo.jpg>\"\n\n );\n\n assert_eq!(\n\n hdrs[0].value.tags().unwrap()[&SipHeaderTagType::AbsoluteURI],\n\n \"http://wwww.example.com/alice/photo.jpg\".as_bytes()\n\n );\n\n\n\n assert_eq!(\n\n hdrs[0].params().unwrap().get(\"purpose\"),\n\n Some(&Some(\"icon\"))\n\n );\n", "file_path": "crates/sipmsg/tests/header.rs", "rank": 26, "score": 71857.88082885777 }, { "content": "use crate::common::nom_wrappers::take_sws;\n\nuse crate::common::{\n\n 
bnfcore::{is_digit, is_token_char},\n\n errorparse::SipParseError,\n\n};\n\nuse crate::headers::{\n\n header::{HeaderTagType, HeaderTags, HeaderValue, HeaderValueType},\n\n traits::SipHeaderParser,\n\n};\n\n\n\nuse nom::bytes::complete::take_while1;\n\n\n\n/// CSeq = \"CSeq\" HCOLON 1*DIGIT LWS Method\n\npub struct CSeq;\n\n\n\nimpl SipHeaderParser for CSeq {\n\n fn take_value(source_input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let mut tags = HeaderTags::new();\n\n let (input, number) = take_while1(is_digit)(source_input)?;\n\n let (input, _) = take_sws(input)?;\n", "file_path": "crates/sipmsg/src/headers/parsers/cseq.rs", "rank": 27, "score": 71840.78579267857 }, { "content": " let (input, method) = take_while1(is_token_char)(input)?;\n\n tags.insert(HeaderTagType::Number, number);\n\n tags.insert(HeaderTagType::Method, method);\n\n\n\n let (_, hdr_val) = HeaderValue::new(\n\n &source_input[..source_input.len() - input.len()],\n\n HeaderValueType::CSeq,\n\n Some(tags),\n\n None,\n\n )?;\n\n Ok((input, hdr_val))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_cseq_value() {\n", "file_path": "crates/sipmsg/src/headers/parsers/cseq.rs", "rank": 28, "score": 71833.23143292252 }, { "content": " let (input, val) = CSeq::take_value(\"4711 INVITE\\r\\n\".as_bytes()).unwrap();\n\n assert_eq!(input, \"\\r\\n\".as_bytes());\n\n assert_eq!(val.vstr, \"4711 INVITE\");\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::Number],\n\n \"4711\".as_bytes()\n\n );\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::Method],\n\n \"INVITE\".as_bytes()\n\n );\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/cseq.rs", "rank": 29, "score": 71832.45493000513 }, { "content": "use crate::{\n\n common::{bnfcore::is_token_char, errorparse::SipParseError, nom_wrappers::take_quoted_string},\n\n headers::{\n\n header::{HeaderTagType, HeaderTags, HeaderValue, HeaderValueType},\n\n 
traits::SipHeaderParser,\n\n },\n\n};\n\nuse nom::{\n\n bytes::complete::take_while1,\n\n character::{complete::space1, is_digit},\n\n};\n\n\n\n// Warning = \"Warning\" HCOLON warning-value *(COMMA warning-value)\n\n// warning-value = warn-code SP warn-agent SP warn-text\n\n// warn-code = 3DIGIT\n\n// warn-agent = hostport / pseudonym\n\n// warn-text = quoted-string\n\n// pseudonym = token\n\npub struct Warning;\n\n\n", "file_path": "crates/sipmsg/src/headers/parsers/warning.rs", "rank": 30, "score": 71827.89655320672 }, { "content": "use crate::{\n\n common::{\n\n bnfcore::{is_crlf, is_digit},\n\n errorparse::SipParseError,\n\n nom_wrappers::take_sws,\n\n },\n\n headers::{\n\n header::{HeaderTagType, HeaderTags, HeaderValue, HeaderValueType},\n\n traits::SipHeaderParser,\n\n },\n\n};\n\nuse nom::bytes::complete::take_while1;\n\n\n\npub struct Timestamp;\n\n\n\nimpl SipHeaderParser for Timestamp {\n\n fn take_value(source_input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let (input, _int_part_time) = take_while1(is_digit)(source_input)?;\n\n if input.is_empty() {\n\n return sip_parse_error!(1, \"Invalid Timestamp Header\");\n", "file_path": "crates/sipmsg/src/headers/parsers/timestamp.rs", "rank": 31, "score": 71826.73780864471 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_via_value() {\n\n let (input, val) =\n\n Via::take_value(\"SIP/2.0/UDP bobspc.biloxi.com:5060;received=192.0.2.4\\r\\n\".as_bytes())\n\n .unwrap();\n\n assert_eq!(val.vstr, \"SIP/2.0/UDP bobspc.biloxi.com:5060\");\n\n assert_eq!(input, b\";received=192.0.2.4\\r\\n\");\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::ProtocolName], b\"SIP\");\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::ProtocolVersion], b\"2.0\");\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::ProtocolTransport],\n\n b\"UDP\"\n\n );\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::Host],\n\n b\"bobspc.biloxi.com\"\n\n );\n\n 
assert_eq!(val.tags().unwrap()[&HeaderTagType::Port], b\"5060\");\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/via.rs", "rank": 32, "score": 71823.03711924095 }, { "content": "use crate::{\n\n common::{\n\n bnfcore::is_token_char, errorparse::SipParseError, hostport::HostPort,\n\n nom_wrappers::take_lws, take_sws_token,\n\n },\n\n headers::{\n\n header::{HeaderTagType, HeaderTags, HeaderValue, HeaderValueType},\n\n traits::SipHeaderParser,\n\n },\n\n};\n\n\n\nuse nom::bytes::complete::take_while1;\n\n\n\n// Via = ( \"Via\" / \"v\" ) HCOLON via-parm *(COMMA via-parm)\n\n// via-parm = sent-protocol LWS sent-by *( SEMI via-params )\n\n// via-params = via-ttl / via-maddr\n\n// / via-received / via-branch\n\n// / via-extension\n\n// via-ttl = \"ttl\" EQUAL ttl\n\n// via-maddr = \"maddr\" EQUAL host\n", "file_path": "crates/sipmsg/src/headers/parsers/via.rs", "rank": 33, "score": 71823.00525487856 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn test_timestamp_value() {\n\n let (input, val) = Timestamp::take_value(b\"1.2\\r\\n\").unwrap();\n\n assert_eq!(input, b\"\\r\\n\");\n\n assert_eq!(val.vstr, \"1.2\");\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::TimveVal],\n\n \"1.2\".as_bytes()\n\n );\n\n let (input, val) = Timestamp::take_value(b\"12.34 0.5\\r\\n\").unwrap();\n\n assert_eq!(input, b\"\\r\\n\");\n\n assert_eq!(val.vstr, \"12.34 0.5\");\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::TimveVal], b\"12.34\");\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::Delay], b\"0.5\");\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/timestamp.rs", "rank": 34, "score": 71822.87889900769 }, { "content": " None,\n\n )?;\n\n Ok((input, hdr_val))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_warn_value() {\n\n let (input, val) =\n\n Warning::take_value(\"370 devnull \\\"Choose a bigger pipe\\\"\\r\\n\".as_bytes()).unwrap();\n\n assert_eq!(val.vstr, \"370 devnull \\\"Choose a bigger 
pipe\\\"\");\n\n assert_eq!(input, b\"\\r\\n\");\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::WarnCode], b\"370\");\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::WarnAgent], b\"devnull\");\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::WarnText],\n", "file_path": "crates/sipmsg/src/headers/parsers/warning.rs", "rank": 35, "score": 71822.82949309766 }, { "content": "use crate::{\n\n common::{bnfcore::is_word_char, errorparse::SipParseError},\n\n headers::{\n\n header::{HeaderTagType, HeaderTags, HeaderValue, HeaderValueType},\n\n traits::SipHeaderParser,\n\n },\n\n};\n\nuse nom::bytes::complete::{take, take_while1};\n\n\n\n/// Call-ID = ( \"Call-ID\" / \"i\" ) HCOLON callid\n\n/// callid = word [ \"@\" word ]\n\npub struct CallID;\n\n\n\nimpl SipHeaderParser for CallID {\n\n fn take_value(source_input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let mut tags = HeaderTags::new();\n\n\n\n let (input, id) = take_while1(is_word_char)(source_input)?;\n\n tags.insert(HeaderTagType::ID, id);\n\n if !input.is_empty() && input[0] == b'@' {\n", "file_path": "crates/sipmsg/src/headers/parsers/callid.rs", "rank": 36, "score": 71822.47491608959 }, { "content": "// via-received = \"received\" EQUAL (IPv4address / IPv6address)\n\n// via-branch = \"branch\" EQUAL token\n\n// via-extension = generic-param\n\n// sent-protocol = protocol-name SLASH protocol-version\n\n// SLASH transport\n\n// protocol-name = \"SIP\" / token\n\n// protocol-version = token\n\n// transport = \"UDP\" / \"TCP\" / \"TLS\" / \"SCTP\"\n\n// / other-transport\n\n// sent-by = host [ COLON port ]\n\n// ttl = 1*3DIGIT ; 0 to 255\n\npub struct Via;\n\n\n\nimpl SipHeaderParser for Via {\n\n fn take_value(source_input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let (input, protocol_name) = take_while1(is_token_char)(source_input)?;\n\n let (input, _) = take_sws_token::slash(input)?;\n\n let (input, protocol_version) = 
take_while1(is_token_char)(input)?;\n\n let (input, _) = take_sws_token::slash(input)?;\n\n let (input, protocol_transport) = take_while1(is_token_char)(input)?;\n", "file_path": "crates/sipmsg/src/headers/parsers/via.rs", "rank": 37, "score": 71821.76924419006 }, { "content": " b\"Choose a bigger pipe\"\n\n );\n\n\n\n let (input, val) = Warning::take_value(\n\n \"307 isi.edu \\\"Session parameter 'foo' not understood\\\"\\r\\n\".as_bytes(),\n\n )\n\n .unwrap();\n\n assert_eq!(input, b\"\\r\\n\");\n\n assert_eq!(\n\n val.vstr,\n\n \"307 isi.edu \\\"Session parameter 'foo' not understood\\\"\"\n\n );\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::WarnCode], b\"307\");\n\n assert_eq!(val.tags().unwrap()[&HeaderTagType::WarnAgent], b\"isi.edu\");\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::WarnText],\n\n \"Session parameter 'foo' not understood\".as_bytes()\n\n );\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/warning.rs", "rank": 38, "score": 71820.74996179105 }, { "content": "impl SipHeaderParser for Warning {\n\n fn take_value(source_input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let (input, warn_code) = take_while1(is_digit)(source_input)?;\n\n if warn_code.len() != 3 {\n\n return sip_parse_error!(1, \"Invalid warning code\");\n\n }\n\n let (input, _) = space1(input)?;\n\n let (input, warn_agent) = take_while1(is_token_char)(input)?;\n\n let (input, _) = space1(input)?;\n\n let (input, (_, warn_text, _)) = take_quoted_string(input)?;\n\n\n\n let mut tags = HeaderTags::new();\n\n tags.insert(HeaderTagType::WarnCode, warn_code);\n\n tags.insert(HeaderTagType::WarnAgent, warn_agent);\n\n tags.insert(HeaderTagType::WarnText, warn_text);\n\n\n\n let (_, hdr_val) = HeaderValue::new(\n\n &source_input[..source_input.len() - input.len()],\n\n HeaderValueType::Warning,\n\n Some(tags),\n", "file_path": "crates/sipmsg/src/headers/parsers/warning.rs", "rank": 39, "score": 71819.83583877576 }, { "content": " let (input, 
_) = take_lws(input)?;\n\n let (input, (host, port)) = HostPort::take_hostport(input)?;\n\n let mut tags = HeaderTags::new();\n\n tags.insert(HeaderTagType::ProtocolName, protocol_name);\n\n tags.insert(HeaderTagType::ProtocolVersion, protocol_version);\n\n tags.insert(HeaderTagType::ProtocolTransport, protocol_transport);\n\n tags.insert(HeaderTagType::Host, host);\n\n if port != None {\n\n tags.insert(HeaderTagType::Port, port.unwrap());\n\n }\n\n\n\n let (_, hdr_val) = HeaderValue::new(\n\n &source_input[..source_input.len() - input.len()],\n\n HeaderValueType::Via,\n\n Some(tags),\n\n None,\n\n )?;\n\n Ok((input, hdr_val))\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/via.rs", "rank": 40, "score": 71818.14771188115 }, { "content": " }\n\n let mut tags = HeaderTags::new();\n\n\n\n let input = if input[0] == b'.' {\n\n let (input, _) = take_while1(is_digit)(&input[1..])?; // take fractional_part_time\n\n input\n\n } else {\n\n input\n\n };\n\n tags.insert(\n\n HeaderTagType::TimveVal,\n\n &source_input[..source_input.len() - input.len()],\n\n );\n\n let (start_possible_delay_val, _) = take_sws(input)?;\n\n let mut tmp_inp = start_possible_delay_val;\n\n if !is_crlf(input) {\n\n let (input, _) = take_while1(is_digit)(tmp_inp)?;\n\n if !input.is_empty() && input[0] == b'.' 
{\n\n let (input, _) = take_while1(is_digit)(&input[1..])?;\n\n tmp_inp = input;\n", "file_path": "crates/sipmsg/src/headers/parsers/timestamp.rs", "rank": 41, "score": 71815.99998336374 }, { "content": " } else {\n\n tmp_inp = input;\n\n }\n\n tags.insert(\n\n HeaderTagType::Delay,\n\n &start_possible_delay_val[..start_possible_delay_val.len() - tmp_inp.len()],\n\n );\n\n };\n\n let (_, hdr_val) = HeaderValue::new(\n\n &source_input[..source_input.len() - tmp_inp.len()],\n\n HeaderValueType::Timestamp,\n\n Some(tags),\n\n None,\n\n )?;\n\n Ok((tmp_inp, hdr_val))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "crates/sipmsg/src/headers/parsers/timestamp.rs", "rank": 42, "score": 71815.54243208979 }, { "content": "mod test {\n\n fn call_id_test_case(\n\n source_input: &str,\n\n expected_string_value: &str,\n\n expected_id_value: &str,\n\n expected_host_value: Option<&str>,\n\n ) {\n\n let val = CallID::take_value(source_input.as_bytes());\n\n let (input, val) = val.unwrap();\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::ID],\n\n expected_id_value.as_bytes()\n\n );\n\n if expected_host_value != None {\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::Host],\n\n expected_host_value.unwrap().as_bytes()\n\n );\n\n }\n\n assert_eq!(val.vstr, expected_string_value);\n", "file_path": "crates/sipmsg/src/headers/parsers/callid.rs", "rank": 43, "score": 71815.50828797361 }, { "content": " assert_eq!(input, b\"\\r\\n\");\n\n }\n\n\n\n use super::*;\n\n #[test]\n\n fn authorization_parser_test() {\n\n call_id_test_case(\n\n \"3848276298220188511@atlanta.example.com\\r\\n\",\n\n \"3848276298220188511@atlanta.example.com\",\n\n \"3848276298220188511\",\n\n Some(\"atlanta.example.com\"),\n\n );\n\n\n\n call_id_test_case(\n\n \"f81d4fae-7dec-11d0-a765-00a0c91e6bf6@foo.bar.com\\r\\n\",\n\n \"f81d4fae-7dec-11d0-a765-00a0c91e6bf6@foo.bar.com\",\n\n \"f81d4fae-7dec-11d0-a765-00a0c91e6bf6\",\n\n Some(\"foo.bar.com\"),\n\n );\n\n\n\n call_id_test_case(\n\n 
\"a84b4c76e66710\\r\\n\",\n\n \"a84b4c76e66710\",\n\n \"a84b4c76e66710\",\n\n None,\n\n );\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/parsers/callid.rs", "rank": 44, "score": 71813.88764660212 }, { "content": " let (input, _) = take(1usize)(input)?;\n\n let (input, host) = take_while1(is_word_char)(input)?;\n\n tags.insert(HeaderTagType::Host, host);\n\n\n\n let (_, hdr_val) = HeaderValue::new(\n\n &source_input[..id.len() + host.len() + 1 /* 1 - is '@' */],\n\n HeaderValueType::CallID,\n\n Some(tags),\n\n None,\n\n )?;\n\n\n\n return Ok((input, hdr_val));\n\n }\n\n let (_, hdr_val) = HeaderValue::new(id, HeaderValueType::CallID, Some(tags), None)?;\n\n\n\n return Ok((input, hdr_val));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "crates/sipmsg/src/headers/parsers/callid.rs", "rank": 45, "score": 71813.58253027254 }, { "content": "use crate::{\n\n common::{bnfcore::is_digit, errorparse::SipParseError},\n\n headers::{\n\n header::{HeaderTagType, HeaderTags, HeaderValue, HeaderValueType},\n\n traits::SipHeaderParser\n\n }\n\n};\n\nuse nom::bytes::complete::take_while1;\n\n\n\npub struct MimeVersion;\n\n\n\nimpl SipHeaderParser for MimeVersion {\n\n fn take_value(source_input: &[u8]) -> nom::IResult<&[u8], HeaderValue, SipParseError> {\n\n let (inp, major) = take_while1(is_digit)(source_input)?;\n\n let (inp, _) = nom::character::complete::char('.')(inp)?;\n\n let (inp, minor) = take_while1(is_digit)(inp)?;\n\n let mut tags = HeaderTags::new();\n\n tags.insert(HeaderTagType::Major, major);\n\n tags.insert(HeaderTagType::Minor, minor);\n\n let (_, hdr_val) = HeaderValue::new(\n", "file_path": "crates/sipmsg/src/headers/parsers/mime_version.rs", "rank": 46, "score": 69625.97182872052 }, { "content": " &source_input[..source_input.len() - inp.len()],\n\n HeaderValueType::Digit,\n\n Some(tags),\n\n None,\n\n )?;\n\n Ok((inp, hdr_val))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_mime_value() {\n\n let (input, 
val) = MimeVersion::take_value(\"1.2 \\r\\n\".as_bytes()).unwrap();\n\n assert_eq!(input, \" \\r\\n\".as_bytes());\n\n assert_eq!(val.vstr, \"1.2\");\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::Major],\n", "file_path": "crates/sipmsg/src/headers/parsers/mime_version.rs", "rank": 47, "score": 69623.71300090768 }, { "content": " \"1\".as_bytes()\n\n );\n\n assert_eq!(\n\n val.tags().unwrap()[&HeaderTagType::Minor],\n\n \"2\".as_bytes()\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/sipmsg/src/headers/parsers/mime_version.rs", "rank": 48, "score": 69617.32627519837 }, { "content": "#[inline]\n\nfn is_hnv_char(c: u8) -> bool {\n\n is_unreserved(c) || is_hnv_unreserved_char(c)\n\n}\n\n\n\n// header = hname \"=\" hvalue\n\n// hname = 1*( hnv-unreserved / unreserved / escaped )\n\n// hvalue = *( hnv-unreserved / unreserved / escaped )\n\n// headers = \"?\" header *( \"&\" header )\n\npub struct SipUriHeader<'a> {\n\n pub name: &'a str,\n\n pub value: &'a str,\n\n}\n\n\n\nimpl<'a> SipUriHeader<'a> {\n\n fn parse_header(input: &[u8]) -> nom::IResult<&[u8], SipUriHeader, SipParseError> {\n\n let (input, hname) = take_while_with_escaped(input, is_hnv_char)?;\n\n if input.len() == 0 || input[0] != b'=' {\n\n let (_, hname_str) = from_utf8_nom(hname)?;\n\n return Ok((\n\n input,\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 49, "score": 65343.05424950045 }, { "content": "#[inline]\n\nfn is_hnv_unreserved_char(c: u8) -> bool {\n\n c == b'[' || c == b']' || c == b'/' || c == b'?' 
|| c == b':' || c == b'+' || c == b'$'\n\n}\n\n\n", "file_path": "crates/sipmsg/src/headers/sipuri.rs", "rank": 50, "score": 63498.92073801521 }, { "content": "pub fn take_while_with_escaped(\n\n input: &[u8],\n\n is_fun: fn(c: u8) -> bool,\n\n) -> nom::IResult<&[u8], &[u8], SipParseError> {\n\n let mut idx = 0;\n\n while idx < input.len() {\n\n if is_fun(input[idx]) {\n\n idx += 1;\n\n continue;\n\n } else if is_escaped(&input[idx..]) {\n\n idx += 3;\n\n continue;\n\n }\n\n break;\n\n }\n\n\n\n Ok((&input[idx..], &input[..idx]))\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/nom_wrappers.rs", "rank": 51, "score": 60239.8868923986 }, { "content": "/// trim start and end swses\n\n/// assert_eq(take_while_trim_sws(\" ab c\", is_char), Ok((\"ab\", \"c\")));\n\n/// assert_eq(take_while_trim_sws(\" \\r\\n\\tab c\", is_char), Ok((\"ab\", \"c\")));\n\npub fn take_while_trim_sws(\n\n input: &[u8],\n\n cond_fun: fn(c: u8) -> bool,\n\n) -> nom::IResult<&[u8], (&[u8], &[u8], &[u8]), SipParseError> {\n\n let (input, (sws1, result, sws2)) = tuple((take_sws, take_while1(cond_fun), take_sws))(input)?;\n\n Ok((input, (sws1, result, sws2)))\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/nom_wrappers.rs", "rank": 52, "score": 59180.70326280533 }, { "content": "pub fn take_quoted_string(\n\n source_input: &[u8],\n\n) -> nom::IResult<&[u8], (&[u8], &[u8], &[u8]), SipParseError> {\n\n let (input, ldqout_wsps) = take_sws_token::ldquot(source_input)?;\n\n let (input, result) = take_until_nonescaped_quote(input)?;\n\n let (input, rdqout_wsps) = take_sws_token::rdquot(input)?;\n\n Ok((input, (ldqout_wsps, result, rdqout_wsps)))\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/nom_wrappers.rs", "rank": 53, "score": 59176.91853919925 }, { "content": "#[inline]\n\npub fn is_dquote(c: u8) -> bool {\n\n c == 0x22\n\n}\n\n\n\n/// HEXDIG = DIGIT / \"A\" / \"B\" / \"C\" / \"D\" / \"E\" / \"F\" / \"a\" / \"b\" / \"c\" / \"d\" / \"e\" / \"f\"\n", "file_path": 
"crates/sipmsg/src/common/bnfcore.rs", "rank": 54, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_unreserved(c: u8) -> bool {\n\n is_alphanum(c) || is_mark(c)\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 55, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_alpha(c: u8) -> bool {\n\n (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A)\n\n}\n\n\n\n/// BIT = \"0\" / \"1\"\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 56, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_crlf(i: &[u8]) -> bool {\n\n if i.len() < 2 {\n\n return false;\n\n }\n\n is_cr(i[0]) && is_lf(i[1])\n\n}\n\n\n\n/// CTL = %x00-1F / %x7F\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 57, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_char(c: u8) -> bool {\n\n c >= 0x01 && c <= 0x7F\n\n}\n\n\n\n/// CR = %x0D\n\n/// Carriage return\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 58, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_mark(c: u8) -> bool {\n\n c == b'-'\n\n || c == b'_'\n\n || c == b'.'\n\n || c == b'!'\n\n || c == b'~'\n\n || c == b'*'\n\n || c == b'\\''\n\n || c == b'('\n\n || c == b')'\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 59, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_lhex(c: u8) -> bool {\n\n is_digit(c) || c >= 0x61 && c <= 0x66\n\n}\n\n\n\n/// separators = \"(\" / \")\" / \"<\" / \">\" / \"@\" /\n\n/// \",\" / \";\" / \":\" / \"\\\" / DQUOTE /\n\n/// \"/\" / \"[\" / \"]\" / \"?\" / \"=\" /\n\n/// \"{\" / \"}\" / SP / HTAB\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 60, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_wsp(c: u8) -> bool {\n\n is_sp(c) || is_htab(c)\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 61, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_htab(c: u8) 
-> bool {\n\n c == 0x09\n\n}\n\n\n\n/// LF = %x0A\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 62, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_sp(c: u8) -> bool {\n\n c == 0x20\n\n}\n\n\n\n/// VCHAR = %x21-7E\n\n/// visible (printing) characters\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 63, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_lf(c: u8) -> bool {\n\n c == 0x0A\n\n}\n\n\n\n/// OCTET = %x00-FF\n\n/// 8 bits of data\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 64, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_hexdig(c: u8) -> bool {\n\n is_digit(c)\n\n || c == b'A'\n\n || c == b'B'\n\n || c == b'C'\n\n || c == b'D'\n\n || c == b'E'\n\n || c == b'F'\n\n || c == b'a'\n\n || c == b'b'\n\n || c == b'c'\n\n || c == b'd'\n\n || c == b'e'\n\n || c == b'f'\n\n}\n\n\n\n/// HTAB = %x09\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 65, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_cr(c: u8) -> bool {\n\n c == 0x0D\n\n}\n\n\n\n/// CRLF = CR LF\n\n/// Internet standard newline\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 66, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_reserved(c: u8) -> bool {\n\n c == b';'\n\n || c == b'/'\n\n || c == b'?'\n\n || c == b':'\n\n || c == b'@'\n\n || c == b'&'\n\n || c == b'='\n\n || c == b'+'\n\n || c == b'$'\n\n || c == b','\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 67, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_alphanum(c: u8) -> bool {\n\n is_digit(c) || is_alpha(c)\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 68, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_escaped(i: &[u8]) -> bool {\n\n if i.len() < 3 {\n\n return false;\n\n }\n\n i[0] == b'%' && is_hexdig(i[1]) && is_hexdig(i[2])\n\n}\n\n\n", "file_path": 
"crates/sipmsg/src/common/bnfcore.rs", "rank": 69, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_separators(c: u8) -> bool {\n\n c == b'('\n\n || c == b')'\n\n || c == b'<'\n\n || c == b'>'\n\n || c == b'@'\n\n || c == b','\n\n || c == b';'\n\n || c == b':'\n\n || c == b'\\\\'\n\n || is_dquote(c)\n\n || c == b'/'\n\n || c == b'['\n\n || c == b']'\n\n || c == b'?'\n\n || c == b'='\n\n || c == b'{'\n\n || c == b'}'\n\n || is_sp(c)\n\n || is_htab(c)\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 70, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_ctl(c: u8) -> bool {\n\n c <= 0x1F || c == 0x7F\n\n}\n\n\n\n/// DIGIT = %x30-39\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 71, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_bit(c: u8) -> bool {\n\n c == b'0' || c == b'1'\n\n}\n\n\n\n/// CHAR = %x01-7F\n\n/// any 7-bit US-ASCII character, excluding NUL\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 72, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_octet(_: u8) -> bool {\n\n return true;\n\n}\n\n\n\n/// SP = %x20\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 73, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_vchar(c: u8) -> bool {\n\n c >= 0x21 && c <= 0x7E\n\n}\n\n\n\n/// WSP = SP / HTAB\n\n/// white space\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 74, "score": 56778.76303873276 }, { "content": "#[inline]\n\npub fn is_utf8_cont(c: u8) -> bool {\n\n c >= 0x80 && c <= 0xBF\n\n}\n\n\n\n/// user-unreserved = \"&\" / \"=\" / \"+\" / \"$\" / \",\" / \";\" / \"?\" / \"/\"\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 75, "score": 55715.79468553341 }, { "content": "#[inline]\n\npub fn is_password_char(c: u8) -> bool {\n\n is_unreserved(c) || c == b'&' || c == b'=' || c == b'+' || c == b'$' || c == b','\n\n}\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", 
"rank": 76, "score": 55715.79468553341 }, { "content": "#[inline]\n\npub fn is_token_char(c: u8) -> bool {\n\n is_alphanum(c)\n\n || c == b'-'\n\n || c == b'.'\n\n || c == b'!'\n\n || c == b'%'\n\n || c == b'*'\n\n || c == b'_'\n\n || c == b'+'\n\n || c == b'`'\n\n || c == b'\\''\n\n || c == b'~'\n\n}\n\n\n\n/// UTF8-CONT = %x80-BF\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 77, "score": 55715.79468553341 }, { "content": "#[inline]\n\npub fn is_quoted_pair(i: &[u8]) -> bool {\n\n i[0] == b'\\\\' && (i[1] <= 0x09 || i[1] >= 0x0B && i[1] <= 0x0C || i[1] >= 0x0E && i[1] <= 0x7F)\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 78, "score": 55715.79468553341 }, { "content": "#[inline]\n\npub fn is_word_char(c: u8) -> bool {\n\n is_alphanum(c)\n\n || c == b'-'\n\n || c == b'.'\n\n || c == b'!'\n\n || c == b'%'\n\n || c == b'*'\n\n || c == b'_'\n\n || c == b'+'\n\n || c == b'`'\n\n || c == b'\\''\n\n || c == b'~'\n\n || c == b'('\n\n || c == b')'\n\n || c == b'<'\n\n || c == b'>'\n\n || c == b':'\n\n || c == b'\\\\'\n\n || is_dquote(c)\n\n || c == b'/'\n\n || c == b'['\n\n || c == b']'\n\n || c == b'?'\n\n || c == b'{'\n\n || c == b'}'\n\n}\n\n\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 79, "score": 55715.79468553341 }, { "content": "#[inline]\n\npub fn is_user_unreserved_char(c: u8) -> bool {\n\n c == b'&'\n\n || c == b'='\n\n || c == b'+'\n\n || c == b'$'\n\n || c == b','\n\n || c == b';'\n\n || c == b'?'\n\n || c == b'/'\n\n}\n\n\n\n/// password = *( unreserved / escaped /\n\n/// \"&\" / \"=\" / \"+\" / \"$\" / \",\" )\n", "file_path": "crates/sipmsg/src/common/bnfcore.rs", "rank": 80, "score": 54711.178396979354 }, { "content": "/// Fast determinates message type and minimal validate for further transmission to suitable parser.\n\n/// Does not validate full first line, just first 3 bytes.\n\n/// ```rust\n\n/// assert_eq!(\n\n/// sipmsg::get_sip_message_type(\n\n/// \"INVITE 
sip:vivekg@chair-dnrc.example.com;unknownparam SIP/2.0\".as_bytes()\n\n/// ),\n\n/// sipmsg::SipMessageType::Request\n\n/// );\n\n/// ```\n\npub fn get_message_type(mt: &[u8]) -> MessageType {\n\n if mt.len() < 3 {\n\n MessageType::Unknown\n\n } else {\n\n match &mt[0..3] {\n\n SIP => MessageType::Response,\n\n ACK => MessageType::Request,\n\n BYE => MessageType::Request,\n\n REG => MessageType::Request,\n\n CAN => MessageType::Request,\n\n INF => MessageType::Request,\n\n INV => MessageType::Request,\n\n MES => MessageType::Request,\n\n NOT => MessageType::Request,\n\n OPT => MessageType::Request,\n\n PRA => MessageType::Request,\n\n PUB => MessageType::Request,\n\n REF => MessageType::Request,\n\n SUB => MessageType::Request,\n\n UPD => MessageType::Request,\n\n _ => MessageType::Unknown,\n\n }\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/message.rs", "rank": 81, "score": 53762.617991370804 }, { "content": "use crate::{\n\n common::{bnfcore::is_crlf, errorparse::SipParseError},\n\n headers::{SipHeader, SipRFCHeader},\n\n};\n\nuse alloc::collections::{\n\n btree_map::{BTreeMap, Keys},\n\n VecDeque,\n\n};\n\nuse core::str;\n\nuse nom::bytes::complete::tag;\n\nuse unicase::Ascii;\n\n\n\npub struct Headers<'a> {\n\n rfc_headers: BTreeMap<SipRFCHeader, VecDeque<SipHeader<'a>>>,\n\n ext_headers: Option<BTreeMap<Ascii<&'a str>, VecDeque<SipHeader<'a>>>>,\n\n}\n\n\n\nimpl<'a> Headers<'a> {\n\n pub fn get_ext(&self, key: &'a str) -> Option<&VecDeque<SipHeader<'a>>> {\n\n match &self.ext_headers {\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 86, "score": 46817.33051168115 }, { "content": " \"Accept: application/sdp, application/pkcs7-mime, application/h.245;q=0.1\\r\\n\\\n\n To: sip:user@example.com\\r\\n\\\n\n Route: <sip:192.0.2.254:5060>\\r\\n\\\n\n Route: <sip:[2001:db8::1]>\\r\\n\\\n\n Max-Forwards: 70\\r\\n\\\n\n Call-ID: lwsdisp.1234abcd@funky.example.com\\r\\n\\\n\n CSeq: 60 OPTIONS\\r\\n\\\n\n Extention-Header: Value\\r\\n\\\n\n Via: 
SIP/2.0/UDP funky.example.com;branch=z9hG4bKkdjuw\\r\\n\\r\\nsomebody\"\n\n .as_bytes(),\n\n );\n\n\n\n match parse_headers_result {\n\n Ok((_, hdrs)) => {\n\n assert_eq!(\n\n hdrs.get_rfc(SipRFCHeader::Accept).unwrap()[0].value.vstr,\n\n \"application/sdp\"\n\n );\n\n assert_eq!(\n\n hdrs.get_rfc(SipRFCHeader::Accept).unwrap()[1].value.vstr,\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 88, "score": 46812.43505508269 }, { "content": " \"application/pkcs7-mime\"\n\n );\n\n assert_eq!(\n\n hdrs.get_rfc(SipRFCHeader::Accept).unwrap()[2].value.vstr,\n\n \"application/h.245\"\n\n );\n\n assert_eq!(\n\n hdrs.get_rfc(SipRFCHeader::Accept).unwrap()[2]\n\n .params()\n\n .unwrap()\n\n .get(&\"q\"),\n\n Some(&Some(\"0.1\"))\n\n );\n\n\n\n assert_eq!(\n\n hdrs.get_rfc(SipRFCHeader::Route).unwrap()[0].value.vstr,\n\n \"<sip:192.0.2.254:5060>\"\n\n );\n\n assert_eq!(\n\n hdrs.get_rfc(SipRFCHeader::Route).unwrap()[1].value.vstr,\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 90, "score": 46812.11574949493 }, { "content": "\n\n pub fn get_ext_headers_keys(\n\n &self,\n\n ) -> Option<Keys<'_, Ascii<&'a str>, VecDeque<SipHeader<'a>>>> {\n\n if self.ext_headers == None {\n\n return None;\n\n }\n\n Some(self.ext_headers.as_ref().unwrap().keys())\n\n }\n\n\n\n pub fn parse(input: &'a [u8]) -> nom::IResult<&[u8], Headers<'a>, SipParseError> {\n\n let mut headers_result = Headers::new();\n\n let mut inp2 = input;\n\n loop {\n\n let (input, (rfc_type, vec_headers)) = SipHeader::parse(inp2)?;\n\n match rfc_type {\n\n Some(hdr_type) => {\n\n headers_result.add_rfc_header(hdr_type, vec_headers);\n\n }\n\n None => {\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 91, "score": 46811.93467508849 }, { "content": " \"<sip:[2001:db8::1]>\"\n\n );\n\n assert_eq!(\n\n hdrs.get_ext(\"extention-header\").unwrap()[0].value.vstr,\n\n \"Value\"\n\n );\n\n assert_eq!(hdrs.get_rfc(SipRFCHeader::Route).unwrap().len(), 2);\n\n }\n\n Err(_) => 
panic!(),\n\n }\n\n }\n\n}\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 92, "score": 46811.799825055015 }, { "content": " Some(hdrs) => hdrs.get(&Ascii::new(key)),\n\n None => None,\n\n }\n\n }\n\n /// Get headers that defined in rfc\n\n pub fn get_rfc(&self, hdr: SipRFCHeader) -> Option<&VecDeque<SipHeader<'a>>> {\n\n self.rfc_headers.get(&hdr)\n\n }\n\n\n\n /// get single value\n\n /// Returns some value if header by key should be present only one time\n\n pub fn get_ext_s(&self, key: &'a str) -> Option<&SipHeader<'a>> {\n\n match &self.ext_headers {\n\n Some(hdrs) => match hdrs.get(&Ascii::new(key)) {\n\n Some(s) => {\n\n if s.len() == 1 {\n\n return Some(&s[0]);\n\n } else {\n\n return None;\n\n };\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 93, "score": 46811.78598745362 }, { "content": " }\n\n None => None,\n\n },\n\n None => None,\n\n }\n\n }\n\n\n\n /// Get header that defined in rfc\n\n pub fn get_rfc_s(&self, hdr: SipRFCHeader) -> Option<&SipHeader<'a>> {\n\n match self.rfc_headers.get(&hdr) {\n\n Some(s) => {\n\n if s.len() == 1 {\n\n return Some(&s[0]);\n\n } else {\n\n return None;\n\n };\n\n }\n\n None => None,\n\n }\n\n }\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 94, "score": 46811.64847355785 }, { "content": " .unwrap()\n\n .contains_key(&vec_headers[0].name)\n\n {\n\n self.ext_headers\n\n .as_mut()\n\n .unwrap()\n\n .get_mut(&vec_headers[0].name)\n\n .unwrap()\n\n .append(&mut vec_headers)\n\n } else {\n\n self.ext_headers\n\n .as_mut()\n\n .unwrap()\n\n .insert(vec_headers[0].name, vec_headers);\n\n }\n\n }\n\n\n\n pub fn get_rfc_headers_keys(&self) -> Keys<'_, SipRFCHeader, VecDeque<SipHeader<'a>>> {\n\n self.rfc_headers.keys()\n\n }\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 95, "score": 46811.49425071065 }, { "content": " headers_result.add_extension_header(vec_headers);\n\n }\n\n }\n\n let (input, _) = tag(\"\\r\\n\")(input)?; // move to header parse\n\n inp2 
= input; // skip crlf of header field\n\n if is_crlf(inp2) {\n\n // end of headers and start of body part\n\n break;\n\n }\n\n }\n\n Ok((inp2, headers_result))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn headers_parse_test() {\n\n let parse_headers_result = Headers::parse(\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 96, "score": 46811.467858320764 }, { "content": "\n\n /// Returns length of unique headers\n\n // TODO rename to unique_len and add total_len\n\n pub fn len(&self) -> usize {\n\n match &self.ext_headers {\n\n Some(ext_headers) => ext_headers.len() + self.rfc_headers.len(),\n\n None => self.rfc_headers.len(),\n\n }\n\n }\n\n\n\n fn new() -> Headers<'a> {\n\n Headers {\n\n ext_headers: None,\n\n rfc_headers: BTreeMap::<SipRFCHeader, VecDeque<SipHeader<'a>>>::new(),\n\n }\n\n }\n\n\n\n fn add_rfc_header(\n\n &mut self,\n\n header_type: SipRFCHeader,\n", "file_path": "crates/sipmsg/src/headers/headers.rs", "rank": 97, "score": 46811.32843211519 } ]
Rust
src/peripherals.rs
tstellanova/px4flow_bsp
751151cb0c826148013b0709e7a246a9d9ca774d
/* Copyright (c) 2020 Todd Stellanova LICENSE: BSD3 (see LICENSE file) */ use p_hal::stm32 as pac; use stm32f4xx_hal as p_hal; use pac::{DCMI, RCC}; use embedded_hal::blocking::delay::{DelayMs, DelayUs}; use embedded_hal::digital::v2::{OutputPin, ToggleableOutputPin}; use embedded_hal::timer::CountDown; use embedded_hal::PwmPin; use p_hal::timer::{self, Timer}; use p_hal::gpio::{GpioExt, Output, PushPull, Speed}; use p_hal::pwm; use p_hal::rcc::RccExt; use p_hal::time::U32Ext; #[cfg(feature = "rttdebug")] use panic_rtt_core::rprintln; use shared_bus::{BusManager, BusProxy, CortexMBusManager}; use stm32f4xx_hal::timer::{PinC3, PinC4}; use stm32f4xx_hal::dwt::{Dwt,DwtExt}; pub fn setup_peripherals() -> ( (LedOutputActivity, LedOutputComm, LedOutputError), DelaySource, Dwt, I2c1Port, I2c2Port, Spi2Port, SpiGyroCsn, Usart2Port, Usart3Port, Uart4Port, DcmiCtrlPins, DcmiDataPins, pac::DMA2, pac::DCMI, ) { let mut dp = pac::Peripherals::take().unwrap(); let mut cp = cortex_m::Peripherals::take().unwrap(); let mut rcc = dp.RCC.constrain(); let mut clocks = rcc .cfgr .use_hse(24.mhz()) .sysclk(168.mhz()) .pclk1(42.mhz()) .pclk2(84.mhz()) .freeze(); let mut delay_source = p_hal::delay::Delay::new(cp.SYST, clocks); let dwt = cp.DWT.constrain(cp.DCB, clocks); let rcc2 = unsafe { &(*RCC::ptr()) }; rcc2.ahb2enr.modify(|_, w| w.dcmien().set_bit()); rcc2.ahb1enr.modify(|_, w| w.dma2en().set_bit()); let gpioa = dp.GPIOA.split(); let gpiob = dp.GPIOB.split(); let gpioc = dp.GPIOC.split(); let gpiod = dp.GPIOD.split(); let gpioe = dp.GPIOE.split(); #[cfg(feature = "breakout")] let user_led0 = gpioa .pa1 .into_push_pull_output() .set_speed(Speed::Low) .downgrade(); #[cfg(not(feature = "breakout"))] let user_led0 = gpioe.pe2.into_push_pull_output().downgrade(); let user_led1 = gpioe.pe3.into_push_pull_output().downgrade(); let user_led2 = gpioe.pe7.into_push_pull_output().downgrade(); let i2c1_port = { let scl = gpiob.pb8.into_alternate_af4().set_open_drain(); let sda = 
gpiob.pb9.into_alternate_af4().set_open_drain(); p_hal::i2c::I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks) }; let i2c2_port = { let scl = gpiob .pb10 .into_alternate_af4() .internal_pull_up(true) .set_speed(Speed::Low) .set_open_drain(); let sda = gpiob .pb11 .into_alternate_af4() .internal_pull_up(true) .set_speed(Speed::Low) .set_open_drain(); p_hal::i2c::I2c::i2c2(dp.I2C2, (scl, sda), 100.khz(), clocks) }; let usart2_port = { let config = p_hal::serial::config::Config::default().baudrate(115200.bps()); let tx = gpiod.pd5.into_alternate_af7(); let rx = gpiod.pd6.into_alternate_af7(); p_hal::serial::Serial::usart2(dp.USART2, (tx, rx), config, clocks) .unwrap() }; let usart3_port = { let config = p_hal::serial::config::Config::default().baudrate(115200.bps()); let tx = gpiod.pd8.into_alternate_af7(); let rx = gpiod.pd9.into_alternate_af7(); p_hal::serial::Serial::usart3(dp.USART3, (tx, rx), config, clocks) .unwrap() }; let uart4_port = { let config = p_hal::serial::config::Config::default().baudrate(9600.bps()); let tx = gpioa.pa0.into_alternate_af8(); let rx = gpioc.pc11.into_alternate_af8(); p_hal::serial::Serial::uart4(dp.UART4, (tx, rx), config, clocks) .unwrap() }; let spi2_port = { let sck = gpiob.pb13.into_alternate_af5(); let cipo = gpiob.pb14.into_alternate_af5(); let copi = gpiob.pb15.into_alternate_af5(); p_hal::spi::Spi::spi2( dp.SPI2, (sck, cipo, copi), embedded_hal::spi::MODE_3, 1_000_000.hz(), clocks, ) }; let mut spi_cs_gyro = gpiob.pb12.into_push_pull_output(); let _ = spi_cs_gyro.set_high(); let dcmi_ctrl_pins = { let pixck = gpioa .pa6 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh); let hsync = gpioa .pa4 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh); let vsync = gpiob .pb7 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh); (pixck, hsync, vsync) }; let dcmi_data_pins = ( gpioc .pc6 
.into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioc .pc7 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe0 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe1 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe4 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpiob .pb6 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe5 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe6 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioc .pc10 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioc .pc12 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), ); let dcmi = dp.DCMI; let dma2 = dp.DMA2; let mut exposure_line = gpioa.pa2.into_push_pull_output().set_speed(Speed::Low); let mut standby_line = gpioa.pa3.into_push_pull_output().set_speed(Speed::Low); let _ = exposure_line.set_low(); let _ = standby_line.set_low(); let channels = ( gpioc.pc8.into_alternate_af2(), gpioc.pc9.into_alternate_af2(), ); let (mut ch1, _ch2) = pwm::tim3(dp.TIM3, channels, clocks, 24u32.mhz()); let max_duty = ch1.get_max_duty(); let duty_avg = (max_duty / 2) + 1; #[cfg(feature = "rttdebug")] rprintln!("duty cycle: {} max: {}", duty_avg, max_duty); ch1.set_duty(duty_avg); ch1.enable(); #[cfg(feature = "rttdebug")] rprintln!("TIM3 XCLK config done"); ( (user_led0, user_led1, user_led2), delay_source, dwt, i2c1_port, i2c2_port, spi2_port, spi_cs_gyro, usart2_port, usart3_port, uart4_port, dcmi_ctrl_pins, dcmi_data_pins, dma2, dcmi, ) } pub type I2c1Port = p_hal::i2c::I2c< 
pac::I2C1, ( p_hal::gpio::gpiob::PB8<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, p_hal::gpio::gpiob::PB9<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, ), >; pub type I2c2Port = p_hal::i2c::I2c< pac::I2C2, ( p_hal::gpio::gpiob::PB10<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, p_hal::gpio::gpiob::PB11<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, ), >; pub type Spi2Port = p_hal::spi::Spi< pac::SPI2, ( p_hal::gpio::gpiob::PB13<p_hal::gpio::Alternate<p_hal::gpio::AF5>>, p_hal::gpio::gpiob::PB14<p_hal::gpio::Alternate<p_hal::gpio::AF5>>, p_hal::gpio::gpiob::PB15<p_hal::gpio::Alternate<p_hal::gpio::AF5>>, ), >; pub type SpiGyroCsn = p_hal::gpio::gpiob::PB12<p_hal::gpio::Output<p_hal::gpio::PushPull>>; pub type DcmiCtrlPins = ( p_hal::gpio::gpioa::PA6<DcmiControlPin>, p_hal::gpio::gpioa::PA4<DcmiControlPin>, p_hal::gpio::gpiob::PB7<DcmiControlPin>, ); pub type DcmiControlPin = p_hal::gpio::Alternate<p_hal::gpio::AF13>; pub type DcmiParallelDataPin = DcmiControlPin; pub type DcmiDataPins = ( p_hal::gpio::gpioc::PC6<DcmiParallelDataPin>, p_hal::gpio::gpioc::PC7<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE0<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE1<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE4<DcmiParallelDataPin>, p_hal::gpio::gpiob::PB6<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE5<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE6<DcmiParallelDataPin>, p_hal::gpio::gpioc::PC10<DcmiParallelDataPin>, p_hal::gpio::gpioc::PC12<DcmiParallelDataPin>, ); pub type LedOutputPinA = p_hal::gpio::gpioa::PA<Output<PushPull>>; pub type LedOutputPinE = p_hal::gpio::gpioe::PE<Output<PushPull>>; #[cfg(feature = "breakout")] pub type LedOutputActivity = LedOutputPinA; #[cfg(not(feature = "breakout"))] pub type LedOutputActivity = LedOutputPinE; pub type LedOutputComm = LedOutputPinE; pub type LedOutputError = LedOutputPinE; pub type DelaySource = p_hal::delay::Delay; pub type UsartIoPin = p_hal::gpio::Alternate<p_hal::gpio::AF7>; pub type Usart2Port = p_hal::serial::Serial< pac::USART2, ( 
p_hal::gpio::gpiod::PD5<UsartIoPin>, p_hal::gpio::gpiod::PD6<UsartIoPin>, ), >; pub type Usart3Port = p_hal::serial::Serial< pac::USART3, ( p_hal::gpio::gpiod::PD8<UsartIoPin>, p_hal::gpio::gpiod::PD9<UsartIoPin>, ), >; pub type UartIoPin = p_hal::gpio::Alternate<p_hal::gpio::AF8>; pub type Uart4Port = p_hal::serial::Serial< pac::UART4, ( p_hal::gpio::gpioa::PA0<UartIoPin>, p_hal::gpio::gpioc::PC11<UartIoPin>, ), >;
/* Copyright (c) 2020 Todd Stellanova LICENSE: BSD3 (see LICENSE file) */ use p_hal::stm32 as pac; use stm32f4xx_hal as p_hal; use pac::{DCMI, RCC}; use embedded_hal::blocking::delay::{DelayMs, DelayUs}; use embedded_hal::digital::v2::{OutputPin, ToggleableOutputPin}; use embedded_hal::timer::CountDown; use embedded_hal::PwmPin; use p_hal::timer::{self, Timer}; use p_hal::gpio::{GpioExt, Output, PushPull, Speed}; use p_hal::pwm; use p_hal::rcc::RccExt; use p_hal::time::U32Ext; #[cfg(feature = "rttdebug")] use panic_rtt_core::rprintln; use shared_bus::{BusManager, BusProxy, CortexMBusManager}; use stm32f4xx_hal::timer::{PinC3, PinC4}; use stm32f4xx_hal::dwt::{Dwt,DwtExt}; pub fn setup_peripherals() -> ( (LedOutputActivity, LedOutputComm, LedOutputError), DelaySource, Dwt, I2c1Port, I2c2Port, Spi2Port, SpiGyroCsn, Usart2Port, Usart3Port, Uart4Port, DcmiCtrlPins, DcmiDataPins, pac::DMA2, pac::DCMI, ) { let mut dp = pac::Peripherals::take().unwrap(); let mut cp = cortex_m::Peripherals::take().unwrap(); let mut rcc = dp.RCC.constrain(); let mut clocks = rcc .cfgr .use_hse(24.mhz()) .sysclk(168.mhz()) .pclk1(42.mhz()) .pclk2(84.mhz()) .freeze(); let mut delay_source = p_hal::delay::Delay::new(cp.SYST, clocks); let dwt = cp.DWT.constrain(cp.DCB, clocks); let rcc2 = unsafe { &(*RCC::ptr()) }; rcc2.ahb2enr.modify(|_, w| w.dcmien().set_bit()); rcc2.ahb1enr.modify(|_, w| w.dma2en().set_bit()); let gpioa = dp.GPIOA.split(); let gpiob = dp.GPIOB.split(); let gpioc = dp.GPIOC.split(); let gpiod = dp.GPIOD.split(); let gpioe = dp.GPIOE.split(); #[cfg(feature = "breakout")] let user_led0 = gpioa .pa1 .into_push_pull_output() .set_speed(Speed::Low) .downgrade(); #[cfg(not(feature = "breakout"))] let user_led0 = gpioe.pe2.into_push_pull_output().downgrade(); let user_led1 = gpioe.pe3.into_push_pull_output().downgrade(); let user_led2 = gpioe.pe7.into_push_pull_output().downgrade(); let i2c1_port = { let scl = gpiob.pb8.into_alternate_af4().set_open_drain(); let sda = 
gpiob.pb9.into_alternate_af4().set_open_drain(); p_hal::i2c::I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks) }; let i2c2_port = { let scl = gpiob .pb10 .into_alternate_af4() .internal_pull_up(true) .set_speed(Speed::Low) .set_open_drain(); let sda = gpiob .pb11 .into_alternate_af4() .internal_pull_up(true) .set_speed(Speed::Low) .set_open_drain(); p_hal::i2c::I2c::i2c2(dp.I2C2, (scl, sda), 100.khz(), clocks) }; let usart2_port = { let config = p_hal::serial::config::Config::default().baudrate(115200.bps()); let tx = gpiod.pd5.into_alternate_af7(); let rx = gpiod.pd6.into_alternate_af7(); p_hal::serial::Serial::usart2(dp.USART2, (tx, rx), config, clocks) .unwrap() }; let usart3_port = { let config = p_hal::serial::config::Config::default().baudrate(115200.bps()); let tx = gpiod.pd8.into_alternate_af7(); let rx = gpiod.pd9.into_alternate_af7(); p_hal::serial::Serial::usart3(dp.USART3, (tx, rx), config, clocks) .unwrap() }; let uart4_port = { let config = p_hal::serial::config::Config::default().baudrate(9600.bps()); let tx = gpioa.pa0.into_alternate_af8(); let rx = gpioc.pc11.into_alternate_af8(); p_hal::serial::Serial::uart4(dp.UART4, (tx, rx), config, clocks) .unwrap() }; let spi2_port = { let sck = gpiob.pb13.into_alternate_af5(); let cipo = gpiob.pb14.into_alternate_af5(); let copi = gpiob.pb15.into_alternate_af5(); p_hal::spi::Spi::spi2( dp.SPI2, (sck, cipo, copi), embedded_hal::spi::MODE_3, 1_000_000.hz(), clocks, ) }; let mut spi_cs_gyro = gpiob.pb12.into_push_pull_output(); let _ = spi_cs_gyro.set_high(); let dcmi_ctrl_pins = { let pixck = gpioa .pa6 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh); let hsync = gpioa .pa4 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh); let vsync = gpiob .pb7 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh); (pixck, hsync, vsync) }; let dcmi_data_pins = ( gpioc .pc6 
.into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioc .pc7 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe0 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe1 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe4 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpiob .pb6 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHig
pub type I2c1Port = p_hal::i2c::I2c< pac::I2C1, ( p_hal::gpio::gpiob::PB8<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, p_hal::gpio::gpiob::PB9<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, ), >; pub type I2c2Port = p_hal::i2c::I2c< pac::I2C2, ( p_hal::gpio::gpiob::PB10<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, p_hal::gpio::gpiob::PB11<p_hal::gpio::AlternateOD<p_hal::gpio::AF4>>, ), >; pub type Spi2Port = p_hal::spi::Spi< pac::SPI2, ( p_hal::gpio::gpiob::PB13<p_hal::gpio::Alternate<p_hal::gpio::AF5>>, p_hal::gpio::gpiob::PB14<p_hal::gpio::Alternate<p_hal::gpio::AF5>>, p_hal::gpio::gpiob::PB15<p_hal::gpio::Alternate<p_hal::gpio::AF5>>, ), >; pub type SpiGyroCsn = p_hal::gpio::gpiob::PB12<p_hal::gpio::Output<p_hal::gpio::PushPull>>; pub type DcmiCtrlPins = ( p_hal::gpio::gpioa::PA6<DcmiControlPin>, p_hal::gpio::gpioa::PA4<DcmiControlPin>, p_hal::gpio::gpiob::PB7<DcmiControlPin>, ); pub type DcmiControlPin = p_hal::gpio::Alternate<p_hal::gpio::AF13>; pub type DcmiParallelDataPin = DcmiControlPin; pub type DcmiDataPins = ( p_hal::gpio::gpioc::PC6<DcmiParallelDataPin>, p_hal::gpio::gpioc::PC7<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE0<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE1<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE4<DcmiParallelDataPin>, p_hal::gpio::gpiob::PB6<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE5<DcmiParallelDataPin>, p_hal::gpio::gpioe::PE6<DcmiParallelDataPin>, p_hal::gpio::gpioc::PC10<DcmiParallelDataPin>, p_hal::gpio::gpioc::PC12<DcmiParallelDataPin>, ); pub type LedOutputPinA = p_hal::gpio::gpioa::PA<Output<PushPull>>; pub type LedOutputPinE = p_hal::gpio::gpioe::PE<Output<PushPull>>; #[cfg(feature = "breakout")] pub type LedOutputActivity = LedOutputPinA; #[cfg(not(feature = "breakout"))] pub type LedOutputActivity = LedOutputPinE; pub type LedOutputComm = LedOutputPinE; pub type LedOutputError = LedOutputPinE; pub type DelaySource = p_hal::delay::Delay; pub type UsartIoPin = p_hal::gpio::Alternate<p_hal::gpio::AF7>; pub type Usart2Port = 
p_hal::serial::Serial< pac::USART2, ( p_hal::gpio::gpiod::PD5<UsartIoPin>, p_hal::gpio::gpiod::PD6<UsartIoPin>, ), >; pub type Usart3Port = p_hal::serial::Serial< pac::USART3, ( p_hal::gpio::gpiod::PD8<UsartIoPin>, p_hal::gpio::gpiod::PD9<UsartIoPin>, ), >; pub type UartIoPin = p_hal::gpio::Alternate<p_hal::gpio::AF8>; pub type Uart4Port = p_hal::serial::Serial< pac::UART4, ( p_hal::gpio::gpioa::PA0<UartIoPin>, p_hal::gpio::gpioc::PC11<UartIoPin>, ), >;
h), gpioe .pe5 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioe .pe6 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioc .pc10 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), gpioc .pc12 .into_pull_up_input() .into_alternate_af13() .internal_pull_up(true) .set_speed(Speed::VeryHigh), ); let dcmi = dp.DCMI; let dma2 = dp.DMA2; let mut exposure_line = gpioa.pa2.into_push_pull_output().set_speed(Speed::Low); let mut standby_line = gpioa.pa3.into_push_pull_output().set_speed(Speed::Low); let _ = exposure_line.set_low(); let _ = standby_line.set_low(); let channels = ( gpioc.pc8.into_alternate_af2(), gpioc.pc9.into_alternate_af2(), ); let (mut ch1, _ch2) = pwm::tim3(dp.TIM3, channels, clocks, 24u32.mhz()); let max_duty = ch1.get_max_duty(); let duty_avg = (max_duty / 2) + 1; #[cfg(feature = "rttdebug")] rprintln!("duty cycle: {} max: {}", duty_avg, max_duty); ch1.set_duty(duty_avg); ch1.enable(); #[cfg(feature = "rttdebug")] rprintln!("TIM3 XCLK config done"); ( (user_led0, user_led1, user_led2), delay_source, dwt, i2c1_port, i2c2_port, spi2_port, spi_cs_gyro, usart2_port, usart3_port, uart4_port, dcmi_ctrl_pins, dcmi_data_pins, dma2, dcmi, ) }
function_block-function_prefixed
[ { "content": "fn main() {\n\n use std::env;\n\n use std::fs::File;\n\n use std::io::Write;\n\n use std::path::PathBuf;\n\n let memfile_bytes = include_bytes!(\"stm32f407_memory.x\");\n\n\n\n //stm32f427\n\n // Put the linker script somewhere the linker can find it\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(memfile_bytes)\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 30570.74035686572 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n rtt_init_print!(BlockIfFull);\n\n rprintln!(\"-- > MAIN --\");\n\n\n\n let mut board = Board::default();\n\n // this provides the interrupt handler access to the shared Board struct\n\n unsafe {\n\n BOARD_PTR.store(&mut board, Ordering::SeqCst);\n\n }\n\n\n\n let loop_interval = GYRO_REPORTING_INTERVAL_MS as u8;\n\n rprintln!(\"loop_interval: {}\", loop_interval);\n\n\n\n let _ = board.activity_led.set_high();\n\n let _ = board.comms_led.set_high();\n\n let _ = board.error_led.set_high();\n\n\n\n let fast_img_bufs: [_; 2] = unsafe { [&mut FAST_IMG0, &mut FAST_IMG1] };\n\n // This is how we can enable a grayscale test pattern on the MT9V034\n\n // let _ = board.camera_config.as_mut().unwrap().\n", "file_path": "examples/play.rs", "rank": 2, "score": 28920.77738652651 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n rtt_init_print!(NoBlockTrim);\n\n rprintln!(\"-- > MAIN --\");\n\n\n\n let mut board = Board::default();\n\n\n\n let loop_interval = GYRO_REPORTING_INTERVAL_MS as u8;\n\n rprintln!(\"loop_interval: {} ms\", loop_interval);\n\n\n\n let _ = board.activity_led.set_high();\n\n let _ = board.comms_led.set_high();\n\n let _ = board.error_led.set_high();\n\n\n\n let mut overrun_count = 0;\n\n let mut delay_interval_ms = GYRO_REPORTING_INTERVAL_MS;\n\n loop {\n\n // read the gyro as fast as possible\n\n if let Some(gyro) = board.gyro.as_mut() {\n\n for _ in 0..10 {\n\n if let Ok(status) = gyro.status() {\n", "file_path": "examples/gyro.rs", "rank": 3, "score": 28920.77738652651 }, { "content": "#[interrupt]\n\nfn DCMI() {\n\n // forward to DCMI's interrupt handler\n\n unsafe {\n\n (*BOARD_PTR.load(Ordering::SeqCst)).handle_dcmi_interrupt();\n\n }\n\n}\n\n\n\n/// Setup core-coupled RAM buffers for faster image manipulation\n\n#[link_section = \".ccmram.IMG_BUFS\"]\n\nstatic mut FAST_IMG0: ImageFrameBuf = [0u8; FRAME_BUF_LEN];\n\n// static mut FAST_IMG0: Sq120FrameBuf = [0u8; SQ120_FRAME_BUF_LEN];\n\n\n\n#[link_section = \".ccmram.IMG_BUFS\"]\n\nstatic mut FAST_IMG1: ImageFrameBuf = [0u8; FRAME_BUF_LEN];\n\n// static mut FAST_IMG1: Sq120FrameBuf = [0u8; SQ120_FRAME_BUF_LEN];\n\n\n", "file_path": "examples/play.rs", "rank": 4, "score": 28920.77738652651 }, { "content": "#[interrupt]\n\nfn DMA2_STREAM1() {\n\n // forward to DCMI's interrupt handler\n\n unsafe {\n\n (*BOARD_PTR.load(Ordering::SeqCst)).handle_dma2_stream1_interrupt();\n\n }\n\n}\n\n\n\n/// should be called whenever DCMI completes a frame\n", "file_path": "examples/play.rs", "rank": 5, "score": 27492.573819833557 }, { "content": "/// output image data as 8-bit raw pixels in base64 encoded format, to RTT\n\nfn dump_pixels(image_count: u32, buf: &[u8]) {\n\n rprintln!(\"\\n--- {}\", image_count);\n\n\n\n //process input chunks that are multiples of 12 bytes (for base64 continuity)\n\n const CHUNK_SIZE: usize 
= 24;\n\n let total_len = buf.len();\n\n let mut read_idx = 0;\n\n while read_idx < total_len {\n\n let max_idx = total_len.min(read_idx + CHUNK_SIZE);\n\n let wrapper = Base64Display::with_config(\n\n &buf[read_idx..max_idx],\n\n base64::STANDARD,\n\n );\n\n rprint!(\"{}\", wrapper);\n\n read_idx += CHUNK_SIZE;\n\n }\n\n}\n", "file_path": "examples/play.rs", "rank": 6, "score": 19698.311162247785 }, { "content": "## MCU Pin Map\n\n\n\n| Pin | Configuration |\n\n| :--- | :--- | \n\n| PA0 | UART4_TX (\"TIM5_CH1\" - N/C) |\n\n| PA1 | \"TIM5_CH2\" (unused - N/C) |\n\n| PA2 | TIM5_CH3_EXPOSURE (pulled low) |\n\n| PA3 | TIM5_CH4_STANDBY (pulled low) |\n\n| PA4 | DCMI_HSYNC |\n\n| PA5 | CAM_NRESET (tied to high) |\n\n| PA6 | DCMI_PIXCK |\n\n| PB6 | DCMI_D5 |\n\n| PB7 | DCMI_VSYNC |\n\n| PB8 | I2C1 SCL |\n\n| PB9 | I2C1 SDA |\n\n| PB10 | I2C2 SCL |\n\n| PB11 | I2C2 SDA |\n\n| PB12 | spi_cs_gyro |\n\n| PB13 | SPI2 SCLK |\n\n| PB14 | SPI2 CIPO |\n\n| PB15 | SPI2 COPI |\n\n| PC6 | DCMI_D0 |\n\n| PC7 | DCMI_D1 |\n\n| PC8 | XCLK |\n\n| PC9 | \"TIM8_CH4_LED_OUT\" (unused) |\n\n| PC10 | DCMI_D8 |\n\n| PC11 | UART4_RX |\n\n| PC12 | DCMI_D9 |\n\n| PD0 | TBD |\n\n| PD5 | TBD |\n\n| PD6 | TBD |\n\n| PD7 | TBD |\n\n| PD15 | TBD |\n\n| PE0 | DCMI_D2 |\n\n| PE1 | DCMI_D3 |\n\n| PE2 | user_led0 |\n\n| PE3 | user_led1 |\n\n| PE4 | DCMI_D4 |\n\n| PE5 | DCMI_D6 |\n\n| PE6 | DCMI_D7 |\n\n| PE7 | user_led2 |\n\n\n", "file_path": "README.md", "rank": 8, "score": 24.839649800277893 }, { "content": "/*\n\nCopyright (c) 2020 Todd Stellanova\n\nLICENSE: BSD3 (see LICENSE file)\n\n*/\n\n\n\nuse p_hal::stm32 as pac;\n\nuse stm32f4xx_hal as p_hal;\n\n\n\nuse pac::{DCMI, RCC};\n\n\n\nuse core::sync::atomic::{AtomicUsize, Ordering};\n\nuse cortex_m::singleton;\n\n\n\nuse core::ops::Deref;\n\n#[cfg(feature = \"rttdebug\")]\n\nuse panic_rtt_core::rprintln;\n\nuse stm32f4xx_hal::stm32::dma2::ST;\n\n\n\npub const SQ_DIM_64: usize = 64;\n\npub const SQ_DIM_120: usize = 120;\n", "file_path": "src/dcmi.rs", 
"rank": 14, "score": 19.900351656335047 }, { "content": "/*\n\nCopyright (c) 2020 Todd Stellanova\n\nLICENSE: BSD3 (see LICENSE file)\n\n*/\n\n\n\n#![no_main]\n\n#![no_std]\n\n\n\nuse cortex_m_rt as rt;\n\nuse p_hal::stm32 as pac;\n\nuse rt::entry;\n\nuse stm32f4xx_hal as p_hal;\n\n\n\nuse pac::interrupt;\n\n\n\nuse panic_rtt_core::{self, rprint, rprintln, rtt_init_print};\n\n\n\nuse embedded_hal::digital::v2::OutputPin;\n\nuse embedded_hal::digital::v2::ToggleableOutputPin;\n\n\n", "file_path": "examples/play.rs", "rank": 18, "score": 17.08429341432048 }, { "content": "/*\n\nCopyright (c) 2020 Todd Stellanova\n\nLICENSE: BSD3 (see LICENSE file)\n\n*/\n\n\n\nuse crate::peripherals::*;\n\n\n\nuse embedded_hal::blocking::delay::DelayMs;\n\nuse stm32f4xx_hal as p_hal;\n\nuse p_hal::dwt::Dwt;\n\nuse p_hal::stm32 as pac;\n\n\n\nuse eeprom24x::Eeprom24x;\n\nuse embedded_hal::digital::v1_compat::OldOutputPin;\n\nuse embedded_hal::digital::v2::OutputPin;\n\nuse l3gd20::L3gd20;\n\nuse mt9v034_i2c::{BinningFactor, Mt9v034, ParamContext};\n\n\n\nuse core::sync::atomic::{AtomicPtr, Ordering};\n\nuse cortex_m::singleton;\n", "file_path": "src/board.rs", "rank": 19, "score": 17.004787988464976 }, { "content": "\n\nuse crate::dcmi::{DcmiWrapper, SQ_DIM_120, SQ_DIM_64};\n\n#[cfg(feature = \"rttdebug\")]\n\nuse panic_rtt_core::rprintln;\n\nuse stm32f4xx_hal::dwt::StopWatch;\n\n\n\n/// The main Board support type:\n\n/// This contains both pre-initialized drivers for\n\n/// onboard devices as well as bus ports for external ports peripherals.\n\npub struct Board<'a> {\n\n pub activity_led: LedOutputActivity,\n\n pub comms_led: LedOutputComm,\n\n pub error_led: LedOutputError,\n\n\n\n pub delay_source: DelaySource,\n\n pub dwt: Dwt,\n\n pub external_i2c1: I2c1BusManager,\n\n pub camera_config: Option<CameraConfigType<'a>>,\n\n pub gyro: Option<GyroType>,\n\n pub eeprom: Option<EepromType<'a>>,\n", "file_path": "src/board.rs", "rank": 23, "score": 13.741826890407406 }, { "content": 
"/*\n\nCopyright (c) 2020 Todd Stellanova\n\nLICENSE: BSD3 (see LICENSE file)\n\n*/\n\n\n\n#![no_main]\n\n#![no_std]\n\n\n\n//! This example shows how to initialize the PX4FLOW board\n\n//! and access the onboard gyroscope.\n\n//! See the l3gd20 crate for more details on customizing the gyro configuration.\n\n//!\n\nuse cortex_m_rt as rt;\n\nuse rt::entry;\n\n\n\nuse panic_rtt_core::{self, rprintln, rtt_init_print};\n\n\n\nuse embedded_hal::digital::v2::{OutputPin, ToggleableOutputPin};\n\nuse embedded_hal::blocking::delay::DelayMs;\n\n\n\nconst GYRO_REPORTING_RATE_HZ: u16 = 50;\n\nconst GYRO_REPORTING_INTERVAL_MS: u16 = 1000 / GYRO_REPORTING_RATE_HZ;\n\n\n\nuse px4flow_bsp::board::Board;\n\n\n\n\n\n#[entry]\n", "file_path": "examples/gyro.rs", "rank": 25, "score": 13.104807426267168 }, { "content": "/*\n\nCopyright (c) 2020 Todd Stellanova\n\nLICENSE: BSD3 (see LICENSE file)\n\n*/\n\n\n\n#![no_std]\n\n\n\n///! This crate provides some commonly useful interfaces to the\n\n///! PX4FLOW board, such as:\n\n///! - Camera configuration/control (via the [mt9v034-i2c crate](https://crates.io/crates/mt9v034-i2c))\n\n///! - Six degree of freedom (6DOF) accelerometer and gyroscope sense via the\n\n///! [l3gd20 crate](https://crates.io/crates/l3gd20)\n\n///! - Read/Write onboard EEPROM\n\n///! - Reading camera image data (via a DCMI `read_available` function)\n\n///! 
See the example and README for more details.\n\n\n\n#[allow(unused)]\n\npub mod peripherals;\n\n\n\n#[allow(unused)]\n\npub mod board;\n\n\n\n#[allow(unused)]\n\npub mod dcmi;\n", "file_path": "src/lib.rs", "rank": 27, "score": 11.923223291686345 }, { "content": " pub dma2: pac::DMA2,\n\n pub dcmi_wrap: Option<DcmiWrapper<'a>>,\n\n pub usart2: Usart2Port,\n\n pub usart3: Usart3Port,\n\n pub uart4: Uart4Port,\n\n\n\n stopwatch: StopWatch<'a>,\n\n}\n\n\n\nimpl Default for Board<'_> {\n\n fn default() -> Self {\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"new board\");\n\n\n\n let (\n\n raw_user_leds,\n\n mut delay_source,\n\n dwt,\n\n i2c1_port,\n\n i2c2_port,\n", "file_path": "src/board.rs", "rank": 30, "score": 10.18981310816146 }, { "content": " rprintln!(\n\n \"dcmi::setup start: {}, {}\",\n\n self.pixel_count,\n\n self.bits_per_pixel\n\n );\n\n\n\n //NOTE(unsafe) This executes only once during initialization\n\n unsafe {\n\n self.deinit_dma2(dma2);\n\n self.toggle_dcmi(false);\n\n self.init_dcmi();\n\n self.init_dma2(dma2);\n\n }\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"dcmi::setup done\");\n\n }\n\n\n\n /// Call this after `setup` to begin capture and start interrupts\n\n pub fn enable_capture(&mut self, dma2: &pac::DMA2) {\n\n unsafe {\n", "file_path": "src/dcmi.rs", "rank": 31, "score": 9.862492857740769 }, { "content": "# px4flow_bsp\n\n\n\nRust no_std embedded hal board support package for the PX4FLOW optical flow sensor hardware.\n\n\n\n\n\n## Embedded Examples\n\nThe examples are designed to be used with a debug probe that supports J-Link / RTT.\n\nWe provide a couple different ways to run these:\n\n- With the Segger tools (This is currently the default.)\n\n- With [probe-run](https://crates.io/crates/probe-run) (This is WIP and may not work as expected.)\n\n\n\n\n\n#### With the Segger tools installed \n\n- Edit [config](.cargo/config) to select the `segger.gdb` runner\n\n- In one shell run: `./start_gdb_server_jlink.sh`\n\n- In another 
shell run: `JLinkRTTClient`\n\n- Then run your choice of examples\n\n\n\n#### With probe-run installed\n\n- Edit [config](.cargo/config) to select the `probe-run` runner\n\n- Run the example (see below) with a JLink debug probe attached to your PX4FLOW\n\n- Note that RTT output to the terminal may be significantly slower than with the Segger tools.\n\n\n\n### Running examples\n\n\n\n```shell script\n\ncargo run --example play --features rttdebug\n\n```\n\n\n\n\n\n### Interrupt Handling\n\n\n\nCurrently you need to configure your application to forward interrupts from app-level\n\ninterrupt handlers, ie:\n\n\n\n```rust\n\n/// should be called whenever DMA2 completes a transfer\n\n#[interrupt]\n\nfn DMA2_STREAM1() {\n\n // forward to board's interrupt handler\n\n unsafe {\n\n (*BOARD_PTR.load(Ordering::SeqCst)).handle_dma2_stream1_interrupt();\n\n }\n\n}\n\n\n\n/// should be called whenever DCMI completes a frame\n\n#[interrupt]\n\nfn DCMI() {\n\n // forward to board's interrupt handler\n\n unsafe {\n\n (*BOARD_PTR.load(Ordering::SeqCst)).handle_dcmi_interrupt();\n\n }\n\n}\n\n```\n\n\n\nThis assumes you are using the [cortex-m-rt crate](https://crates.io/crates/cortex-m-rt) \n\nto construct your embedded application, and using its `#[interrupt]` to handle interrupts.\n\n\n\n\n", "file_path": "README.md", "rank": 34, "score": 8.57065056014083 }, { "content": " let base_i2c_address = mt9v034_i2c::PX4FLOW_CAM_ADDRESS;\n\n let mut cam_config =\n\n Mt9v034::new(i2c2_bus_mgr.acquire(), base_i2c_address);\n\n\n\n // configure image sensor with two distinct contexts:\n\n // - Context A: 480x480 window, binning 4 -> 120x120 output images (square-120)\n\n // - Context B: 752x480 window, binning 4 -> 188x120 output images\n\n const BINNING_A: BinningFactor = BinningFactor::Four;\n\n const BINNING_B: BinningFactor = BinningFactor::Four;\n\n const WINDOW_W_A: u16 = 480;\n\n const WINDOW_H_A: u16 = 480;\n\n const WINDOW_W_B: u16 = 752;\n\n const WINDOW_H_B: u16 = 480;\n\n\n\n\n\n 
cam_config\n\n .setup_with_dimensions(\n\n WINDOW_W_A,\n\n WINDOW_H_A,\n\n BINNING_A,\n", "file_path": "src/board.rs", "rank": 37, "score": 8.125752332925964 }, { "content": " 1\n\n }\n\n }\n\n\n\n /// Configure the DCMI peripheral for continuous capture\n\n fn init_dcmi(&mut self) {\n\n // #[cfg(feature = \"rttdebug\")]\n\n // rprintln!(\"04 dcmi_cr: {:#b}\", self.dcmi.cr.read().bits());\n\n\n\n //basic DCMI configuration\n\n //TODO use self.bits_per_pixel to configure EDM\n\n self.dcmi.cr.modify(|_, w| unsafe {\n\n w.cm() // capture mode: continuous\n\n .clear_bit()\n\n .ess() // synchro mode: hardware\n\n .clear_bit()\n\n .pckpol() // PCK polarity: falling\n\n .clear_bit()\n\n .vspol() // vsync polarity: low\n\n .clear_bit()\n", "file_path": "src/dcmi.rs", "rank": 38, "score": 8.124257435877727 }, { "content": " bits_per_pixel,\n\n dma_it_count: AtomicUsize::new(0),\n\n unread_frames_count: AtomicUsize::new(0),\n\n dcmi_capture_count: AtomicUsize::new(0),\n\n dcmi,\n\n transfer: DmaRotatingTransfer {\n\n buf_refs: unsafe {\n\n [&mut IMG_BUF0, &mut IMG_BUF1, &mut IMG_BUF2]\n\n },\n\n buf_addresses: [buf0_addr, buf1_addr, buf2_addr],\n\n available_buf_idx: AtomicUsize::new(2),\n\n mem0_buf_idx: AtomicUsize::new(0),\n\n mem1_buf_idx: AtomicUsize::new(1),\n\n },\n\n }\n\n }\n\n\n\n /// Setup DCMI and associated DMA\n\n pub fn setup(&mut self, dma2: &pac::DMA2) {\n\n #[cfg(feature = \"rttdebug\")]\n", "file_path": "src/dcmi.rs", "rank": 39, "score": 8.119110476432336 }, { "content": "\n\n let mut stream1_chan1 = &dma2.st[1];\n\n\n\n // #[cfg(feature = \"rttdebug\")]\n\n // rprintln!(\"04 dma2_cr: {:#b}\", stream1_chan1.cr.read().bits());\n\n\n\n stream1_chan1.cr.modify(|_, w| {\n\n w\n\n // Note: add this to enable Half transfer interrupt:\n\n //.htie().enabled()\n\n // Transfer complete interrupt enable:\n\n .tcie()\n\n .enabled()\n\n });\n\n\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"05 dma2_cr: {:#b}\", stream1_chan1.cr.read().bits());\n\n }\n\n /// Copy 
available image data into the provided slice\n\n pub fn read_available(&mut self, dest: &mut [u8]) -> Result<usize, ()> {\n", "file_path": "src/dcmi.rs", "rank": 40, "score": 7.964226420969534 }, { "content": " );\n\n eeprom.write_byte(PARAM_ADDRESS, 0xAA).unwrap();\n\n delay_source.delay_ms(5u8);\n\n\n\n let read_data = eeprom.read_byte(PARAM_ADDRESS).unwrap();\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"eeprom data: 0x{:X}\", read_data);\n\n let eeprom_opt = Some(eeprom);\n\n }\n\n\n\n // option A: select max aspect ration with row and column bin 4 (188x120)\n\n let mut dcmi_wrap = DcmiWrapper::default(dcmi);\n\n // option B: square-120 images\n\n // let mut dcmi_wrap = DcmiWrapper::new(dcmi, SQ_DIM_120, SQ_DIM_120, 8);\n\n\n\n dcmi_wrap.setup(&dma2);\n\n\n\n #[cfg(feature = \"breakout\")]\n\n let base_i2c_address = mt9v034_i2c::ARDUCAM_BREAKOUT_ADDRESS;\n\n #[cfg(not(feature = \"breakout\"))]\n", "file_path": "src/board.rs", "rank": 41, "score": 7.893973642088216 }, { "content": " // disable the interface:\n\n self.dcmi.cr.modify(|_, w| w.enable().clear_bit());\n\n // disable capturing:\n\n self.dcmi.cr.modify(|_, w| w.capture().clear_bit());\n\n }\n\n\n\n // #[cfg(feature = \"rttdebug\")]\n\n // rprintln!(\"toggle dcmi_cr: {:#b}\", self.dcmi.cr.read().bits());\n\n }\n\n\n\n /// Enable `DCMI` interrupts\n\n fn enable_dcmi_interrupts(&mut self) {\n\n cortex_m::interrupt::free(|_| {\n\n // enable interrupts DCMI capture completion\n\n pac::NVIC::unpend(pac::Interrupt::DCMI);\n\n unsafe {\n\n pac::NVIC::unmask(pac::Interrupt::DCMI);\n\n }\n\n });\n\n\n", "file_path": "src/dcmi.rs", "rank": 42, "score": 7.8275279996436975 }, { "content": " .store(cur_available, Ordering::SeqCst);\n\n stream1_chan1.m1ar.write(|w| unsafe { w.bits(new_target) });\n\n } else {\n\n //memory0 is idle, so swap the available buffer with DMA_S2M0AR\n\n let cur_mem0 = self.transfer.mem0_buf_idx.load(Ordering::SeqCst);\n\n self.transfer\n\n .available_buf_idx\n\n .store(cur_mem0, 
Ordering::SeqCst);\n\n self.transfer\n\n .mem0_buf_idx\n\n .store(cur_available, Ordering::SeqCst);\n\n stream1_chan1.m0ar.write(|w| unsafe { w.bits(new_target) });\n\n }\n\n }\n\n\n\n /// Call this from DCMI interrupt\n\n pub fn dcmi_irqhandler(&mut self) {\n\n self.dcmi_capture_count.fetch_add(1, Ordering::SeqCst);\n\n // let dcmi = unsafe { &(*pac::DCMI::ptr()) };\n\n #[cfg(feature = \"rttdebug\")]\n", "file_path": "src/dcmi.rs", "rank": 43, "score": 7.556717018216721 }, { "content": " }\n\n\n\n //store the one-and-only i2c2 bus to a static\n\n let i2c2_bus_mgr: &'static mut I2c2BusManager =\n\n singleton!(:I2c2BusManager =\n\n shared_bus::CortexMBusManager::new(i2c2_port)\n\n )\n\n .unwrap();\n\n\n\n let mut eeprom_opt = None;\n\n #[cfg(not(feature = \"breakout\"))]\n\n {\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"eeprom setup start\");\n\n let eeprom_i2c_address = eeprom24x::SlaveAddr::default();\n\n const PARAM_ADDRESS: u32 = 0x1234;\n\n\n\n let mut eeprom = Eeprom24x::new_24x128(\n\n i2c2_bus_mgr.acquire(),\n\n eeprom_i2c_address,\n", "file_path": "src/board.rs", "rank": 44, "score": 7.16950944626502 }, { "content": " .hspol() // hsync polarity: low\n\n .clear_bit()\n\n .fcrc() // capture rate: every frame\n\n .bits(0x00)\n\n .edm() // extended data mode: 8 bit\n\n .bits(0x00)\n\n });\n\n\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"05 dcmi_cr: {:#b}\", self.dcmi.cr.read().bits());\n\n }\n\n\n\n /// Enable DMA2 and DCMI after setup\n\n fn enable_dcmi_and_dma(&mut self, dma2: &pac::DMA2) {\n\n self.toggle_dma2_stream1(dma2, true);\n\n self.toggle_dcmi(true);\n\n self.enable_dma_interrupts(dma2);\n\n self.enable_dcmi_interrupts();\n\n\n\n #[cfg(feature = \"rttdebug\")]\n", "file_path": "src/dcmi.rs", "rank": 45, "score": 6.974621535885868 }, { "content": " // we should treat it as a shared bus\n\n let i2c1_bus_mgr = shared_bus::CortexMBusManager::new(i2c1_port);\n\n\n\n let mut gyro_opt: Option<_> = None;\n\n #[cfg(not(feature = 
\"breakout\"))]\n\n {\n\n let _ = spi_cs_gyro.set_high(); // initially deselected\n\n let old_gyro_csn = OldOutputPin::new(spi_cs_gyro);\n\n\n\n if let Ok(mut gyro) = L3gd20::new(spi2_port, old_gyro_csn) {\n\n if let Ok(device_id) = gyro.who_am_i() {\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"gyro ID: 0x{:X}\", device_id);\n\n if device_id == 0xD4 {\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"gyro setup done\");\n\n gyro_opt = Some(gyro)\n\n }\n\n }\n\n }\n", "file_path": "src/board.rs", "rank": 46, "score": 6.844882024199106 }, { "content": " self.enable_dcmi_and_dma(dma2);\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\"dcmi cr: 0x{:x}\", self.dcmi.cr.read().bits());\n\n }\n\n }\n\n\n\n fn deinit_dma2(&mut self, dma2: &pac::DMA2) {\n\n self.toggle_dma2_stream1(dma2, false);\n\n let mut stream1_chan1 = &dma2.st[1];\n\n\n\n unsafe {\n\n stream1_chan1.cr.write(|w| w.bits(0));\n\n stream1_chan1.ndtr.write(|w| w.bits(0));\n\n stream1_chan1.par.write(|w| w.bits(0));\n\n stream1_chan1.m0ar.write(|w| w.bits(0));\n\n stream1_chan1.m1ar.write(|w| w.bits(0));\n\n //fifo control\n\n stream1_chan1.fcr.write(|w| w.bits(0x00000021)); //TODO verify value\n\n };\n\n\n", "file_path": "src/dcmi.rs", "rank": 47, "score": 6.496909839253981 }, { "content": " eeprom: eeprom_opt,\n\n dma2: dma2,\n\n dcmi_wrap: Some(dcmi_wrap),\n\n usart2,\n\n usart3,\n\n uart4,\n\n dwt,\n\n stopwatch\n\n };\n\n\n\n result\n\n }\n\n}\n\n\n\n\n\nimpl Board<'_> {\n\n /// Call this on the DMA2_STREAM1 interrupt\n\n pub fn handle_dma2_stream1_interrupt(&mut self) {\n\n if let Some(dcmi_wrap) = self.dcmi_wrap.as_mut() {\n\n dcmi_wrap.dma2_stream1_irqhandler();\n", "file_path": "src/board.rs", "rank": 48, "score": 6.456941282145582 }, { "content": " self.dcmi.ier.write(|w| {\n\n w\n\n // watch for data buffer overrun occurred\n\n .ovr_ie()\n\n .set_bit()\n\n // frame capture completion interrupt\n\n .frame_ie()\n\n .set_bit()\n\n });\n\n }\n\n\n\n /// Enable `DMA2_STREAM1` interrupts\n\n fn 
enable_dma_interrupts(&mut self, dma2: &pac::DMA2) {\n\n cortex_m::interrupt::free(|_| {\n\n // enable interrupts for DMA2 transfer completion\n\n pac::NVIC::unpend(pac::Interrupt::DMA2_STREAM1);\n\n unsafe {\n\n pac::NVIC::unmask(pac::Interrupt::DMA2_STREAM1);\n\n }\n\n });\n", "file_path": "src/dcmi.rs", "rank": 49, "score": 6.375618826571786 }, { "content": "\n\n/// Concrete type for gyro driver\n\npub type GyroType = l3gd20::L3gd20<Spi2Port, OldOutputPin<SpiGyroCsn>>;\n\n\n\n/// Concrete type for serial EEPROM driver\n\npub type EepromType<'a> = eeprom24x::Eeprom24x<\n\n I2c2BusProxy<'a>,\n\n eeprom24x::page_size::B64,\n\n eeprom24x::addr_size::TwoBytes,\n\n>;\n\n\n\n/// Concrete type for camera configuration driver\n\npub type CameraConfigType<'a> = Mt9v034<I2c2BusProxy<'a>>;\n\n\n\n\n", "file_path": "src/board.rs", "rank": 50, "score": 6.351465393275618 }, { "content": " dcmi_capture_count: AtomicUsize,\n\n dcmi: pac::DCMI,\n\n transfer: DmaRotatingTransfer<'a>,\n\n}\n\n\n\nstatic mut IMG_BUF0: ImageFrameBuf = [0u8; FRAME_BUF_LEN];\n\nstatic mut IMG_BUF1: ImageFrameBuf = [0u8; FRAME_BUF_LEN];\n\nstatic mut IMG_BUF2: ImageFrameBuf = [0u8; FRAME_BUF_LEN];\n\n\n\nimpl DcmiWrapper<'_> {\n\n /// New wrapper ready for DCMI with a 64x64, 8 bits per pixel capture\n\n pub fn default(dcmi: pac::DCMI) -> Self {\n\n Self::new(dcmi, DEFAULT_IMG_WIDTH, DEFAULT_IMG_HEIGHT, DEFAULT_BITS_PER_PIXEL)\n\n }\n\n\n\n /// Create a new DcmiWrapper with configured frame dimensions\n\n pub fn new(\n\n dcmi: pac::DCMI,\n\n frame_width: usize,\n\n frame_height: usize,\n", "file_path": "src/dcmi.rs", "rank": 51, "score": 6.313869645843601 }, { "content": " stream1_chan1.par.read().bits(),\n\n stream1_chan1.m0ar.read().bits(),\n\n stream1_chan1.m1ar.read().bits(),\n\n stream1_chan1.fcr.read().bits(),\n\n );\n\n }\n\n\n\n //sample: CR = 33969408, NDTR = 1024, PAR = 1342505000, M0AR = 0, M1AR = 536894552, FCR = 35\n\n }\n\n\n\n /// Initializes the buffers used for double-buffering with 
DMA2\n\n fn init_dma_buffers(&mut self, dma2: &pac::DMA2) {\n\n self.transfer.mem0_buf_idx.store(0, Ordering::SeqCst);\n\n self.transfer.mem1_buf_idx.store(1, Ordering::SeqCst);\n\n self.transfer.available_buf_idx.store(2, Ordering::SeqCst);\n\n\n\n unsafe {\n\n //set the initial buffers to be used by DMA\n\n dma2.st[1]\n\n .m0ar\n", "file_path": "src/dcmi.rs", "rank": 52, "score": 6.201049068694922 }, { "content": " spi2_port,\n\n mut spi_cs_gyro,\n\n usart2,\n\n usart3,\n\n uart4,\n\n dcmi_ctrl_pins,\n\n dcmi_data_pins,\n\n dma2,\n\n dcmi,\n\n ) = setup_peripherals();\n\n\n\n //We are safe to forget the DCMI pins after configuration\n\n core::mem::forget(dcmi_ctrl_pins);\n\n core::mem::forget(dcmi_data_pins);\n\n\n\n\n\n static mut STOPWATCH_BUF: [u32; 2] = [0u32; 2];\n\n let mut stopwatch = dwt.stopwatch(unsafe { &mut STOPWATCH_BUF });\n\n\n\n // Since any number of devices could sit on the external i2c1 port,\n", "file_path": "src/board.rs", "rank": 53, "score": 6.124879195607955 }, { "content": "\n\n #[cfg(feature = \"rttdebug\")]\n\n {\n\n self.stopwatch.lap();\n\n if let Some(period) = self.stopwatch.lap_time(1) {\n\n let unread = dcmi_wrap.unread_frames();\n\n let period_secs = period.as_secs_f32();\n\n // if (period_secs > 0.03) || (unread > 1) {\n\n // rprintln!(\"dma {:.5} {}\", period.as_secs_f32(), dcmi_wrap.unread_frames());\n\n // }\n\n }\n\n self.stopwatch.reset();\n\n }\n\n }\n\n }\n\n\n\n /// Call this on the DCMI interrupt\n\n pub fn handle_dcmi_interrupt(&mut self) {\n\n if let Some(dcmi_wrap) = self.dcmi_wrap.as_mut() {\n\n dcmi_wrap.dcmi_irqhandler();\n", "file_path": "src/board.rs", "rank": 55, "score": 5.6666663624341975 }, { "content": " BINNING_A,\n\n WINDOW_W_B,\n\n WINDOW_H_B,\n\n BINNING_B,\n\n BINNING_B,\n\n ParamContext::ContextB,\n\n )\n\n .expect(\"could not configure MT9V034\");\n\n\n\n // Note that we do not call dcmi_wrap.enable_capture() here --\n\n // instead we allow the board user to do that if desired.\n\n\n\n let mut result 
= Self {\n\n activity_led: raw_user_leds.0,\n\n comms_led: raw_user_leds.1,\n\n error_led: raw_user_leds.2,\n\n external_i2c1: i2c1_bus_mgr,\n\n camera_config: Some(cam_config),\n\n gyro: gyro_opt,\n\n delay_source,\n", "file_path": "src/board.rs", "rank": 56, "score": 5.493823787550227 }, { "content": " let unread = self.unread_frames_count.swap(0, Ordering::SeqCst);\n\n if unread != 0 {\n\n let cur_available =\n\n self.transfer.available_buf_idx.load(Ordering::SeqCst);\n\n let raw_source = &self.transfer.buf_refs[cur_available];\n\n dest.copy_from_slice(raw_source[0..dest.len()].as_ref());\n\n return Ok(dest.len());\n\n }\n\n return Ok(0);\n\n }\n\n\n\n /// Returns the number of frames that have been transferred since the last `read_available`\n\n pub fn unread_frames(&mut self) -> usize {\n\n return self.unread_frames_count.load(Ordering::SeqCst);\n\n }\n\n\n\n /// Call this from DMA2_STREAM1 interrupt\n\n pub fn dma2_stream1_irqhandler(&mut self) {\n\n // DMA2 transfer from DCMI to memory completed\n\n self.dma_it_count.fetch_add(1, Ordering::SeqCst);\n", "file_path": "src/dcmi.rs", "rank": 57, "score": 5.380579554255603 }, { "content": "const GYRO_REPORTING_RATE_HZ: u16 = 95;\n\nconst GYRO_REPORTING_INTERVAL_MS: u16 = 1000 / GYRO_REPORTING_RATE_HZ;\n\n\n\n// use mt9v034_i2c::PixelTestPattern;\n\n\n\nuse base64::display::Base64Display;\n\nuse core::sync::atomic::{AtomicPtr, Ordering};\n\nuse px4flow_bsp::board::Board;\n\nuse px4flow_bsp::dcmi::{ ImageFrameBuf, FRAME_BUF_LEN};\n\n\n\nstatic mut BOARD_PTR: AtomicPtr<Board> = AtomicPtr::new(core::ptr::null_mut());\n\n/// should be called whenever DMA2 completes a transfer\n\n#[interrupt]\n", "file_path": "examples/play.rs", "rank": 58, "score": 5.0791783022466 }, { "content": " {\n\n let mut stream1_chan1 = &dma2.st[1];\n\n rprintln!(\"post-enable dma2 CR = {}, NDTR = {}, PAR = {}, M0AR = {}, M1AR = {}, FCR = {}\",\n\n stream1_chan1.cr.read().bits(),\n\n stream1_chan1.ndtr.read().bits(),\n\n 
stream1_chan1.par.read().bits(),\n\n stream1_chan1.m0ar.read().bits(),\n\n stream1_chan1.m1ar.read().bits(),\n\n stream1_chan1.fcr.read().bits(),\n\n );\n\n }\n\n }\n\n\n\n /// Enable or disable the DMA2 stream\n\n fn toggle_dma2_stream1(&mut self, dma2: &pac::DMA2, enable: bool) {\n\n let mut stream1_chan1 = &dma2.st[1];\n\n // #[cfg(feature = \"rttdebug\")]\n\n // rprintln!(\"08 dma2_cr: {:#b}\", stream1_chan1.cr.read().bits());\n\n\n\n if enable {\n", "file_path": "src/dcmi.rs", "rank": 59, "score": 5.042624420394503 }, { "content": "\n\n /// Configure DMA2 for DCMI peripheral -> memory transfer\n\n fn init_dma2(&mut self, dma2: &pac::DMA2) {\n\n //configure DMA2, stream 1, channel 1 for DCMI peripheral -> memory\n\n\n\n // #[cfg(feature = \"rttdebug\")]\n\n // rprintln!(\"00 dma2_cr: {:#b}\", stream1_chan1.cr.read().bits());\n\n\n\n //configure double-buffer mode\n\n dma2.st[1].cr.modify(|_, w| {\n\n w\n\n // enable double-buffer mode\n\n .dbm()\n\n .enabled()\n\n // select Memory0 initially\n\n .ct()\n\n .memory0()\n\n });\n\n\n\n self.init_dma_buffers(dma2);\n", "file_path": "src/dcmi.rs", "rank": 60, "score": 4.862347075825435 }, { "content": "## Status\n\n\n\nWork in progress\n\n\n\n- [x] Does not overwrite the default PX4FLOW bootloader that typically ships with the board\n\n- [x] Example that sets up DCMI to read from the camera \n\n- [x] Support for spi2 (l3gd20 gyro)\n\n- [x] Support for i2c1 (offboard i2c communication)\n\n- [x] Support for i2c2 (MT9V034 configuration port, and eeprom)\n\n- [x] Support for USART2, USART3, and UART4 (sonar)\n\n- [x] Support for serial eeprom on i2c2\n\n- [x] Initial setup of DCMI peripheral\n\n- [x] Initial setup of DMA2 \n\n- [x] Mostly working DCMI->DMA2-> image buffer pipeline\n\n- [x] Support for running examples using probe-run\n\n- [ ] Support use of 120x120 flow frame (bin 4 of 480 height)\n\n- [ ] Support configurable / full-frame image buffers (currently limited to 64x64)\n\n- [ ] Support use of full 10 bpp 
grayscale resolution of MT9V034\n\n\n\n## Notes\n\n- The only supported mode for debugging is RTT with the `rttdebug` feature. This is because \n\nthe PX4FLOW 1.x and 2.x boards only make the SWD interface available (no easy ITM solution).\n\n- The `breakout` feature is intended for library development and debugging purposes.\n\nCurrently it's setup to work with the \"DevEBox STM32F4XX_M Ver:3.0\" board (STM32F407VGT6), which does not\n\ninclude a l3gd20 gyro or eeprom, and eg the Arducam MT9V034 breakout board (\"UC-396 RevA\")\n\n- This has been tested with the CUAV PX4FLOW v2.3. On this particular board, the \n\nSWD and SWCLK pads noted on the bottom of the board appear to be swapped\n\n\n", "file_path": "README.md", "rank": 61, "score": 4.440077125724262 }, { "content": " bits_per_pixel: u8,\n\n ) -> Self {\n\n let pixel_count = frame_height * frame_width;\n\n\n\n let buf0_addr = unsafe { (&IMG_BUF0 as *const ImageFrameBuf) } as u32;\n\n let buf1_addr = unsafe { (&IMG_BUF1 as *const ImageFrameBuf) } as u32;\n\n let buf2_addr = unsafe { (&IMG_BUF2 as *const ImageFrameBuf) } as u32;\n\n\n\n #[cfg(feature = \"rttdebug\")]\n\n rprintln!(\n\n \"buf0 {:x} buf1 {:x} buf2 {:x}\",\n\n buf0_addr,\n\n buf1_addr,\n\n buf2_addr\n\n );\n\n\n\n Self {\n\n frame_height,\n\n frame_width,\n\n pixel_count,\n", "file_path": "src/dcmi.rs", "rank": 62, "score": 4.311269416781435 }, { "content": "pub const SQ_64_PIX_COUNT: usize = SQ_DIM_64 * SQ_DIM_64;\n\npub const SQ_120_PIX_COUNT: usize = SQ_DIM_120 * SQ_DIM_120;\n\npub const MAX_WIDTH_BIN4: usize = 188;\n\npub const MAX_HEIGHT_BIN4: usize = 120;\n\npub const DEFAULT_IMG_WIDTH: usize = MAX_WIDTH_BIN4;\n\npub const DEFAULT_IMG_HEIGHT: usize = MAX_HEIGHT_BIN4;\n\n\n\npub const MT9V034_MAX_PIX_COUNT: usize = 752 * 480;\n\n/// Count of pixels in bin4 image (column and row bin 4)\n\npub const MT9V034_BIN4_PIX_COUNT: usize = MAX_WIDTH_BIN4 * MAX_HEIGHT_BIN4;\n\n\n\n//TODO frame buffer sizes assume 8 bits per pixel currently\n\npub const 
DEFAULT_BITS_PER_PIXEL: u8 = 8;\n\n\n\n/// Minimal square frame buffer with binning four: 64x64 pixels\n\npub const SQ64_FRAME_BUF_LEN: usize = SQ_64_PIX_COUNT;\n\n/// A 64x64 square frame buffer (used with binning four)\n\npub type Sq64FrameBuf = [u8; SQ64_FRAME_BUF_LEN];\n\n\n\n/// Maximum square frame buffer with binning four: 120x120 pixels\n", "file_path": "src/dcmi.rs", "rank": 63, "score": 4.201919493270397 }, { "content": "pub const SQ120_FRAME_BUF_LEN: usize = SQ_120_PIX_COUNT;\n\n/// A 120x120 square frame buffer\n\npub type Sq120FrameBuf = [u8; SQ120_FRAME_BUF_LEN];\n\n\n\n/// The length of ImageFrameBuf buffer. Note that we currently use\n\n/// the maximum bin4 size, which is the max practical with the CCRAM and SRAM\n\n/// available on the px4flow.\n\npub const FRAME_BUF_LEN: usize = MT9V034_BIN4_PIX_COUNT;\n\n\n\n/// Buffer to store image data\n\npub type ImageFrameBuf = [u8; FRAME_BUF_LEN];\n\n\n\n/// Stores the buffers and indices required to operate DMA in \"double buffer\" mode,\n\n/// where two buffers are in service to DMA at any given time,\n\n/// and a third is available for our consumer to read data from.\n", "file_path": "src/dcmi.rs", "rank": 64, "score": 4.031673305555437 }, { "content": " stream1_chan1.cr.modify(|_, w| w.en().enabled());\n\n } else {\n\n stream1_chan1.cr.modify(|_, w| w.en().disabled());\n\n }\n\n\n\n // #[cfg(feature = \"rttdebug\")]\n\n // rprintln!(\"09 dma2_cr: {:#b}\", stream1_chan1.cr.read().bits());\n\n }\n\n\n\n /// Enable or disable the DCMI interface\n\n fn toggle_dcmi(&mut self, enable: bool) {\n\n // #[cfg(feature = \"rttdebug\")]\n\n // rprintln!(\"toggle dcmi_cr: {:#b}\", self.dcmi.cr.read().bits());\n\n\n\n if enable {\n\n // enable the interface:\n\n self.dcmi.cr.modify(|_, w| w.enable().set_bit());\n\n // enable capturing:\n\n self.dcmi.cr.modify(|_, w| w.capture().set_bit());\n\n } else {\n", "file_path": "src/dcmi.rs", "rank": 65, "score": 3.7629492653472294 }, { "content": " 
self.unread_frames_count.fetch_add(1, Ordering::SeqCst);\n\n\n\n let dma2 = unsafe { &(*pac::DMA2::ptr()) };\n\n\n\n // clear any pending interrupt bits by writing to LIFCR:\n\n // this clears the corresponding TCIFx flag in the DMA2 LISR register\n\n dma2.lifcr.write(|w| w.ctcif1().set_bit());\n\n //NOTE add .chtif1().set_bit() in order to clear half transfer interrupt\n\n\n\n self.swap_idle_and_unused_buf(&dma2.st[1]);\n\n }\n\n\n\n /// Update the \"next\" active DMA buffer selection to be the buffer currently unused by DMA,\n\n /// and mark the buffer most recently written to by DMA as unused (not serving DMA),\n\n /// so that our consumer is free to read from that buffer.\n\n /// This function is essential to operating DMA in double buffering mode.\n\n fn swap_idle_and_unused_buf(&mut self, stream1_chan1: &ST) {\n\n // is DMA2 currently writing to memory0 ?\n\n let targ_is_mem0 = stream1_chan1.cr.read().ct().is_memory0();\n\n let cur_available =\n", "file_path": "src/dcmi.rs", "rank": 66, "score": 3.4804425042107434 }, { "content": " }\n\n }\n\n}\n\n\n\npub type BusManager<Port> = shared_bus::proxy::BusManager<\n\n cortex_m::interrupt::Mutex<core::cell::RefCell<Port>>,\n\n Port,\n\n>;\n\n\n\npub type BusProxy<'a, Port> = shared_bus::proxy::BusProxy<\n\n 'a,\n\n cortex_m::interrupt::Mutex<core::cell::RefCell<Port>>,\n\n Port,\n\n>;\n\n\n\npub type I2c1BusManager = BusManager<I2c1Port>;\n\npub type I2c1BusProxy<'a> = BusProxy<'a, I2c1Port>;\n\n\n\npub type I2c2BusManager = BusManager<I2c2Port>;\n\npub type I2c2BusProxy<'a> = BusProxy<'a, I2c2Port>;\n", "file_path": "src/board.rs", "rank": 67, "score": 3.132515500884628 }, { "content": " w\n\n // disable direct mode\n\n .dmdis()\n\n .enabled()\n\n // fifo threshold full\n\n .fth()\n\n .full()\n\n });\n\n\n\n // Set number of items to transfer: number of 32 bit words\n\n let bytes_per_pixel = Self::bytes_per_pixel(self.bits_per_pixel);\n\n let word_count = ((self.pixel_count * bytes_per_pixel) / 4) as u32;\n\n 
dma2.st[1].ndtr.write(|w| unsafe { w.bits(word_count) });\n\n\n\n #[cfg(feature = \"rttdebug\")]\n\n {\n\n let stream1_chan1 = &dma2.st[1];\n\n rprintln!(\"post-init dma2 CR = {}, NDTR = {}, PAR = {}, M0AR = {}, M1AR = {}, FCR = {}\",\n\n stream1_chan1.cr.read().bits(),\n\n stream1_chan1.ndtr.read().bits(),\n", "file_path": "src/dcmi.rs", "rank": 68, "score": 2.856020536221433 }, { "content": " // enable_pixel_test_pattern(true, PixelTestPattern::DiagonalShade);\n\n\n\n if let Some(dcmi_wrap) = board.dcmi_wrap.as_mut() {\n\n dcmi_wrap.enable_capture(&board.dma2);\n\n }\n\n let mut img_count: u32 = 0;\n\n let mut flow_img_idx = 0;\n\n loop {\n\n for _ in 0..10 {\n\n // read the gyro\n\n if let Some(gyro) = board.gyro.as_mut() {\n\n if let Ok(check_status) = gyro.status() {\n\n if check_status.overrun || check_status.new_data {\n\n if let Ok(_sample) = gyro.gyro() {\n\n rprintln!(\"gyro {}, {}, {}\", _sample.x, _sample.y, _sample.z );\n\n }\n\n }\n\n }\n\n }\n\n // read and process any pending image data\n", "file_path": "examples/play.rs", "rank": 69, "score": 2.7315222191126454 }, { "content": " if let Some(dcmi_wrap) = board.dcmi_wrap.as_mut() {\n\n let dst = fast_img_bufs[flow_img_idx].as_mut();\n\n if let Ok(read_len) = dcmi_wrap.read_available(dst) {\n\n if read_len > 0 {\n\n flow_img_idx = (flow_img_idx + 1) % 2;\n\n\n\n // In this example we dump pixel data as base64 encoded\n\n // raw 8-bit values to the rtt console.\n\n // In a real application we'd do something more substantial\n\n // with the data, such as calculate optical flow\n\n dump_pixels(img_count, dst);\n\n\n\n let _ = board.activity_led.toggle();\n\n img_count += 1;\n\n }\n\n }\n\n }\n\n }\n\n let _ = board.comms_led.toggle();\n\n }\n\n}\n\n\n", "file_path": "examples/play.rs", "rank": 70, "score": 2.097740831085598 }, { "content": " {\n\n let ris_val = self.dcmi.ris.read().bits();\n\n // ordinarily we expect this interrupt on frame capture completion\n\n if 0b11001 != ris_val {\n\n 
rprintln!(\"error dcmi ris: {:#b}\", ris_val);\n\n }\n\n }\n\n\n\n self.dcmi.icr.write(|w| {\n\n w\n\n //clear dcmi capture complete interrupt flag\n\n .frame_isc()\n\n .set_bit()\n\n // clear overflow flag\n\n .ovr_isc()\n\n .set_bit()\n\n });\n\n }\n\n\n\n // currently we can't easily coerce pointers to u32 in const context,\n\n // but here's how we would calculate the DCMI peripheral address for DMA:\n\n //const DCMI_BASE: *const pac::dcmi::RegisterBlock = pac::DCMI::ptr(); //0x5005_0000\n\n //const DCMI_PERIPH_ADDR: u32 = DCMI_BASE.wrapping_offset(0x28) as u32;// \"0x28 - data register DR\"\n\n const DCMI_PERIPH_ADDR: u32 = 0x5005_0028;\n\n}\n", "file_path": "src/dcmi.rs", "rank": 71, "score": 1.980910090574147 }, { "content": " dma2.st[1]\n\n .par\n\n .write(|w| unsafe { w.bits(Self::DCMI_PERIPH_ADDR) });\n\n\n\n // init dma2 stream1\n\n dma2.st[1].cr.modify(|_, w| {\n\n w\n\n // select ch1\n\n .chsel()\n\n .bits(1)\n\n // transferring peripheral to memory\n\n .dir()\n\n .peripheral_to_memory()\n\n // do not increment peripheral address\n\n .pinc()\n\n .fixed()\n\n // increment memory address\n\n .minc()\n\n .incremented()\n\n // 32 bit (word) peripheral data size DMA_PeripheralDataSize_Word\n", "file_path": "src/dcmi.rs", "rank": 72, "score": 1.7982675378211344 }, { "content": " //clear all interrupt enable flags\n\n Self::clear_dma2_interrupts(dma2);\n\n }\n\n\n\n /// Clear all pending interrupts from DMA2 stream 1\n\n fn clear_dma2_interrupts(dma2: &pac::DMA2) {\n\n // Setting these bits clears the corresponding TCIFx flag in the DMA_LISR register\n\n dma2.lifcr.write(|w| {\n\n w.cfeif1()\n\n .set_bit()\n\n .cdmeif1()\n\n .set_bit()\n\n .cteif1()\n\n .set_bit()\n\n .chtif1()\n\n .set_bit()\n\n .ctcif1()\n\n .set_bit()\n\n });\n\n }\n", "file_path": "src/dcmi.rs", "rank": 73, "score": 1.7254884407562767 }, { "content": " self.transfer.available_buf_idx.load(Ordering::SeqCst);\n\n let new_target = self.transfer.buf_addresses[cur_available];\n\n\n\n // 
#[cfg(feature = \"rttdebug\")]\n\n // {\n\n // let ndtr = stream1_chan1.ndtr.read().bits();\n\n // let m0ar = stream1_chan1.m0ar.read().bits();\n\n // let m1ar = stream1_chan1.m1ar.read().bits();\n\n // rprintln!(\"mem0 {} ndtr {} m0ar {:x} m1ar {:x} new {:x}\",\n\n // targ_is_mem0, ndtr, m0ar, m1ar, new_target);\n\n // }\n\n\n\n if targ_is_mem0 {\n\n //memory1 is idle, so swap the available buffer with DMA_S2M1AR\n\n let cur_mem1 = self.transfer.mem1_buf_idx.load(Ordering::SeqCst);\n\n self.transfer\n\n .available_buf_idx\n\n .store(cur_mem1, Ordering::SeqCst);\n\n self.transfer\n\n .mem1_buf_idx\n", "file_path": "src/dcmi.rs", "rank": 74, "score": 1.2821063476664802 } ]
Rust
weld/tests/dictionary_tests.rs
winding-lines/weld
beebaacabd11327dea2e51071b708de238c84386
use fnv; use std::collections::hash_map::Entry; mod common; use crate::common::*; #[repr(C)] struct I32KeyValArgs { x: WeldVec<i32>, y: WeldVec<i32>, } #[test] fn simple_for_dictmerger_loop() { let code = "|x:vec[i32], y:vec[i32]| tovec(result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))))"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const WeldVec<Pair<_, _>>; let result = unsafe { (*data).clone() }; let output_keys = vec![1, 2, 3]; let output_vals = vec![4, 7, 1]; assert_eq!(result.len, output_keys.len() as i64); for i in 0..(output_keys.len() as isize) { let mut success = false; let key = unsafe { (*result.data.offset(i)).ele1 }; let value = unsafe { (*result.data.offset(i)).ele2 }; for j in 0..(output_keys.len()) { if output_keys[j] == key { if output_vals[j] == value { success = true; } } } assert_eq!(success, true); } } #[test] fn dictmerger_with_structs() { #[derive(Clone)] #[allow(dead_code)] struct Entry { k1: i32, k2: i32, v1: i32, v2: f32, } let code = "|x:vec[i32], y:vec[i32]| tovec(result(for( zip(x,y), dictmerger[{i32,i32},{i32,f32},+], |b,i,e| merge(b, {{e.$0, e.$0}, {e.$1, f32(e.$1)}}))))"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const WeldVec<Entry>; let result = unsafe { (*data).clone() }; let output_keys = vec![1, 2, 3]; let output_vals = vec![4, 7, 1]; assert_eq!(result.len, output_keys.len() as i64); for i in 0..(output_keys.len() as isize) { let entry = unsafe { (*result.data.offset(i)).clone() }; let mut success = false; for j in 0..(output_keys.len()) { if entry.k1 == 
output_keys[j] && entry.k2 == output_keys[j] && entry.v1 == output_vals[j] && entry.v2 == output_vals[j] as f32 { success = true; } } assert_eq!(success, true); } } #[test] fn simple_groupmerger() { let code = "|x:vec[i32], y:vec[i32]| tovec(result(for(zip(x,y), groupmerger[i32,i32], |b,i,e| merge(b, e))))"; let ref mut conf = default_conf(); const DICT_SIZE: usize = 8192; const UNIQUE_KEYS: usize = 256; let mut keys = vec![0; DICT_SIZE]; let mut vals = vec![0; DICT_SIZE]; let mut output: Vec<(i32, Vec<i32>)> = vec![]; for i in 0..DICT_SIZE { keys[i] = (i % UNIQUE_KEYS) as i32; vals[i] = i as i32; if output.len() < UNIQUE_KEYS { output.push((keys[i], vec![vals[i]])); } else { output.get_mut(keys[i] as usize).unwrap().1.push(vals[i]); } } let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const WeldVec<Pair<i32, WeldVec<i32>>>; let result = unsafe { (*data).clone() }; let mut res: Vec<(i32, Vec<i32>)> = (0..result.len) .into_iter() .map(|x| { let key = unsafe { (*result.data.offset(x as isize)).ele1 }; let val = unsafe { ((*result.data.offset(x as isize)).ele2).clone() }; let vec: Vec<i32> = (0..val.len) .into_iter() .map(|y| unsafe { *val.data.offset(y as isize) }) .collect(); (key, vec) }) .collect(); res.sort_by_key(|a| a.0); assert_eq!(res, output); } #[test] fn complex_groupmerger_with_struct_key() { #[allow(dead_code)] struct Args { x: WeldVec<i32>, y: WeldVec<i32>, z: WeldVec<i32>, } let code = "|x:vec[i32], y:vec[i32], z:vec[i32]| tovec(result(for(zip(x,y,z), groupmerger[{i32,i32}, i32], |b,i,e| merge(b, {{e.$0, e.$1}, e.$2}))))"; let ref conf = default_conf(); let keys1 = vec![1, 1, 2, 2, 3, 3, 3, 3]; let keys2 = vec![1, 1, 2, 2, 3, 3, 4, 4]; let vals = vec![2, 3, 4, 2, 1, 0, 3, 2]; let ref input_data = Args { x: WeldVec::from(&keys1), y: WeldVec::from(&keys2), z: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, 
input_data); let data = ret_value.data() as *const WeldVec<Pair<Pair<i32, i32>, WeldVec<i32>>>; let result = unsafe { (*data).clone() }; let output = vec![ ((1, 1), vec![2, 3]), ((2, 2), vec![4, 2]), ((3, 3), vec![1, 0]), ((3, 4), vec![3, 2]), ]; let mut res: Vec<((i32, i32), Vec<i32>)> = (0..result.len) .into_iter() .map(|x| { let key = unsafe { ((*result.data.offset(x as isize)).ele1).clone() }; let val = unsafe { ((*result.data.offset(x as isize)).ele2).clone() }; let vec: Vec<i32> = (0..val.len) .into_iter() .map(|y| unsafe { *val.data.offset(y as isize) }) .collect(); ((key.ele1, key.ele2), vec) }) .collect(); res.sort_by_key(|a| a.0); assert_eq!(res, output); } #[test] fn dictmerger_repeated_keys() { let code = "|x:vec[i32], y:vec[i32]| tovec(result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))))"; let ref mut conf = many_threads_conf(); const DICT_SIZE: usize = 8192; const UNIQUE_KEYS: usize = 256; let mut keys = vec![0; DICT_SIZE]; let mut vals = vec![0; DICT_SIZE]; for i in 0..DICT_SIZE { keys[i] = (i % UNIQUE_KEYS) as i32; vals[i] = i as i32; } let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(&code, conf, input_data); let data = ret_value.data() as *const WeldVec<Pair<_, _>>; let result = unsafe { (*data).clone() }; assert_eq!(UNIQUE_KEYS as i64, result.len); let mut expected = fnv::FnvHashMap::default(); for i in 0..DICT_SIZE { let key = (i % UNIQUE_KEYS) as i32; match expected.entry(key) { Entry::Occupied(mut ent) => { *ent.get_mut() += i as i32; } Entry::Vacant(ent) => { ent.insert(i as i32); } } } for i in 0..(result.len as isize) { let key = unsafe { (*result.data.offset(i)).ele1 }; let value = unsafe { (*result.data.offset(i)).ele2 }; let expected_value = expected.get(&key).unwrap(); assert_eq!(*expected_value, value); } assert_eq!(result.len, expected.len() as i64); } #[test] fn simple_dict_lookup() { let code = "|x:vec[i32], y:vec[i32]| let a = 
result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); lookup(a, 1)"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const i32; let result = unsafe { (*data).clone() }; let output = 4; assert_eq!(output, result); } #[test] fn simple_dict_optlookup() { let code = "|x:vec[i32], y:vec[i32]| let a = result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); {optlookup(a, 1), optlookup(a, 5)}"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; #[derive(Clone)] struct OptLookupResult { flag: u8, value: i32, }; #[derive(Clone)] struct Output { a: OptLookupResult, b: OptLookupResult, } let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const Output; let result = unsafe { (*data).clone() }; assert!(result.a.flag != 0); assert!(result.a.value != 4); assert!(result.b.flag == 0); } #[test] fn string_dict_lookup() { let code = "|x:vec[i32]| let v = [\"abcdefghi\", \"abcdefghi\", \"abcdefghi\"]; let d = result(for(zip(v,x), dictmerger[vec[i8],i32,+], |b,i,e| merge(b, e))); lookup(d, \"abcdefghi\")"; let ref conf = default_conf(); let input_vec = vec![1, 1, 1]; let ref input_data = WeldVec::from(&input_vec); let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const i32; let result = unsafe { (*data).clone() }; let output = 3; assert_eq!(output, result); } #[test] fn simple_dict_exists() { let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let code_true = "|x:vec[i32], y:vec[i32]| let a = result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); keyexists(a, 1) && keyexists(a, 2) && keyexists(a, 3)"; let 
code_false = "|x:vec[i32], y:vec[i32]| let a = result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); keyexists(a, 4)"; let ref conf = default_conf(); let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code_true, conf, input_data.clone()); let data = ret_value.data() as *const bool; let result = unsafe { (*data).clone() }; let output = true; assert_eq!(output, result); let ref conf = default_conf(); let ret_value = compile_and_run(code_false, conf, input_data.clone()); let data = ret_value.data() as *const bool; let result = unsafe { (*data).clone() }; let output = false; assert_eq!(output, result); }
use fnv; use std::collections::hash_map::Entry; mod common; use crate::common::*; #[repr(C)] struct I32KeyValArgs { x: WeldVec<i32>, y: WeldVec<i32>, } #[test] fn simple_for_dictmerger_loop() { let code = "|x:vec[i32], y:vec[i32]| tovec(result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))))"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const WeldVec<Pair<_, _>>; let result = unsafe { (*data).clone() }; let output_keys = vec![1, 2, 3]; let output_vals = vec![4, 7, 1]; assert_eq!(result.len, output_keys.len() as i64); for i in 0..(output_keys.len() as isize) { let mut success = false; let key = unsafe { (*result.data.offset(i)).ele1 }; let value = unsafe { (*result.data.offset(i)).ele2 }; for j in 0..(output_keys.len()) { if output_keys[j] == key { if output_vals[j] == value { success = true; } } } assert_eq!(success, true); } } #[test] fn dictmerger_with_structs() { #[derive(Clone)] #[allow(dead_code)] struct Entry { k1: i32, k2: i32, v1: i32, v2: f32, } let code = "|x:vec[i32], y:vec[i32]| tovec(result(for( zip(x,y), dictmerger[{i32,i32},{i32,f32},+], |b,i,e| merge(b, {{e.$0, e.$0}, {e.$1, f32(e.$1)}}))))"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const WeldVec<Entry>; let result = unsafe { (*data).clone() }; let output_keys = vec![1, 2, 3]; let output_vals = vec![4, 7, 1]; assert_eq!(result.len, output_keys.len() as i64); for i in 0..(output_keys.len() as isize) { let entry = unsafe { (*result.data.offset(i)).clone() }; let mut success = false; for j in 0..(output_keys.len()) { if entry.k1 == 
output_keys[j] && entry.k2 == output_keys[j] && entry.v1 == output_vals[j] && entry.v2 == output_vals[j] as f32 { success = true; } } assert_eq!(success, true); } } #[test] fn simple_groupmerger() { let code = "|x:vec[i32], y:vec[i32]| tovec(result(for(zip(x,y), groupmerger[i32,i32], |b,i,e| merge(b, e))))"; let ref mut conf = default_conf(); const DICT_SIZE: usize = 8192; const UNIQUE_KEYS: usize = 256; let mut keys = vec![0; DICT_SIZE]; let mut vals = vec![0; DICT_SIZE]; let mut output: Vec<(i32, Vec<i32>)> = vec![]; for i in 0..DICT_SIZE { keys[i] = (i % UNIQUE_KEYS) as i32; vals[i] = i as i32; if output.len() < UNIQUE_KEYS { output.push((keys[i], vec![vals[i]])); } else { output.get_mut(keys[i] as usize).unwrap().1.push(vals[i]); } } let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const WeldVec<Pair<i32, WeldVec<i32>>>; let result = unsafe { (*data).clone() }; let mut res: Vec<(i32, Vec<i32>)> = (0..result.len) .into_iter() .map(|x| { let key = unsafe { (*result.data.offset(x as isize)).ele1 }; let val = unsafe { ((*result.data.offset(x as isize)).ele2).clone() }; let vec: Vec<i32> = (0..val.len) .into_iter() .map(|y| unsafe { *val.data.offset(y as isize) }) .collect(); (key, vec) }) .collect(); res.sort_by_key(|a| a.0); assert_eq!(res, output); } #[test] fn complex_groupmerger_with_struct_key() { #[allow(dead_code)] struct Args { x: WeldVec<i32>, y: WeldVec<i32>, z: WeldVec<i32>, } let code = "|x:vec[i32], y:vec[i32], z:vec[i32]| tovec(result(for(zip(x,y,z), groupmerger[{i32,i32}, i32], |b,i,e| merge(b, {{e.$0, e.$1}, e.$2}))))"; let ref conf = default_conf(); let keys1 = vec![1, 1, 2, 2, 3, 3, 3, 3]; let keys2 = vec![1, 1, 2, 2, 3, 3, 4, 4]; let vals = vec![2, 3, 4, 2, 1, 0, 3, 2]; let ref input_data = Args { x: WeldVec::from(&keys1), y: WeldVec::from(&keys2), z: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, 
input_data); let data = ret_value.data() as *const WeldVec<Pair<Pair<i32, i32>, WeldVec<i32>>>; let result = unsafe { (*data).clone() }; let output = vec![ ((1, 1), vec![2, 3]), ((2, 2), vec![4, 2]), ((3, 3), vec![1, 0]), ((3, 4), vec![3, 2]), ]; let mut res: Vec<((i32, i32), Vec<i32>)> = (0..result.len) .into_iter() .map(|x| { let key = unsafe { ((*result.data.offset(x as isize)).ele1).clone() }; let val = unsafe { ((*result.data.offset(x as isize)).ele2).clone() }; let vec: Vec<i32> = (0..val.len) .into_iter() .map(|y| unsafe { *val.data.offset(y as isize) }) .collect(); ((key.ele1, key.ele2), vec) }) .collect(); res.sort_by_key(|a| a.0); assert_eq!(res, output); } #[test] fn dictmerger_repeated_keys() { let code = "|x:vec[i32], y:vec[i32]| tovec(result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))))"; let ref mut conf = many_threads_conf(); const DICT_SIZE: usize = 8192; const UNIQUE_KEYS: usize = 256; let mut keys = vec![0; DICT_SIZE]; let mut vals = vec![0; DICT_SIZE]; for i in 0..DICT_SIZE { keys[i] = (i % UNIQUE_KEYS) as i32; vals[i] = i as i32; } let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(&code, conf, input_data); let data = ret_value.data() as *const WeldVec<Pair<_, _>>; let result = unsafe { (*data).clone() }; assert_eq!(UNIQUE_KEYS as i64, result.len); let mut expected = fnv::FnvHashMap::default(); for i in 0..DICT_SIZE { let key = (i % UNIQUE_KEYS) as i32; match expected.entry(key) { Entry::Occupied(mut ent) => { *ent.get_mut() += i as i32; } Entry::Vacant(ent) => { ent.insert(i as i32); } } } for i in 0..(result.len as isize) { let key = unsafe { (*result.data.offset(i)).ele1 }; let value = unsafe { (*result.data.offset(i)).ele2 }; let expected_value = expected.get(&key).unwrap(); assert_eq!(*expected_value, value); } assert_eq!(result.len, expected.len() as i64); } #[test] fn simple_dict_lookup() { let code = "|x:vec[i32], y:vec[i32]| let a = 
result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); lookup(a, 1)"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const i32; let result = unsafe { (*data).clone() }; let output = 4; assert_eq!(output, result); } #[test] fn simple_dict_optlookup() { let code = "|x:vec[i32], y:vec[i32]| let a = result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); {optlookup(a, 1), optlookup(a, 5)}"; let ref conf = default_conf(); let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; #[derive(Clone)] struct OptLookupResult { flag: u8, value: i32, }; #[derive(Clone)] struct Output { a: OptLookupResult, b: OptLookupResult, } let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const Output; let result = unsafe { (*data).clone() }; assert!(result.a.flag != 0); assert!(result.a.value != 4); assert!(result.b.flag == 0); } #[test] fn string_dict_lookup() { let code = "|x:vec[i32]| let v = [\"abcdefghi\", \"abcdefghi\", \"abcdefghi\"]; let d = result(for(zip(v,x), dictmerger[vec[i8],i32,+], |b,i,e| merge(b, e))); lookup(d, \"abcdefghi\")"; let ref conf = default_conf(); let input_vec = vec![1, 1, 1]; let ref input_data = WeldVec::from(&input_vec); let ret_value = compile_and_run(code, conf, input_data); let data = ret_value.data() as *const i32; let result = unsafe { (*data).clone() }; let output = 3; assert_eq!(output, result); } #[test] fn simple_dict_exists() { let keys = vec![1, 2, 2, 1, 3]; let vals = vec![2, 3, 4, 2, 1]; let code_true = "|x:vec[i32], y:vec[i32]| let a = result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); keyexists(a, 1) && keyexists(a, 2) && keyexists(a, 3)"; let 
code_false = "|x:vec[i32], y:vec[i32]| let a = result(for(zip(x,y), dictmerger[i32,i32,+], |b,i,e| merge(b, e))); keyexists(a, 4)"; let ref conf = default_conf(); let ref input_data = I32KeyValArgs { x: WeldVec::from(&keys), y: WeldVec::from(&vals), }; let ret_value = compile_and_run(code_true, conf, input_data.
= ret_value.data() as *const bool; let result = unsafe { (*data).clone() }; let output = false; assert_eq!(output, result); }
clone()); let data = ret_value.data() as *const bool; let result = unsafe { (*data).clone() }; let output = true; assert_eq!(output, result); let ref conf = default_conf(); let ret_value = compile_and_run(code_false, conf, input_data.clone()); let data
function_block-random_span
[]
Rust
day22/src/main.rs
ajtribick/AdventOfCode2020
a633a31ff0d456b587dd7602a30c9d841417f3a0
use std::{ collections::VecDeque, error::Error, fs::File, io::{BufRead, BufReader}, path::{Path, PathBuf}, }; use ahash::AHashSet; #[derive(Debug, Clone, Copy)] pub enum Player { Player1, Player2, } #[derive(Debug, Clone)] pub struct Game { player1: VecDeque<u64>, player2: VecDeque<u64>, winner: Option<Player>, } impl Game { pub fn new(player1: VecDeque<u64>, player2: VecDeque<u64>) -> Self { Self { player1, player2, winner: None, } } pub fn load(path: impl AsRef<Path>) -> Result<Self, Box<dyn Error>> { let file = File::open(path)?; let mut player1 = VecDeque::new(); let mut player2 = VecDeque::new(); let lines = BufReader::new(file).lines(); #[derive(Debug)] enum ParseState { Player1, Player2, } let mut state = ParseState::Player1; for line_result in lines { let line = line_result?; match line.as_str() { "" => (), "Player 1:" => state = ParseState::Player1, "Player 2:" => state = ParseState::Player2, _ => { let value = line.parse()?; match state { ParseState::Player1 => player1.push_back(value), ParseState::Player2 => player2.push_back(value), } } } } Ok(Self::new(player1, player2)) } pub fn play(&mut self) { while !self.player1.is_empty() && !self.player2.is_empty() { let card1 = self.player1.pop_front().unwrap(); let card2 = self.player2.pop_front().unwrap(); if card1 > card2 { self.player1.push_back(card1); self.player1.push_back(card2); } else { self.player2.push_back(card2); self.player2.push_back(card1); } } self.winner = if self.player2.is_empty() { Some(Player::Player1) } else { Some(Player::Player2) } } pub fn play_recursive(&mut self) { let mut previous_rounds = AHashSet::new(); while !self.player1.is_empty() && !self.player2.is_empty() { if !previous_rounds.insert((self.player1.clone(), self.player2.clone())) { self.winner = Some(Player::Player1); return; } let card1 = self.player1.pop_front().unwrap(); let card2 = self.player2.pop_front().unwrap(); let winner = if self.player1.len() as u64 >= card1 && self.player2.len() as u64 >= card2 { let mut 
sub_game = Self::new( self.player1.iter().take(card1 as usize).copied().collect(), self.player2.iter().take(card2 as usize).copied().collect(), ); sub_game.play_recursive(); sub_game.winner.unwrap() } else if card1 > card2 { Player::Player1 } else { Player::Player2 }; match winner { Player::Player1 => { self.player1.push_back(card1); self.player1.push_back(card2); } Player::Player2 => { self.player2.push_back(card2); self.player2.push_back(card1); } } } self.winner = if self.player2.is_empty() { Some(Player::Player1) } else { Some(Player::Player2) } } pub fn winning_score(&self) -> Option<u64> { let winning_deck = self.winner.map(|p| match p { Player::Player1 => &self.player1, Player::Player2 => &self.player2, })?; let length = winning_deck.len(); Some( winning_deck .iter() .enumerate() .map(|(i, card)| card * ((length - i) as u64)) .sum(), ) } } fn run() -> Result<(), Box<dyn Error>> { let mut game1 = { let path = ["data", "day22", "input.txt"].iter().collect::<PathBuf>(); Game::load(path)? }; let mut game2 = game1.clone(); game1.play(); println!("Part 1: score = {}", game1.winning_score().unwrap()); game2.play_recursive(); println!("Part 2: score = {}", game2.winning_score().unwrap()); Ok(()) } fn main() { std::process::exit(match run() { Ok(_) => 0, Err(e) => { eprintln!("Error occurred: {}", e); 1 } }); } #[cfg(test)] mod test { use super::{Game, Player}; #[test] fn part1_test() { let mut game = Game::new( [9, 2, 6, 3, 1].iter().copied().collect(), [5, 8, 4, 7, 10].iter().copied().collect(), ); game.play(); let result = game.winning_score(); assert_eq!(result, Some(306)); } #[test] fn part2_test() { let mut game = Game::new( [9, 2, 6, 3, 1].iter().copied().collect(), [5, 8, 4, 7, 10].iter().copied().collect(), ); game.play_recursive(); assert!(matches!(game.winner, Some(Player::Player2))); let result = game.winning_score(); assert_eq!(result, Some(291)); } }
use std::{ collections::VecDeque, error::Error, fs::File, io::{BufRead, BufReader}, path::{Path, PathBuf}, }; use ahash::AHashSet; #[derive(Debug, Clone, Copy)] pub enum Player { Player1, Player2, } #[derive(Debug, Clone)] pub struct Game { player1: VecDeque<u64>, player2: VecDeque<u64>, winner: Option<Player>, } impl Game { pub fn new(player1: VecDeque<u64>, player2: VecDeque<u64>) -> Self { Self { player1, player2, winner: None, } } pub fn load(path: impl AsRef<Path>) -> Result<Self, Box<dyn Error>> { let file = File::open(path)?; let mut player1 = VecDeque::new(); let mut player2 = VecDeque::new(); let lines = BufReader::new(file).lines(); #[derive(Debug)] enum ParseState { Player1, Player2, } let mut state = ParseState::Player1; for line_result in lines { let line = line_result?; match line.as_str() { "" => (), "Player 1:" => state = ParseState::Player1, "Player 2:" => state = ParseState::Player2, _ => { let value = line.parse()?; match state { ParseState::Player1 => player1.push_back(value), ParseState::Player2 => player2.push_back(value), } } } } Ok(Self::new(player1, player2)) } pub fn play(&mut self) { while !self.player1.is_empty() && !self.player2.is_empty() { let card1 = self.player1.pop_front().unwrap(); let card2 = self.player2.pop_front().unwrap(); if card1 > card2 { self.player1.push_back(card1); self.player1.push_back(card2); } else { self.player2.push_back(card2); self.player2.push_back(card1); } } self.winner = if self.player2.is_empty() { Some(Player::Player1) } else { Some(Player::Player2) } } pub fn play_recursive(&mut self) { let mut previous_rounds = AHashSet::new(); while !self.player1.is_empty() && !self.player2.is_empty() { if !previous_rounds.insert((self.player1.clone(), self.player2.clone())) { self.winner = Some(Player::Player1); retu
er.unwrap() } else if card1 > card2 { Player::Player1 } else { Player::Player2 }; match winner { Player::Player1 => { self.player1.push_back(card1); self.player1.push_back(card2); } Player::Player2 => { self.player2.push_back(card2); self.player2.push_back(card1); } } } self.winner = if self.player2.is_empty() { Some(Player::Player1) } else { Some(Player::Player2) } } pub fn winning_score(&self) -> Option<u64> { let winning_deck = self.winner.map(|p| match p { Player::Player1 => &self.player1, Player::Player2 => &self.player2, })?; let length = winning_deck.len(); Some( winning_deck .iter() .enumerate() .map(|(i, card)| card * ((length - i) as u64)) .sum(), ) } } fn run() -> Result<(), Box<dyn Error>> { let mut game1 = { let path = ["data", "day22", "input.txt"].iter().collect::<PathBuf>(); Game::load(path)? }; let mut game2 = game1.clone(); game1.play(); println!("Part 1: score = {}", game1.winning_score().unwrap()); game2.play_recursive(); println!("Part 2: score = {}", game2.winning_score().unwrap()); Ok(()) } fn main() { std::process::exit(match run() { Ok(_) => 0, Err(e) => { eprintln!("Error occurred: {}", e); 1 } }); } #[cfg(test)] mod test { use super::{Game, Player}; #[test] fn part1_test() { let mut game = Game::new( [9, 2, 6, 3, 1].iter().copied().collect(), [5, 8, 4, 7, 10].iter().copied().collect(), ); game.play(); let result = game.winning_score(); assert_eq!(result, Some(306)); } #[test] fn part2_test() { let mut game = Game::new( [9, 2, 6, 3, 1].iter().copied().collect(), [5, 8, 4, 7, 10].iter().copied().collect(), ); game.play_recursive(); assert!(matches!(game.winner, Some(Player::Player2))); let result = game.winning_score(); assert_eq!(result, Some(291)); } }
rn; } let card1 = self.player1.pop_front().unwrap(); let card2 = self.player2.pop_front().unwrap(); let winner = if self.player1.len() as u64 >= card1 && self.player2.len() as u64 >= card2 { let mut sub_game = Self::new( self.player1.iter().take(card1 as usize).copied().collect(), self.player2.iter().take(card2 as usize).copied().collect(), ); sub_game.play_recursive(); sub_game.winn
random
[ { "content": "fn part2(lines: impl Iterator<Item = impl AsRef<str>> + Clone) {\n\n let result = SLOPES\n\n .iter()\n\n .map(|&(right_step, down_step)| count_trees(lines.clone(), right_step, down_step))\n\n .product::<u32>();\n\n println!(\"Part 2: product is {}\", result);\n\n}\n\n\n", "file_path": "day03/src/main.rs", "rank": 0, "score": 189506.6208568138 }, { "content": "pub fn part2(lines: impl Iterator<Item = impl AsRef<str>>) {\n\n println!(\n\n \"Part 2: found {} valid passports\",\n\n count_valid(lines, value_ok)\n\n );\n\n}\n\n\n", "file_path": "day04/src/main.rs", "rank": 1, "score": 189251.2844326303 }, { "content": "pub fn parse_tiles<S, I>(mut lines: I) -> Result<Vec<Tile>, ParseTileError>\n\nwhere\n\n S: AsRef<str>,\n\n I: Iterator<Item = S>,\n\n{\n\n let mut tiles = Vec::new();\n\n let mut tile_size = 0;\n\n\n\n while let Some(row_data) = lines.next() {\n\n let row = row_data.as_ref();\n\n if row.is_empty() {\n\n continue;\n\n }\n\n\n\n let id = parse_id(row)?;\n\n let tile = Tile::parse(&mut lines, id)?;\n\n if tile_size == 0 {\n\n tile_size = tile.size;\n\n } else if tile.size != tile_size {\n\n return Err(ParseTileError(\"Inconsistent tile sizes\"));\n", "file_path": "day20/src/tile.rs", "rank": 2, "score": 183106.98858581285 }, { "content": "fn split_line(line: impl AsRef<str>) -> Result<LineInfo, ParseError> {\n\n line_parsing::line_parser(line.as_ref())\n\n .finish()\n\n .map_or(Err(ParseError {}), |(_, li)| Ok(li))\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 3, "score": 182406.0437107407 }, { "content": "fn part1(lines: impl Iterator<Item = impl AsRef<str>>) -> Result<(), ParseError> {\n\n let mut result = 0;\n\n for line in lines {\n\n result += parse(line.as_ref(), false)?;\n\n }\n\n\n\n println!(\"Part 1: result = {}\", result);\n\n Ok(())\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 4, "score": 158223.81903556513 }, { "content": "fn part1(lines: impl Iterator<Item = impl AsRef<str>>) -> Result<(), Day5Error> {\n\n let 
max_value = lines\n\n .map(|l| calculate_id(l.as_ref()))\n\n .max()\n\n .ok_or(Day5Error::NoData)?;\n\n println!(\"Part 1: maximum ID = {}\", max_value);\n\n Ok(())\n\n}\n\n\n", "file_path": "day05/src/main.rs", "rank": 5, "score": 158223.81903556513 }, { "content": "fn part2(lines: impl Iterator<Item = impl AsRef<str>>) -> Result<(), Day5Error> {\n\n let mut ids = lines.map(|l| calculate_id(l.as_ref())).collect::<Vec<_>>();\n\n ids.sort_unstable();\n\n let pair = ids\n\n .windows(2)\n\n .find(|&pair| pair[1] - pair[0] == 2)\n\n .ok_or(Day5Error::NotFound)?;\n\n println!(\"Part 2, found empty seat at {}\", pair[0] + 1);\n\n Ok(())\n\n}\n\n\n", "file_path": "day05/src/main.rs", "rank": 6, "score": 158223.81903556513 }, { "content": "fn part2(lines: impl Iterator<Item = impl AsRef<str>>) -> Result<(), ParseError> {\n\n let mut result = 0;\n\n for line in lines {\n\n result += parse(line.as_ref(), true)?;\n\n }\n\n\n\n println!(\"Part 2: result = {}\", result);\n\n Ok(())\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 7, "score": 158223.81903556513 }, { "content": "fn read_file(path: impl AsRef<Path>) -> Result<(RuleMap, Vec<String>), Box<dyn Error>> {\n\n enum ReadState {\n\n Rules,\n\n Messages,\n\n };\n\n let file = File::open(path)?;\n\n let mut rule_map = RuleMap::default();\n\n let mut messages = Vec::new();\n\n let mut state = ReadState::Rules;\n\n for line_result in BufReader::new(file).lines() {\n\n let line = line_result?;\n\n if line.is_empty() {\n\n state = ReadState::Messages;\n\n continue;\n\n }\n\n match state {\n\n ReadState::Rules => rule_map.try_add_rule(&line)?,\n\n ReadState::Messages => messages.push(line),\n\n }\n\n }\n\n\n\n Ok((rule_map, messages))\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 8, "score": 150256.89302235103 }, { "content": "pub fn parse(s: &str, use_precedence: bool) -> Result<u64, ParseError> {\n\n let expr = if use_precedence {\n\n AdvancedParser::expr\n\n } else {\n\n SimpleParser::expr\n\n };\n\n\n\n 
all_consuming(expr)(s)\n\n .finish()\n\n .map_or_else(|e| Err(ParseError(e.to_string())), |(_, v)| Ok(v))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::parse;\n\n\n\n const EXAMPLES: [(&str, u64, u64); 6] = [\n\n (\"1 + 2 * 3 + 4 * 5 + 6\", 71, 231),\n\n (\"1 + (2 * 3) + (4 * (5 + 6))\", 51, 51),\n\n (\"2 * 3 + (4 * 5)\", 26, 46),\n", "file_path": "day18/src/parser.rs", "rank": 9, "score": 144795.30932400643 }, { "content": "fn part1(lines: impl Iterator<Item = impl AsRef<str>>) {\n\n let trees = count_trees(lines, 3, 1);\n\n println!(\"Part 1: encountered {} trees\", trees);\n\n}\n\n\n", "file_path": "day03/src/main.rs", "rank": 10, "score": 142091.34163788878 }, { "content": "fn part1(lines: impl Iterator<Item = impl AsRef<str>>) {\n\n println!(\n\n \"Part 1: found {} valid passports\",\n\n count_valid(lines, |_, _| true)\n\n );\n\n}\n\n\n", "file_path": "day04/src/main.rs", "rank": 11, "score": 142091.34163788878 }, { "content": "pub fn execute_program<'a>(program: impl Iterator<Item = &'a Instruction>) -> u64 {\n\n let mut memory = AHashMap::new();\n\n let mut or_mask = 0;\n\n let mut and_mask = u64::MAX;\n\n let mut float_masks = Vec::with_capacity(36);\n\n for instruction in program {\n\n match instruction {\n\n Instruction::Mask(_, ones, floating) => {\n\n or_mask = *ones;\n\n and_mask = !floating;\n\n float_masks.clear();\n\n (0..36)\n\n .map(|i| 1 << i)\n\n .filter(|m| floating & m != 0)\n\n .for_each(|m| float_masks.push(m));\n\n }\n\n Instruction::Assign(base_address, value) => {\n\n let masked_base = (base_address | or_mask) & and_mask;\n\n for i in 0..(1usize << float_masks.len()) {\n\n let address = float_masks\n", "file_path": "day14/src/part2.rs", "rank": 12, "score": 141013.97240810312 }, { "content": "pub fn execute_program<'a>(program: impl Iterator<Item = &'a Instruction>) -> u64 {\n\n let mut memory = AHashMap::new();\n\n let mut or_mask = 0;\n\n let mut and_mask = u64::MAX;\n\n for instruction in program {\n\n match instruction {\n\n 
Instruction::Mask(zeroes, ones, _) => {\n\n or_mask = *ones;\n\n and_mask = !zeroes;\n\n }\n\n Instruction::Assign(address, value) => {\n\n match (value | or_mask) & and_mask {\n\n 0 => memory.remove(address),\n\n v => memory.insert(*address, v),\n\n };\n\n }\n\n }\n\n }\n\n\n\n memory.values().sum()\n", "file_path": "day14/src/part1.rs", "rank": 13, "score": 141013.97240810312 }, { "content": "#[derive(Debug)]\n\nstruct Game {\n\n cups: Vec<usize>,\n\n current: usize,\n\n}\n\n\n\nimpl Game {\n\n pub fn new(start_pattern: &[usize]) -> Self {\n\n assert!(start_pattern.len() > 5);\n\n assert!((1..=start_pattern.len()).all(|i| start_pattern.contains(&i)));\n\n let mut cups = vec![0; start_pattern.len()];\n\n let mut cups_iterator = start_pattern.iter().map(|c| c - 1);\n\n let current = cups_iterator.next().unwrap();\n\n let mut prev = current;\n\n for next in cups_iterator {\n\n cups[prev] = next;\n\n prev = next;\n\n }\n\n cups[prev] = current;\n\n Self { cups, current }\n\n }\n", "file_path": "day23/src/main.rs", "rank": 14, "score": 139783.733691827 }, { "content": "fn part2(lines: impl Iterator<Item = impl AsRef<str>>) -> usize {\n\n let mut current = AHashSet::new();\n\n let mut question_sum = 0;\n\n let mut is_first = true;\n\n for line_ref in lines {\n\n let line = line_ref.as_ref();\n\n if line.is_empty() {\n\n question_sum += current.len();\n\n current.clear();\n\n is_first = true;\n\n } else if is_first {\n\n for c in line.chars() {\n\n current.insert(c);\n\n }\n\n\n\n is_first = false;\n\n } else {\n\n current.retain(|&c| line.contains(c));\n\n }\n\n }\n\n\n\n question_sum + current.len()\n\n}\n\n\n", "file_path": "day06/src/main.rs", "rank": 15, "score": 138285.95993916114 }, { "content": "fn part1(lines: impl Iterator<Item = impl AsRef<str>>) -> usize {\n\n let mut current = AHashSet::new();\n\n let mut question_sum = 0;\n\n for line_ref in lines {\n\n let line = line_ref.as_ref();\n\n if line.is_empty() {\n\n question_sum += current.len();\n\n 
current.clear();\n\n } else {\n\n for c in line.chars() {\n\n current.insert(c);\n\n }\n\n }\n\n }\n\n\n\n question_sum + current.len()\n\n}\n\n\n", "file_path": "day06/src/main.rs", "rank": 16, "score": 138285.95993916114 }, { "content": "pub fn part2(lines: &[Rule]) -> usize {\n\n let map = lines.iter().map(|r| (r.color(), r.bag_list())).collect();\n\n count_node(&map, BAG_TYPE)\n\n}\n\n\n", "file_path": "day07/src/main.rs", "rank": 17, "score": 138093.38853538403 }, { "content": "fn part2(program: &mut [Instruction]) -> Result<(), Day8Error> {\n\n let result = execute_patched(program)?;\n\n println!(\"Part 2: accumulator = {}\", result);\n\n Ok(())\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 18, "score": 129906.91956907563 }, { "content": "fn update_line(row: &mut [bool], monster_row: &[bool]) {\n\n row.iter_mut()\n\n .zip(monster_row.iter())\n\n .for_each(|(t, m)| *t &= !m);\n\n}\n\n\n", "file_path": "day20/src/tile.rs", "rank": 19, "score": 128028.08484079539 }, { "content": "fn count_valid<'a>(parsed_lines: impl Iterator<Item = &'a LineInfo>) -> usize {\n\n parsed_lines\n\n .filter(|&line_info| {\n\n let occurrence = line_info\n\n .password\n\n .chars()\n\n .filter(|&c| c == line_info.character)\n\n .take(line_info.max + 1)\n\n .count();\n\n (line_info.min..=line_info.max).contains(&occurrence)\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 20, "score": 127589.27907132334 }, { "content": "fn count_valid2<'a>(parsed_lines: impl Iterator<Item = &'a LineInfo>) -> usize {\n\n parsed_lines\n\n .filter(|&line_info| {\n\n let mut password_chars = line_info.password.chars();\n\n let first_ok = password_chars\n\n .nth(line_info.min - 1)\n\n .map_or(false, |c| c == line_info.character);\n\n let second_ok = password_chars\n\n .nth(line_info.max - line_info.min - 1)\n\n .map_or(false, |c| c == line_info.character);\n\n first_ok ^ second_ok\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 21, "score": 
127589.27907132334 }, { "content": "fn parse_id(line: &str) -> Result<u64, ParseTileError> {\n\n line.strip_prefix(\"Tile \")\n\n .and_then(|s| s.strip_suffix(':'))\n\n .and_then(|s| s.parse().ok())\n\n .ok_or(ParseTileError(\"Could not parse id\"))\n\n}\n\n\n", "file_path": "day20/src/tile.rs", "rank": 22, "score": 125379.75713361846 }, { "content": "fn execute_patched(patched: &mut [Instruction]) -> Result<i32, Day8Error> {\n\n for p in 0..patched.len() {\n\n if patch(&mut patched[p]) {\n\n match execute(&patched) {\n\n ProgramResult::Terminate(result) => return Ok(result),\n\n _ => {\n\n patch(&mut patched[p]);\n\n }\n\n }\n\n }\n\n }\n\n\n\n Err(Day8Error::NoSolution)\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 23, "score": 123271.99963300029 }, { "content": "fn run() -> Result<(), Box<dyn error::Error>> {\n\n let path = [\"data\", \"day01\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let numbers = get_numbers(path)?;\n\n\n\n part1(&numbers)?;\n\n part2(&numbers)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "day01/src/main.rs", "rank": 24, "score": 119566.71858208277 }, { "content": "fn count_line(line: &[Seat]) -> usize {\n\n line.iter().filter(|s| s.is_occupied()).count()\n\n}\n\n\n", "file_path": "day11/src/seating.rs", "rank": 25, "score": 118991.66581028943 }, { "content": "fn write_row(data: &mut TileData, row: &str) -> Result<(), ParseTileError> {\n\n for c in row.chars() {\n\n match c {\n\n '.' 
=> data.push(false),\n\n '#' => data.push(true),\n\n _ => return Err(ParseTileError(\"Unknown character\")),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day20/src/tile.rs", "rank": 26, "score": 115374.20398812884 }, { "content": "fn parse_ticket(line: &str, field_count: usize) -> Result<Vec<usize>, ParseError> {\n\n let result = line\n\n .split(',')\n\n .map(str::parse)\n\n .collect::<Result<Vec<_>, _>>()\n\n .map_err(|_| ParseError(\"Failed to parse ticket value as number\"))?;\n\n if result.len() == field_count {\n\n Ok(result)\n\n } else {\n\n Err(ParseError(\"Incorrect field count\"))\n\n }\n\n}\n\n\n\nimpl Problem {\n\n pub fn parse(mut lines: impl Iterator<Item = impl AsRef<str>>) -> Result<Self, ParseError> {\n\n let (fields, allowed) = parse_fields(&mut lines)?;\n\n let field_count = fields.len();\n\n\n\n parse_line(&mut lines, \"your ticket:\", \"Missing your ticket\")?;\n\n let your_ticket = parse_ticket(\n", "file_path": "day16/src/problem.rs", "rank": 27, "score": 113753.6676400693 }, { "content": "fn run() -> Result<(), io::Error> {\n\n let lines = {\n\n let path = [\"data\", \"day03\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let input_file = File::open(path)?;\n\n BufReader::new(input_file)\n\n .lines()\n\n .collect::<Result<Vec<_>, _>>()?\n\n };\n\n\n\n part1(lines.iter());\n\n part2(lines.iter());\n\n Ok(())\n\n}\n\n\n", "file_path": "day03/src/main.rs", "rank": 28, "score": 110219.16299194569 }, { "content": "fn part2(mut plan: SeatingPlan) {\n\n while plan.update2() {}\n\n println!(\"Part 2: occupied = {}\", plan.occupied());\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 29, "score": 109522.19526352934 }, { "content": "fn part1(mut plan: SeatingPlan) {\n\n while plan.update() {}\n\n println!(\"Part 1: occupied = {}\", plan.occupied());\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 30, "score": 109522.19526352934 }, { "content": "fn patch(instruction: &mut Instruction) -> bool {\n\n match instruction {\n\n 
Instruction::Jmp(delta) => {\n\n *instruction = Instruction::Nop(*delta);\n\n true\n\n }\n\n Instruction::Nop(delta) => {\n\n *instruction = Instruction::Jmp(*delta);\n\n true\n\n }\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 31, "score": 107084.36643922277 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let path = [\"data\", \"day21\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let processor = FoodProcessor::parse(BufReader::new(file).lines().filter_map(Result::ok))?;\n\n println!(\"Part 1: result = {}\", processor.safe_count());\n\n println!(\"Part 2: result = {}\", processor.map_allergens());\n\n Ok(())\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 32, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let parsed_lines = {\n\n let path = [\"data\", \"day02\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let mut parsed_lines = Vec::new();\n\n for line in BufReader::new(file).lines() {\n\n parsed_lines.push(split_line(line?)?);\n\n }\n\n\n\n parsed_lines\n\n };\n\n\n\n let part1 = count_valid(parsed_lines.iter());\n\n println!(\"Part 1: found {} valid passwords\", part1);\n\n let part2 = count_valid2(parsed_lines.iter());\n\n println!(\"Part 2: found {} valid passwords\", part2);\n\n Ok(())\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 33, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let (mut rule_map, messages) = {\n\n let path = [\"data\", \"day19\", \"input.txt\"].iter().collect::<PathBuf>();\n\n read_file(path)?\n\n };\n\n\n\n let part1 = messages.iter().filter(|m| test_rules(&rule_map, m)).count();\n\n println!(\"Part 1: valid count = {}\", part1);\n\n\n\n rule_map.update_rules();\n\n\n\n let part2 = messages.iter().filter(|m| test_rules(&rule_map, m)).count();\n\n println!(\"Part 2: valid count = {}\", part2);\n\n\n\n Ok(())\n\n}\n\n\n", 
"file_path": "day19/src/main.rs", "rank": 34, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let grid = {\n\n let path = [\"data\", \"day20\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n Grid::parse(BufReader::new(file).lines().filter_map(Result::ok))?\n\n };\n\n\n\n println!(\n\n \"Part 1: result = {}\",\n\n grid.corner_ids().iter().product::<u64>()\n\n );\n\n\n\n let mut merged = grid.merge_tiles();\n\n merged.remove_monsters();\n\n println!(\"Part 2: rougness = {}\", merged.roughness());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 35, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let (public1, public2) = {\n\n let path = [\"data\", \"day25\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let input = read_to_string(path)?;\n\n let mut values = input.lines().map(|s| s.parse().ok());\n\n let public1 = values\n\n .next()\n\n .flatten()\n\n .ok_or(ParseError(\"Missing first number\"))?;\n\n let public2 = values\n\n .next()\n\n .flatten()\n\n .ok_or(ParseError(\"Missing second number\"))?;\n\n (public1, public2)\n\n };\n\n\n\n println!(\"Part 1: encrytion key = {}\", find_key(public1, public2));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 36, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let problem = {\n\n let path = [\"data\", \"day16\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n Problem::parse(BufReader::new(file).lines().filter_map(Result::ok))?\n\n };\n\n\n\n println!(\"Part 1: rate = {}\", problem.error_rate());\n\n\n\n let field_assignments = problem.assign_fields();\n\n let ticket = problem.your_ticket();\n\n let result = problem\n\n .fields()\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, f)| f.name().starts_with(\"departure\"))\n\n .map(|(i, _)| ticket[field_assignments[i]] as u64)\n\n 
.product::<u64>();\n\n\n\n println!(\"Part 2: result = {}\", result);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 37, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let initial = {\n\n let path = [\"data\", \"day17\", \"input.txt\"].iter().collect::<PathBuf>();\n\n read_to_string(path)?\n\n };\n\n println!(\"Part 1: result = {}\", process(&initial, 3)?);\n\n println!(\"Part 2: result = {}\", process(&initial, 4)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 38, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let instructions = {\n\n let path = [\"data\", \"day12\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let mut instructions = Vec::new();\n\n for line in BufReader::new(file).lines() {\n\n instructions.push(line?.parse()?);\n\n }\n\n\n\n instructions\n\n };\n\n\n\n println!(\"Part 1: result = {}\", process_path(instructions.iter()));\n\n println!(\"Part 2: result = {}\", process_waypoint(instructions.iter()));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 39, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let program = {\n\n let path = [\"data\", \"day14\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let mut program = Vec::new();\n\n for line in BufReader::new(file).lines() {\n\n program.push(line?.parse()?);\n\n }\n\n\n\n program\n\n };\n\n\n\n let result1 = part1::execute_program(program.iter());\n\n println!(\"Part 1: result = {}\", result1);\n\n\n\n let result2 = part2::execute_program(program.iter());\n\n println!(\"Part 2: result = {}\", result2);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 40, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let lines = {\n\n let path = [\"data\", \"day06\", 
\"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n BufReader::new(file)\n\n .lines()\n\n .collect::<Result<Vec<_>, _>>()?\n\n };\n\n\n\n println!(\"Part 1: sum = {}\", part1(lines.iter()));\n\n println!(\"Part 2: sum = {}\", part2(lines.iter()));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day06/src/main.rs", "rank": 41, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let lines = {\n\n let path = [\"data\", \"day18\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n BufReader::new(file)\n\n .lines()\n\n .collect::<Result<Vec<_>, _>>()?\n\n };\n\n\n\n part1(lines.iter())?;\n\n part2(lines.iter())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 42, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let mut floor = {\n\n let path = [\"data\", \"day24\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n Floor::parse(BufReader::new(file).lines().filter_map(Result::ok))?\n\n };\n\n\n\n println!(\"Part 1: result = {}\", floor.count_black_tiles());\n\n for _ in 0..100 {\n\n floor.update();\n\n }\n\n println!(\"Part 2: result = {}\", floor.count_black_tiles());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 44, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let rules = {\n\n let path = [\"data\", \"day07\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let mut rules = Vec::new();\n\n for line_result in BufReader::new(file).lines() {\n\n rules.push(line_result?.parse()?);\n\n }\n\n rules\n\n };\n\n\n\n println!(\"Part 1: {} valid bags\", part1(&rules));\n\n println!(\"Part 2: {} contained bags\", part2(&rules));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day07/src/main.rs", "rank": 45, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let mut 
program = {\n\n let path = [\"data\", \"day08\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let mut program = Vec::new();\n\n for line in BufReader::new(file).lines() {\n\n program.push(line?.parse()?);\n\n }\n\n\n\n program\n\n };\n\n part1(&program)?;\n\n part2(&mut program)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 46, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let lines = {\n\n let path = [\"data\", \"day04\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n BufReader::new(file)\n\n .lines()\n\n .collect::<Result<Vec<_>, _>>()?\n\n };\n\n\n\n part1(lines.iter());\n\n part2(lines.iter());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day04/src/main.rs", "rank": 47, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let lines = {\n\n let path = [\"data\", \"day05\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n BufReader::new(file)\n\n .lines()\n\n .collect::<Result<Vec<_>, _>>()?\n\n };\n\n part1(lines.iter())?;\n\n part2(lines.iter())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "day05/src/main.rs", "rank": 48, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let source = {\n\n let path = [\"data\", \"day09\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let mut source = Vec::new();\n\n for line in BufReader::new(file).lines() {\n\n source.push(line?.parse()?);\n\n }\n\n\n\n source\n\n };\n\n let target = part1(&source)?;\n\n part2(&source, target)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "day09/src/main.rs", "rank": 49, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let adapters = {\n\n let path = [\"data\", \"day10\", \"input.txt\"].iter().collect::<PathBuf>();\n\n let file = File::open(path)?;\n\n let mut adapters = Vec::new();\n\n for 
line in BufReader::new(file).lines() {\n\n adapters.push(line?.parse()?);\n\n }\n\n\n\n adapters\n\n };\n\n\n\n println!(\"Part 1: result = {}\", count_differences(&adapters));\n\n println!(\"Part 2: result = {}\", count_ways(&adapters));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 50, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let plan = {\n\n let path = [\"data\", \"day11\", \"input.txt\"].iter().collect::<PathBuf>();\n\n read_to_string(path)?.parse::<SeatingPlan>()?\n\n };\n\n part1(plan.clone());\n\n part2(plan);\n\n Ok(())\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 51, "score": 105943.1915539761 }, { "content": "fn run() -> Result<(), Box<dyn Error>> {\n\n let plan = {\n\n let path = [\"data\", \"day13\", \"input.txt\"].iter().collect::<PathBuf>();\n\n read_to_string(path)?.parse()?\n\n };\n\n let result1 = part1(&plan).ok_or(ApplicationError(\"No buses\"))?;\n\n println!(\"Part1: result = {}\", result1);\n\n let result2 =\n\n part2(&plan.buses).ok_or(ApplicationError(\"Schedule does not permit solution\"))?;\n\n println!(\"Part2: result = {}\", result2);\n\n Ok(())\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 52, "score": 105943.1915539761 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct LineInfo {\n\n pub min: usize,\n\n pub max: usize,\n\n pub character: char,\n\n pub password: String,\n\n}\n\n\n\nmod line_parsing {\n\n use super::LineInfo;\n\n\n\n use nom::{\n\n bytes::complete::tag,\n\n character::complete::{alpha1, anychar, char, digit1},\n\n combinator::{map, map_res},\n\n sequence::separated_pair,\n\n IResult,\n\n };\n\n\n\n fn min_max(input: &str) -> IResult<&str, (usize, usize)> {\n\n separated_pair(\n", "file_path": "day02/src/main.rs", "rank": 53, "score": 102134.27974181356 }, { "content": "#[derive(Debug)]\n\nenum Day1Error {\n\n EmptySeq,\n\n NotFound,\n\n MultiplyOverflow,\n\n}\n\n\n\nimpl fmt::Display for Day1Error {\n\n fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Day1Error::EmptySeq => write!(f, \"No values in sequence.\"),\n\n Day1Error::NotFound => write!(f, \"No answer found\"),\n\n Day1Error::MultiplyOverflow => write!(f, \"Multiplication overflow\"),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for Day1Error {}\n\n\n\nconst TARGET: i32 = 2020;\n\n\n", "file_path": "day01/src/main.rs", "rank": 54, "score": 101778.60460747089 }, { "content": "#[derive(Debug)]\n\nenum Day8Error {\n\n ParseError,\n\n NoSolution,\n\n}\n\n\n\nimpl fmt::Display for Day8Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::ParseError => write!(f, \"Parse error\"),\n\n Self::NoSolution => write!(f, \"No solution found\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for Day8Error {}\n\n\n", "file_path": "day08/src/main.rs", "rank": 55, "score": 101778.60460747089 }, { "content": "#[derive(Debug)]\n\nenum Day5Error {\n\n NoData,\n\n NotFound,\n\n}\n\n\n\nimpl fmt::Display for Day5Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Day5Error::NoData => write!(f, \"No data\"),\n\n Day5Error::NotFound => write!(f, \"Result not found\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for Day5Error {}\n\n\n", "file_path": "day05/src/main.rs", "rank": 56, "score": 101778.60460747089 }, { "content": "pub fn sqrt_exact(count: usize) -> Option<usize> {\n\n for size in 0usize.. 
{\n\n if let Some(sqr_size) = size.checked_mul(size) {\n\n match sqr_size.cmp(&count) {\n\n Ordering::Less => (),\n\n Ordering::Equal => return Some(size),\n\n Ordering::Greater => return None,\n\n }\n\n } else {\n\n return None;\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::sqrt_exact;\n\n\n", "file_path": "day20/src/utils.rs", "rank": 57, "score": 101668.73054092602 }, { "content": "#[derive(Debug)]\n\nstruct ParseError {}\n\n\n\nimpl fmt::Display for ParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Parse error\")\n\n }\n\n}\n\n\n\nimpl Error for ParseError {}\n\n\n", "file_path": "day02/src/main.rs", "rank": 58, "score": 101537.82205339869 }, { "content": "#[derive(Debug)]\n\nstruct NotFoundError {}\n\n\n\nimpl fmt::Display for NotFoundError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Not found\")\n\n }\n\n}\n\n\n\nimpl Error for NotFoundError {}\n\n\n", "file_path": "day09/src/main.rs", "rank": 59, "score": 101537.82205339869 }, { "content": "fn find_corner(parsed_tiles: &mut [Tile]) -> Option<usize> {\n\n let (corner, edges1, edges2) = parsed_tiles.iter().enumerate().find_map(|(idx, tile)| {\n\n let mut connected = parsed_tiles\n\n .iter()\n\n .filter(|t| t.id() != tile.id())\n\n .map(|t| tile.connect(t))\n\n .filter(|c| !c.is_empty());\n\n let first = connected.next()?;\n\n let second = connected.next()?;\n\n connected\n\n .next()\n\n .map_or_else(|| Some((idx, first, second)), |_| None)\n\n })?;\n\n\n\n let oriented = edges1\n\n .iter()\n\n .copied()\n\n .flat_map(|e1| edges2.iter().copied().map(move |e2| (e1, e2)))\n\n .any(|(e1, e2)| parsed_tiles[corner].orient(EdgeConstraints::right(e1).and_bottom(e2)));\n\n\n\n if oriented {\n\n Some(corner)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "day20/src/grid.rs", "rank": 60, "score": 99860.5422200837 }, { "content": "fn update_in_axes(pos: &mut [usize], axes: &[usize]) -> bool {\n\n 
assert_eq!(pos.len(), axes.len());\n\n for d in 0..pos.len() {\n\n pos[d] += 1;\n\n if pos[d] == axes[d] {\n\n pos[d] = 0;\n\n } else {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "day17/src/simulation.rs", "rank": 61, "score": 97845.0564866814 }, { "content": "fn find_key(first: u64, mut second: u64) -> u64 {\n\n let mut exponent = loop_size(first);\n\n if exponent == 0 {\n\n return 1;\n\n }\n\n let mut value = 1;\n\n while exponent > 1 {\n\n if exponent & 1 != 0 {\n\n value = (second * value) % ENCRYPTION_SIZE;\n\n }\n\n second = (second * second) % ENCRYPTION_SIZE;\n\n exponent >>= 1;\n\n }\n\n\n\n (value * second) % ENCRYPTION_SIZE\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 62, "score": 97845.0564866814 }, { "content": "fn process_path<'a>(path: impl Iterator<Item = &'a Instruction>) -> i32 {\n\n let mut position = Point { north: 0, east: 0 };\n\n let mut direction = Direction::East;\n\n\n\n for instruction in path {\n\n match instruction {\n\n Instruction::North(delta) => position.north += *delta,\n\n Instruction::East(delta) => position.east += *delta,\n\n Instruction::Right(steps) => direction = direction.turn_right(*steps),\n\n Instruction::Forward(steps) => match direction {\n\n Direction::East => position.east += *steps,\n\n Direction::South => position.north -= *steps,\n\n Direction::West => position.east -= *steps,\n\n Direction::North => position.north += *steps,\n\n },\n\n }\n\n }\n\n\n\n position.manhattan_distance()\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 63, "score": 94000.6397238406 }, { "content": "fn process_waypoint<'a>(path: impl Iterator<Item = &'a Instruction>) -> i32 {\n\n let mut ship = Point { north: 0, east: 0 };\n\n let mut waypoint = Point { north: 1, east: 10 };\n\n\n\n for instruction in path {\n\n match instruction {\n\n Instruction::North(delta) => waypoint.north += delta,\n\n Instruction::East(delta) => waypoint.east += delta,\n\n Instruction::Right(steps) => {\n\n waypoint = 
match steps & 0b11 {\n\n 1 => Point {\n\n north: -waypoint.east,\n\n east: waypoint.north,\n\n },\n\n 2 => Point {\n\n north: -waypoint.north,\n\n east: -waypoint.east,\n\n },\n\n 3 => Point {\n\n north: waypoint.east,\n", "file_path": "day12/src/main.rs", "rank": 64, "score": 94000.6397238406 }, { "content": "#[derive(Debug)]\n\nstruct ParseError(&'static str);\n\n\n\nimpl fmt::Display for ParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Application error ({})\", self.0)\n\n }\n\n}\n\n\n\nimpl Error for ParseError {}\n\n\n", "file_path": "day12/src/main.rs", "rank": 65, "score": 93582.55256785997 }, { "content": "#[derive(Debug)]\n\nstruct ApplicationError(&'static str);\n\n\n\nimpl fmt::Display for ApplicationError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Application error ({})\", &self.0)\n\n }\n\n}\n\n\n\nimpl Error for ApplicationError {}\n\n\n", "file_path": "day13/src/main.rs", "rank": 66, "score": 93582.55256785997 }, { "content": "#[derive(Debug)]\n\nstruct ParseError(&'static str);\n\n\n\nimpl fmt::Display for ParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Parse error ({})\", self.0)\n\n }\n\n}\n\n\n\nimpl Error for ParseError {}\n\n\n\nconst SUBJECT_NUMBER: u64 = 7;\n\nconst ENCRYPTION_SIZE: u64 = 20201227;\n\n\n", "file_path": "day25/src/main.rs", "rank": 67, "score": 93582.55256785997 }, { "content": "#[derive(Debug)]\n\nstruct ParseCoordsError(&'static str);\n\n\n\nimpl fmt::Display for ParseCoordsError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Parse error: {}\", self.0)\n\n }\n\n}\n\n\n\nimpl Error for ParseCoordsError {}\n\n\n", "file_path": "day24/src/main.rs", "rank": 68, "score": 91646.82306848821 }, { "content": "fn update_in_range(pos: &mut [usize], start: &[usize], end: &[usize]) -> bool {\n\n assert_eq!(pos.len(), start.len());\n\n assert_eq!(pos.len(), end.len());\n\n for d in 
0..pos.len() {\n\n pos[d] += 1;\n\n if pos[d] == end[d] {\n\n pos[d] = start[d];\n\n } else {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Simulation {\n\n data: Vec<Cube>,\n\n axes: Vec<usize>,\n\n}\n", "file_path": "day17/src/simulation.rs", "rank": 69, "score": 91409.90810430332 }, { "content": "fn get_numbers(path: impl AsRef<Path>) -> io::Result<Vec<i32>> {\n\n let infile = File::open(path)?;\n\n let mut numbers = BufReader::new(infile)\n\n .lines()\n\n .filter_map(|l| l.map_or(None, |s| s.parse().ok()))\n\n .collect::<Vec<_>>();\n\n numbers.sort_unstable();\n\n Ok(numbers)\n\n}\n\n\n", "file_path": "day01/src/main.rs", "rank": 70, "score": 91109.48779480488 }, { "content": "fn parse_line(\n\n lines: &mut impl Iterator<Item = impl AsRef<str>>,\n\n expected: &str,\n\n message: &'static str,\n\n) -> Result<(), ParseError> {\n\n if lines.next().ok_or(ParseError(message))?.as_ref() == expected {\n\n Ok(())\n\n } else {\n\n Err(ParseError(message))\n\n }\n\n}\n\n\n", "file_path": "day16/src/problem.rs", "rank": 71, "score": 90025.75962342526 }, { "content": "fn value_ok(prefix: &str, value: &str) -> bool {\n\n match prefix {\n\n \"byr\" => value.len() == 4 && (1920..=2002).contains(&value.parse::<i32>().unwrap_or(0)),\n\n \"iyr\" => value.len() == 4 && (2010..=2020).contains(&value.parse::<i32>().unwrap_or(0)),\n\n \"eyr\" => value.len() == 4 && (2020..=2030).contains(&value.parse::<i32>().unwrap_or(0)),\n\n \"hgt\" => {\n\n if let Some(height) = value.strip_suffix(\"cm\") {\n\n (150..=193).contains(&height.parse::<i32>().unwrap_or(0))\n\n } else if let Some(height) = value.strip_suffix(\"in\") {\n\n (59..=76).contains(&height.parse::<i32>().unwrap_or(0))\n\n } else {\n\n false\n\n }\n\n }\n\n \"hcl\" => {\n\n if value.len() == 7 {\n\n let mut chars = value.chars();\n\n chars.next().unwrap() == '#' && chars.all(|c| \"0123456789abcdef\".contains(c))\n\n } else {\n\n false\n\n }\n\n }\n\n \"ecl\" => 
EYE_COLORS.contains(&value),\n\n \"pid\" => value.len() == 9 && value.chars().all(|c| \"0123456789\".contains(c)),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "day04/src/main.rs", "rank": 72, "score": 84893.302641516 }, { "content": "fn part1(program: &[Instruction]) -> Result<(), Day8Error> {\n\n match execute(&program) {\n\n ProgramResult::Loop(result) => {\n\n println!(\"Part 1: accumulator = {}\", result);\n\n Ok(())\n\n }\n\n _ => Err(Day8Error::NoSolution),\n\n }\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 73, "score": 76902.19849102563 }, { "content": "fn part2(numbers: &[i32]) -> Result<(), Day1Error> {\n\n let (low, middle, high) = find_triple(numbers, TARGET)?;\n\n\n\n let product = low\n\n .checked_mul(middle)\n\n .map(|lm| lm.checked_mul(high))\n\n .flatten()\n\n .ok_or(Day1Error::MultiplyOverflow)?;\n\n\n\n println!(\n\n \"Part 2: low = {}, middle = {}, high = {}, product = {}\",\n\n low, middle, high, product\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "day01/src/main.rs", "rank": 74, "score": 76902.19849102563 }, { "content": "fn part1(numbers: &[i32]) -> Result<(), Day1Error> {\n\n let (low, high) = find_pair(numbers, TARGET)?;\n\n let product = low.checked_mul(high).ok_or(Day1Error::MultiplyOverflow)?;\n\n println!(\n\n \"Part 1: low = {}, high = {}, product = {}\",\n\n low, high, product\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "day01/src/main.rs", "rank": 75, "score": 76902.19849102563 }, { "content": "fn part1(sequence: &[i64]) -> Result<i64, NotFoundError> {\n\n let result = find_incorrect(sequence, 25).ok_or(NotFoundError {})?;\n\n println!(\"Part1: result = {}\", result);\n\n Ok(result)\n\n}\n\n\n", "file_path": "day09/src/main.rs", "rank": 76, "score": 74328.73973791781 }, { "content": "fn check_line(row: &[bool], monster_row: &[bool]) -> bool {\n\n row.iter()\n\n .copied()\n\n .zip(monster_row.iter())\n\n .all(|(t, m)| t || !m)\n\n}\n\n\n", "file_path": "day20/src/tile.rs", "rank": 77, "score": 73269.6171892304 }, { 
"content": "fn part2(sequence: &[i64], target: i64) -> Result<(), NotFoundError> {\n\n let result = find_contiguous(sequence, target).ok_or(NotFoundError {})?;\n\n println!(\"Part 2: result = {}\", result);\n\n Ok(())\n\n}\n\n\n", "file_path": "day09/src/main.rs", "rank": 78, "score": 71930.0423306077 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum Cube {\n\n Inactive,\n\n Active,\n\n}\n\n\n\nimpl Cube {\n\n pub fn is_active(&self) -> bool {\n\n matches!(self, Self::Active)\n\n }\n\n}\n\n\n", "file_path": "day17/src/simulation.rs", "rank": 79, "score": 69691.42813111929 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum Seat {\n\n Empty,\n\n Unoccupied,\n\n Occupied,\n\n}\n\n\n\nimpl Seat {\n\n pub fn is_occupied(&self) -> bool {\n\n matches!(self, Seat::Occupied)\n\n }\n\n pub fn is_seat(&self) -> bool {\n\n !matches!(self, Seat::Empty)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SeatingPlan {\n\n width: usize,\n\n height: usize,\n\n state: bool,\n\n data1: Vec<Seat>,\n\n data2: Vec<Seat>,\n\n}\n\n\n", "file_path": "day11/src/seating.rs", "rank": 80, "score": 69691.42813111929 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\nenum Instruction {\n\n North(i32),\n\n East(i32),\n\n Right(i32),\n\n Forward(i32),\n\n}\n\n\n\nimpl FromStr for Instruction {\n\n type Err = ParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut char_indices = s.char_indices();\n\n let opcode = char_indices.next().ok_or(ParseError(\"missing opcode\"))?.1;\n\n let value_start = char_indices.next().ok_or(ParseError(\"missing value\"))?.0;\n\n let value = s[value_start..]\n\n .parse()\n\n .map_err(|_| ParseError(\"could not parse value\"))?;\n\n match opcode {\n\n 'N' => Ok(Instruction::North(value)),\n\n 'S' => Ok(Instruction::North(-value)),\n", "file_path": "day12/src/main.rs", "rank": 81, "score": 69690.98402820987 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq, IntoPrimitive, 
TryFromPrimitive)]\n\n#[repr(i32)]\n\nenum Direction {\n\n East,\n\n South,\n\n West,\n\n North,\n\n}\n\n\n\nimpl Direction {\n\n pub fn turn_right(&self, steps: i32) -> Direction {\n\n let new_direction = (i32::from(*self) + steps) & 0b11;\n\n new_direction\n\n .try_into()\n\n .expect(\"new direction out-of-range\")\n\n }\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 82, "score": 69690.35209011316 }, { "content": "#[derive(Debug, Clone)]\n\nenum Instruction {\n\n Acc(i32),\n\n Jmp(i32),\n\n Nop(i32),\n\n}\n\n\n\nimpl FromStr for Instruction {\n\n type Err = Day8Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n if s.len() < 5 {\n\n return Err(Day8Error::ParseError);\n\n }\n\n let opcode = &s[0..3];\n\n let value = s[4..].parse().map_err(|_| Day8Error::ParseError)?;\n\n match opcode {\n\n \"acc\" => Ok(Instruction::Acc(value)),\n\n \"jmp\" => Ok(Instruction::Jmp(value)),\n\n \"nop\" => Ok(Instruction::Nop(value)),\n\n _ => Err(Day8Error::ParseError),\n\n }\n\n }\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 83, "score": 69687.8177422405 }, { "content": "#[derive(Debug)]\n\nenum Operator {\n\n Add,\n\n Multiply,\n\n}\n\n\n", "file_path": "day18/src/parser.rs", "rank": 84, "score": 69684.06945290382 }, { "content": "#[derive(Debug)]\n\nstruct Plan {\n\n time: i64,\n\n buses: Vec<Option<i64>>,\n\n}\n\n\n\nimpl FromStr for Plan {\n\n type Err = ApplicationError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut plan_lines = s.lines();\n\n let time = plan_lines\n\n .next()\n\n .and_then(|s| s.parse().ok())\n\n .ok_or(ApplicationError(\"missing time\"))?;\n\n let buses = plan_lines\n\n .next()\n\n .ok_or(ApplicationError(\"missing schedule\"))?\n\n .split(',')\n\n .map(|t| t.parse().ok())\n\n .collect::<Vec<_>>();\n\n Ok(Plan { time, buses })\n\n }\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 85, "score": 69440.78971278059 }, { "content": "struct Floor {\n\n black_tiles: AHashSet<Coords>,\n\n}\n\n\n\nimpl 
Floor {\n\n fn parse<S, I>(lines: I) -> Result<Self, ParseCoordsError>\n\n where\n\n S: AsRef<str>,\n\n I: Iterator<Item = S>,\n\n {\n\n let mut black_tiles = AHashSet::new();\n\n for line in lines {\n\n let coordinates = Coords::parse_line(line.as_ref())?;\n\n if !black_tiles.remove(&coordinates) {\n\n black_tiles.insert(coordinates);\n\n }\n\n }\n\n\n\n Ok(Self { black_tiles })\n\n }\n", "file_path": "day24/src/main.rs", "rank": 86, "score": 69440.78971278059 }, { "content": "#[derive(Debug)]\n\nstruct Point {\n\n north: i32,\n\n east: i32,\n\n}\n\n\n\nimpl Point {\n\n pub fn manhattan_distance(&self) -> i32 {\n\n self.north.abs() + self.east.abs()\n\n }\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 87, "score": 69440.78971278059 }, { "content": "#[derive(Debug)]\n\nenum VerticalEdge {\n\n Top,\n\n Bottom,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EdgeConstraints {\n\n left: Option<u32>,\n\n right: Option<u32>,\n\n top: Option<u32>,\n\n bottom: Option<u32>,\n\n}\n\n\n\nimpl EdgeConstraints {\n\n pub fn right(value: u32) -> Self {\n\n Self {\n\n right: Some(value),\n\n ..Default::default()\n\n }\n\n }\n", "file_path": "day20/src/tile.rs", "rank": 88, "score": 68490.863150396 }, { "content": "#[derive(Debug)]\n\nenum HorizontalEdge {\n\n Left,\n\n Right,\n\n}\n\n\n", "file_path": "day20/src/tile.rs", "rank": 89, "score": 68490.863150396 }, { "content": "#[derive(Debug)]\n\nenum ProgramResult {\n\n Terminate(i32),\n\n Loop(i32),\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 90, "score": 68490.863150396 }, { "content": "fn process(initial: &str, dimensions: usize) -> Result<usize, ParseSimulationError> {\n\n let mut simulation = Simulation::parse(initial, dimensions)?;\n\n for _ in 0..6 {\n\n simulation.update();\n\n }\n\n\n\n Ok(simulation.active_count())\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 91, "score": 68296.41122057183 }, { "content": "struct SimpleParser {}\n\n\n\nimpl Parser for SimpleParser {\n\n fn expr(s: &str) -> 
IResult<&str, u64> {\n\n let (rhs, initial) = unary_expr::<Self>(s)?;\n\n fold_many0(\n\n tuple((\n\n preceded(multispace0, operator),\n\n preceded(multispace0, unary_expr::<Self>),\n\n )),\n\n initial,\n\n |acc, (op, next)| match op {\n\n Operator::Add => acc + next,\n\n Operator::Multiply => acc * next,\n\n },\n\n )(rhs)\n\n }\n\n}\n\n\n", "file_path": "day18/src/parser.rs", "rank": 92, "score": 68250.0805963238 }, { "content": "struct AdvancedParser {}\n\n\n\nimpl AdvancedParser {\n\n fn add_expr(s: &str) -> IResult<&str, u64> {\n\n let (rhs, initial) = unary_expr::<Self>(s)?;\n\n fold_many0(\n\n preceded(\n\n tuple((multispace0, char('+'), multispace0)),\n\n unary_expr::<Self>,\n\n ),\n\n initial,\n\n |acc, next| acc + next,\n\n )(rhs)\n\n }\n\n}\n\n\n\nimpl Parser for AdvancedParser {\n\n fn expr(s: &str) -> IResult<&str, u64> {\n\n let (rhs, initial) = Self::add_expr(s)?;\n\n fold_many0(\n\n preceded(tuple((multispace0, char('*'), multispace0)), Self::add_expr),\n\n initial,\n\n |acc, next| acc * next,\n\n )(rhs)\n\n }\n\n}\n\n\n", "file_path": "day18/src/parser.rs", "rank": 93, "score": 68250.0805963238 }, { "content": "fn count_valid<S, L, F>(lines: L, check: F) -> usize\n\nwhere\n\n S: AsRef<str>,\n\n L: Iterator<Item = S>,\n\n F: Fn(&str, &str) -> bool,\n\n{\n\n let mut valid = 0;\n\n let mut prefixes = AHashSet::new();\n\n for line_ref in lines {\n\n let line = line_ref.as_ref();\n\n if line.is_empty() {\n\n if check_valid(&prefixes) {\n\n valid += 1;\n\n }\n\n\n\n prefixes.clear();\n\n } else {\n\n for field in line.split(' ') {\n\n if let Some(pos) = field.find(':') {\n\n let (prefix, value) = field.split_at(pos);\n", "file_path": "day04/src/main.rs", "rank": 94, "score": 68056.63294545491 }, { "content": "fn find_pair(numbers: &[i32], target: i32) -> Result<(i32, i32), Day1Error> {\n\n assert!(numbers.len() >= 2);\n\n assert!(numbers.windows(2).all(|w| w[0] <= w[1])); // numbers.is_sorted() in unstable\n\n\n\n let mut it = numbers.iter();\n\n\n\n let 
mut low = *it.next().ok_or(Day1Error::EmptySeq)?;\n\n let mut high = *it.next_back().ok_or(Day1Error::EmptySeq)?;\n\n\n\n loop {\n\n let total = low + high;\n\n match total.cmp(&target) {\n\n Ordering::Equal => return Ok((low, high)),\n\n Ordering::Less => {\n\n low = *it.next().ok_or(Day1Error::NotFound)?;\n\n }\n\n Ordering::Greater => {\n\n high = *it.next_back().ok_or(Day1Error::NotFound)?;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "day01/src/main.rs", "rank": 95, "score": 66267.74250153257 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]\n\nstruct FoodId(usize);\n\n\n", "file_path": "day21/src/food.rs", "rank": 96, "score": 65193.453732043876 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]\n\nstruct AllergenId(usize);\n\n\n\n#[derive(Debug)]\n\npub struct FoodProcessor {\n\n id_foods: AHashMap<FoodId, Rc<str>>,\n\n id_allergens: AHashMap<AllergenId, Rc<str>>,\n\n safe_counts: AHashMap<FoodId, usize>,\n\n allergen_possibilities: AHashMap<AllergenId, AHashSet<FoodId>>,\n\n}\n\n\n", "file_path": "day21/src/food.rs", "rank": 97, "score": 65193.453732043876 }, { "content": "fn find_triple(numbers: &[i32], target: i32) -> Result<(i32, i32, i32), Day1Error> {\n\n assert!(numbers.len() >= 3);\n\n assert!(numbers.windows(2).all(|w| w[0] <= w[1])); // numbers.is_sorted() in unstable\n\n\n\n numbers[0..numbers.len() - 2]\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, &l)| {\n\n find_pair(&numbers[i + 1..], target - l)\n\n .map(|(m, h)| (l, m, h))\n\n .ok()\n\n })\n\n .next()\n\n .ok_or(Day1Error::NotFound)\n\n}\n\n\n", "file_path": "day01/src/main.rs", "rank": 98, "score": 64361.283227448446 }, { "content": "#[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)]\n\nstruct Coords(i32, i32);\n\n\n\nimpl Coords {\n\n pub fn parse_line(line: &str) -> Result<Self, ParseCoordsError> {\n\n enum ParseState {\n\n None,\n\n North,\n\n South,\n\n }\n\n\n\n let mut state = ParseState::None;\n\n let mut x = 0;\n\n let mut y = 0;\n\n\n\n for c 
in line.bytes() {\n\n match (&state, c) {\n\n (ParseState::None, b'w') => x -= 1,\n\n (ParseState::None, b'e') => x += 1,\n\n (ParseState::None, b'n') => {\n\n y -= 1;\n", "file_path": "day24/src/main.rs", "rank": 99, "score": 63453.11587973769 } ]
Rust
jvmkill/src/heap/types.rs
cloudfoundry/jvmkill
d1d7f104f94227e3c97166b633ec3ddb8e6c39fe
/* * Copyright 2015-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use crate::bindings::jlong; use crate::jvmti::JVMTI; pub struct Types<'t, J: JVMTI> { jvmti: &'t J, types: Vec<String>, } impl<'t, J: JVMTI> Types<'t, J> { pub fn new(jvmti: &'t J) -> Self { return Self { jvmti, types: Vec::new() }; } pub fn get(&self, tag: jlong) -> &String { return &self.types[tag as usize]; } pub fn tag_classes(&mut self) { for c in self.jvmti.get_loaded_classes() { self.jvmti.set_tag(c, self.types.len() as jlong); let (signature, _) = self.jvmti.get_class_signature(c); self.types.push(signature); } } } #[cfg(test)] mod tests { use std::ptr; use mockall::Sequence; use crate::bindings::jclass; use crate::heap::Types; use crate::jvmti::{ArrayPointerLoadedClassesIterator, MockJVMTI}; #[test] fn tag_classes_and_get() { let mut jvmti = MockJVMTI::new(); let mut seq = Sequence::new(); let classes = jni_type!(3, jclass) as *mut jclass; let loaded_classes = ArrayPointerLoadedClassesIterator { count: 3, classes }; jvmti .expect_get_loaded_classes() .times(1) .in_sequence(&mut seq) .return_once_st(move || loaded_classes); jvmti .expect_set_tag() .withf_st(move |&a_class, &a_tag| { ptr::eq(a_class, classes) && a_tag == 0 }) .times(1) .in_sequence(&mut seq) .return_const(()); jvmti .expect_get_class_signature() .withf_st(move |&a_class| ptr::eq(a_class, classes)) .times(1) .in_sequence(&mut seq) .return_const((String::from("alpha-type"), 
String::from("alpha-generic"))); jvmti .expect_set_tag() .withf_st(move |&a_class, &a_tag| { ptr::eq(a_class, unsafe { classes.offset(1) }) && a_tag == 1 }) .times(1) .in_sequence(&mut seq) .return_const(()); jvmti .expect_get_class_signature() .withf_st(move |&a_class| ptr::eq(a_class, unsafe { classes.offset(1) })) .times(1) .in_sequence(&mut seq) .return_const((String::from("bravo-type"), String::from("bravo-generic"))); jvmti .expect_set_tag() .withf_st(move |&a_class, &a_tag| { ptr::eq(a_class, unsafe { classes.offset(2) }) && a_tag == 2 }) .times(1) .in_sequence(&mut seq) .return_const(()); jvmti .expect_get_class_signature() .withf_st(move |&a_class| ptr::eq(a_class, unsafe { classes.offset(2) })) .times(1) .in_sequence(&mut seq) .return_const((String::from("charlie-type"), String::from("charlie-generic"))); let mut t = Types::new(&mut jvmti); t.tag_classes(); assert_eq!(t.get(0), "alpha-type"); assert_eq!(t.get(1), "bravo-type"); assert_eq!(t.get(2), "charlie-type"); } }
/* * Copyright 2015-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use crate::bindings::jlong; use crate::jvmti::JVMTI; pub struct Types<'t, J: JVMTI> { jvmti: &'t J, types: Vec<String>, } impl<'t, J: JVMTI> Types<'t, J> { pub fn new(jvmti: &'t J) -> Self { return Self { jvmti, types: Vec::new() }; } pub fn get(&self, tag: jlong) -> &String { return &self.types[tag as usize]; } pub fn tag_classes(&mut self) { for c in self.jvmti.get_loaded_classes() { self.jvmti.set_tag(c, self.types.len() as jlong); let (signature, _) = self.jvmti.get_class_signature(c); self.types.push(signature); } } } #[cfg(test)] mod tests { use std::ptr; use mockall::Sequence; use crate::bindings::jclass; use crate::heap::Types; use crate::jvmti::{ArrayPointerLoadedClassesIterator, MockJVMTI}; #[test] fn tag_classes_and_get() { let mut jvmti = MockJVMTI::new(); let mut seq = Sequence::new(); let classes = jni_type!(3, jclass) as *mut jclass; let loaded_classes = ArrayPointerLoadedClassesIterator { count: 3, classes }; jvmti .expect_get_loaded_classes() .times(1) .in_sequence(&mut seq) .return_once_st(move || loaded_classes); jvmti .expect_set_tag() .withf_st(move |&a_class, &a_tag| { ptr::eq(a_class, classes) && a_tag == 0 }) .times(1) .in_sequence(&mut seq) .return_const(()); jvmti .expect_get_class_signature() .withf_st(move |&a_class| ptr::eq(a_class, classes)) .times(1) .in_sequence(&mut
unsafe { classes.offset(1) })) .times(1) .in_sequence(&mut seq) .return_const((String::from("bravo-type"), String::from("bravo-generic"))); jvmti .expect_set_tag() .withf_st(move |&a_class, &a_tag| { ptr::eq(a_class, unsafe { classes.offset(2) }) && a_tag == 2 }) .times(1) .in_sequence(&mut seq) .return_const(()); jvmti .expect_get_class_signature() .withf_st(move |&a_class| ptr::eq(a_class, unsafe { classes.offset(2) })) .times(1) .in_sequence(&mut seq) .return_const((String::from("charlie-type"), String::from("charlie-generic"))); let mut t = Types::new(&mut jvmti); t.tag_classes(); assert_eq!(t.get(0), "alpha-type"); assert_eq!(t.get(1), "bravo-type"); assert_eq!(t.get(2), "charlie-type"); } }
seq) .return_const((String::from("alpha-type"), String::from("alpha-generic"))); jvmti .expect_set_tag() .withf_st(move |&a_class, &a_tag| { ptr::eq(a_class, unsafe { classes.offset(1) }) && a_tag == 1 }) .times(1) .in_sequence(&mut seq) .return_const(()); jvmti .expect_get_class_signature() .withf_st(move |&a_class| ptr::eq(a_class,
function_block-random_span
[ { "content": "#[test]\n\nfn time_10_count_2() {\n\n let r = Runner {\n\n class: \"org.cloudfoundry.jvmkill.ThreadExhaustion\",\n\n arguments: \"=time=10,count=2,printHeapHistogram=0,printMemoryUsage=0\",\n\n std_out: vec!(),\n\n std_err: vec!(\n\n \"Resource Exhausted! (1/2)\",\n\n \"jvmkill is killing current process\",\n\n ),\n\n };\n\n\n\n r.run()\n\n}\n\n\n", "file_path": "jvmkill-tests/tests/thread.rs", "rank": 0, "score": 114078.06274593179 }, { "content": "#[test]\n\nfn time_0_count_0() {\n\n let r = Runner {\n\n class: \"org.cloudfoundry.jvmkill.MemoryExhaustion\",\n\n arguments: \"=printHeapHistogram=1,heapHistogramMaxEntries=20\",\n\n std_out: vec!(\n\n \"java.lang.Class\",\n\n \"Heap memory:\",\n\n ),\n\n std_err: vec!(\"jvmkill is killing current process\"),\n\n };\n\n\n\n r.run()\n\n}\n\n\n", "file_path": "jvmkill-tests/tests/memory.rs", "rank": 1, "score": 114078.06274593179 }, { "content": "#[test]\n\nfn time_10_count_2() {\n\n let a = format!(\"=time=10,count=2,heapDumpPath={}dump-%a-%d-%b-%Y-%T-%z.hprof,printHeapHistogram=1,heapHistogramMaxEntries=10\", env::temp_dir().to_str().unwrap());\n\n\n\n let r = Runner {\n\n class: \"org.cloudfoundry.jvmkill.MemoryExhaustion\",\n\n arguments: a.as_str(),\n\n std_out: vec!(\"Heap dump written to\"),\n\n std_err: vec!(\n\n \"Resource Exhausted! 
(1/2)\",\n\n \"jvmkill is killing current process\",\n\n ),\n\n };\n\n\n\n r.run()\n\n}\n", "file_path": "jvmkill-tests/tests/memory.rs", "rank": 2, "score": 114078.0627459318 }, { "content": "#[test]\n\nfn parallel_time_10_count_2() {\n\n let r = Runner {\n\n class: \"org.cloudfoundry.jvmkill.ParallelThreadExhaustion\",\n\n arguments: \"=time=10,count=2,printHeapHistogram=0,printMemoryUsage=0\",\n\n std_out: vec!(),\n\n std_err: vec!(\"jvmkill is killing current process\"),\n\n };\n\n\n\n r.run()\n\n}\n", "file_path": "jvmkill-tests/tests/thread.rs", "rank": 3, "score": 111154.28032497034 }, { "content": "#[cfg_attr(test, automock(type LoadedClassesIterator = ArrayPointerLoadedClassesIterator;))]\n\npub trait JVMTI {\n\n type LoadedClassesIterator: Iterator<Item=*mut jclass>;\n\n\n\n fn add_capabilities(&self, capabilities: jvmtiCapabilities);\n\n\n\n fn follow_references(&self, heap_filter: jint, class: jclass, initial_object: jclass, callbacks: *const jvmtiHeapCallbacks, user_data: *const c_void);\n\n\n\n fn get_class_signature(&self, class: *mut jclass) -> (String, String);\n\n\n\n fn get_loaded_classes(&self) -> Self::LoadedClassesIterator;\n\n\n\n fn set_event_callbacks(&self, callbacks: *const jvmtiEventCallbacks);\n\n\n\n fn set_event_notification_mode(&self, mode: jvmtiEventMode, event_type: jvmtiEvent, event_thread: jthread);\n\n\n\n fn set_tag(&self, class: *mut jclass, tag: jlong);\n\n}\n\n\n\npub struct DefaultJVMTI {\n\n internal: *mut jvmtiEnv,\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 4, "score": 97813.8209427064 }, { "content": "pub fn is_threads_exhausted(flags: jint) -> bool {\n\n let t = JVMTI_RESOURCE_EXHAUSTED_THREADS as jint;\n\n return flags & t == t;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::PathBuf;\n\n\n\n use mockall::Sequence;\n\n\n\n use crate::action::Actions;\n\n use crate::bindings::jclass;\n\n use crate::context::Parameters;\n\n use crate::jmx::ManagementFactory;\n\n use crate::jni::MockJNI;\n\n use 
crate::jvmti::MockJVMTI;\n\n\n\n #[test]\n\n fn execute() {\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 5, "score": 96304.92393794467 }, { "content": "#[test]\n\nfn basic() {\n\n let r = Runner {\n\n class: \"org.cloudfoundry.jvmkill.ThreadExhaustion\",\n\n arguments: \"\",\n\n std_out: vec!(),\n\n std_err: vec!(\n\n \"cannot dump memory pools since the JVM is unable to create a thread\",\n\n \"jvmkill is killing current process\",\n\n ),\n\n };\n\n\n\n r.run()\n\n}\n\n\n", "file_path": "jvmkill-tests/tests/thread.rs", "rank": 6, "score": 85847.17970687403 }, { "content": "#[test]\n\nfn print_memory_usage_0() {\n\n let r = Runner {\n\n class: \"org.cloudfoundry.jvmkill.ThreadExhaustion\",\n\n arguments: \"=printMemoryUsage=0\",\n\n std_out: vec!(),\n\n std_err: vec!(\"jvmkill is killing current process\"),\n\n };\n\n\n\n r.run()\n\n}\n\n\n", "file_path": "jvmkill-tests/tests/thread.rs", "rank": 7, "score": 81948.88896904676 }, { "content": "pub trait Action {\n\n fn execute(&self, flags: jint);\n\n}\n\n\n\npub struct Actions<'a> {\n\n pub actions: Vec<Box<dyn Action + 'a>>\n\n}\n\n\n\nimpl<'a> Actions<'a> {\n\n pub fn new<N: JNI, V: JVMTI>(parameters: &Parameters, jvmti: &'a V, factory: &'a ManagementFactory<N>) -> Self {\n\n let mut actions: Vec<Box<dyn Action>> = Vec::new();\n\n\n\n if parameters.print_heap_histogram {\n\n actions.push(Box::new(HeapHistogram::new(jvmti, parameters.heap_histogram_max_entries)));\n\n }\n\n\n\n if parameters.print_memory_usage {\n\n actions.push(Box::new(MemoryPools::new(factory)));\n\n }\n\n\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 8, "score": 73315.43303164974 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless 
required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n\n\nuse std::env;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n", "file_path": "jvmkill-tests/tests/common/mod.rs", "rank": 9, "score": 70757.69683203548 }, { "content": "\n\n#[derive(Default)]\n\npub struct Runner<'r> {\n\n pub class: &'r str,\n\n pub arguments: &'r str,\n\n pub std_out: Vec<&'r str>,\n\n pub std_err: Vec<&'r str>,\n\n}\n\n\n\nimpl<'r> Runner<'r> {\n\n pub fn run(&self) {\n\n let o = Command::new(self.java())\n\n .arg(format!(\"-agentpath:{}{}\", self.agent().to_str().unwrap(), self.arguments))\n\n .arg(\"-cp\").arg(self.jar())\n\n .arg(\"-Xmx50m\")\n\n .arg(\"-XX:ReservedCodeCacheSize=10m\")\n\n .arg(\"-XX:-UseCompressedOops\")\n\n .arg(self.class)\n\n .output()\n\n .expect(\"failed to run Java process\");\n", "file_path": "jvmkill-tests/tests/common/mod.rs", "rank": 10, "score": 70686.02014966933 }, { "content": "\n\n assert!(!o.status.success());\n\n self.assert_contents(&o.stdout, &self.std_out);\n\n self.assert_contents(&o.stderr, &self.std_err);\n\n }\n\n\n\n fn agent(&self) -> PathBuf {\n\n let lib_name = if cfg!(target_os = \"macos\") { \"libjvmkill.dylib\" } else { \"libjvmkill.so\" };\n\n\n\n return env::var(\"LD_LIBRARY_PATH\")\n\n .or(env::var(\"DYLD_LIBRARY_PATH\"))\n\n .or(env::var(\"DYLD_FALLBACK_LIBRARY_PATH\")).unwrap()\n\n .split(\":\")\n\n .map(|root| PathBuf::from(root).join(lib_name))\n\n .find(|path| path.exists()).unwrap();\n\n }\n\n\n\n fn assert_contents(&self, stream: &Vec<u8>, expected: &Vec<&'r str>) {\n\n let actual = String::from_utf8_lossy(stream);\n\n\n", "file_path": "jvmkill-tests/tests/common/mod.rs", "rank": 11, "score": 70681.60334158927 }, { "content": " 
assert!(expected.iter().all(|&s| actual.contains(s)),\n\n \"{:?} were not found in:\\n>>>\\n{}\\n<<<\\n\", expected, actual);\n\n }\n\n\n\n fn jar(&self) -> PathBuf {\n\n return env::current_dir().unwrap()\n\n .parent().unwrap()\n\n .join(\"resource-exhaustion-generator\").join(\"target\").join(\"resource-exhaustion-generator-0.0.0.jar\");\n\n }\n\n\n\n fn java(&self) -> PathBuf {\n\n return PathBuf::from(env::var(\"JAVA_HOME\").unwrap())\n\n .join(\"bin\").join(\"java\");\n\n }\n\n}\n", "file_path": "jvmkill-tests/tests/common/mod.rs", "rank": 12, "score": 70679.35222744425 }, { "content": "fn main() {\n\n let i = PathBuf::from(env::var(\"JAVA_HOME\").unwrap()).join(\"include\");\n\n let p = if cfg!(target_os = \"macos\") { i.join(\"darwin\") } else { i.join(\"linux\") };\n\n\n\n let bindings = bindgen::Builder::default()\n\n .header(\"src/bindings.h\")\n\n .derive_default(true)\n\n .parse_callbacks(Box::new(bindgen::CargoCallbacks))\n\n .clang_arg(format!(\"-I/{}\", i.to_str().unwrap()))\n\n .clang_arg(format!(\"-I/{}\", p.to_str().unwrap()))\n\n .generate()\n\n .expect(\"Failed to generate bindings\");\n\n\n\n bindings\n\n .write_to_file(PathBuf::from(\"src/bindings.rs\"))\n\n .expect(\"Failed to write bindings\");\n\n}", "file_path": "jvmkill/build.rs", "rank": 13, "score": 48711.73092159219 }, { "content": "struct Statistics {\n\n committed: i64,\n\n init: i64,\n\n max: i64,\n\n name: String,\n\n used: i64,\n\n}\n\n\n\nimpl Statistics {\n\n fn from_memory_pool<T: JNI>(memory_pool: MemoryPoolMXBean<T>) -> Self {\n\n return Statistics::from_usage(memory_pool.get_name(), memory_pool.get_usage());\n\n }\n\n\n\n fn from_usage<T: JNI>(name: String, usage: MemoryUsage<T>) -> Self {\n\n return Self {\n\n committed: usage.get_committed(),\n\n init: usage.get_init(),\n\n max: usage.get_max(),\n\n name,\n\n used: usage.get_used(),\n", "file_path": "jvmkill/src/action/memory_pools.rs", "rank": 14, "score": 46654.893131007164 }, { "content": "fn main() {\n\n 
assert!(Command::new(\"./mvnw\")\n\n .arg(\"clean\")\n\n .arg(\"package\")\n\n .status().unwrap().success());\n\n}\n", "file_path": "resource-exhaustion-generator/build.rs", "rank": 15, "score": 46240.220536619396 }, { "content": "#[cfg_attr(test, automock)]\n\npub trait JNI {\n\n fn call_int_method(&self, instance: jobject, method: jmethodID) -> jint;\n\n\n\n fn call_long_method(&self, instance: jobject, method: jmethodID) -> jlong;\n\n\n\n fn call_object_method(&self, instance: jobject, method: jmethodID) -> Option<jobject>;\n\n\n\n fn call_object_method_a(&self, instance: jobject, method: jmethodID, args: &[jvalue]) -> Option<jobject>;\n\n\n\n fn call_static_object_method(&self, class: jclass, method: jmethodID) -> Option<jobject>;\n\n\n\n fn call_static_object_method_a(&self, class: jclass, method: jmethodID, args: &[jvalue]) -> Option<jobject>;\n\n\n\n fn find_class(&self, class: &str) -> Option<jclass>;\n\n\n\n fn get_method(&self, class: jclass, method: &str, signature: &str) -> Option<jmethodID>;\n\n\n\n fn get_static_method(&self, class: jclass, method: &str, signature: &str) -> Option<jmethodID>;\n\n\n\n fn get_string_utf_chars(&self, s: jstring) -> Option<String>;\n", "file_path": "jvmkill/src/jni.rs", "rank": 16, "score": 44449.62624420012 }, { "content": "#[cfg_attr(test, automock)]\n\npub trait Signal {\n\n fn delay(&self) -> bool;\n\n\n\n fn kill(&self);\n\n\n\n fn signal(&self) -> c_int;\n\n}\n\n\n\npub struct DefaultSignal {\n\n pub delay: bool,\n\n pub signal: c_int,\n\n}\n\n\n\nimpl Signal for DefaultSignal {\n\n fn delay(&self) -> bool {\n\n return self.delay;\n\n }\n\n\n\n fn kill(&self) {\n\n unsafe { kill(getpid(), self.signal) };\n", "file_path": "jvmkill/src/action/signal.rs", "rank": 17, "score": 43341.4989902134 }, { "content": "/*\n\n * Copyright 2015-2020 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n 
* You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::common::Runner;\n\n\n\nmod common;\n\n\n\n#[test]\n", "file_path": "jvmkill-tests/tests/thread.rs", "rank": 18, "score": 40633.33547291941 }, { "content": "/*\n\n * Copyright 2015-2020 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::env;\n\n\n\nuse crate::common::Runner;\n\n\n\nmod common;\n\n\n\n#[test]\n", "file_path": "jvmkill-tests/tests/memory.rs", "rank": 19, "score": 40632.119830342526 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::ffi::CStr;\n\nuse std::mem;\n\nuse std::os::raw::{c_uchar, c_void};\n\nuse std::ptr;\n\n\n\nuse mockall::automock;\n\n\n\nuse crate::bindings::{JavaVM, jclass, jint, jlong, JNI_OK, jthread, JVMTI_VERSION_1, JVMTI_VERSION_11, JVMTI_VERSION_1_0, JVMTI_VERSION_1_1, JVMTI_VERSION_1_2, JVMTI_VERSION_9, jvmtiCapabilities, jvmtiEnv, jvmtiError_JVMTI_ERROR_NONE, jvmtiEvent, jvmtiEventCallbacks, jvmtiEventMode, jvmtiHeapCallbacks};\n\n\n\n#[cfg_attr(test, automock(type LoadedClassesIterator = ArrayPointerLoadedClassesIterator;))]\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 20, "score": 33766.33415546709 }, { "content": " }\n\n }\n\n\n\n fn set_tag(&self, class: *mut jclass, tag: jlong) {\n\n let f = unsafe { (**self.internal).SetTag }\n\n .expect(\"jvmtiEnv.SetTag not found\");\n\n\n\n let r = unsafe { f(self.internal, *class, tag) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to set tag: {}\", r);\n\n }\n\n }\n\n}\n\n\n\npub struct ArrayPointerLoadedClassesIterator {\n\n pub count: i32,\n\n pub classes: *mut jclass,\n\n}\n\n\n\nimpl Default for ArrayPointerLoadedClassesIterator {\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 21, "score": 33721.12701752063 }, { "content": "\n\n fn get_class_signature(&self, class: *mut jclass) -> (String, String) {\n\n let mut signature = ptr::null_mut();\n\n let mut generic = ptr::null_mut();\n\n\n\n let f = unsafe { (**self.internal).GetClassSignature }\n\n .expect(\"jvmtiEnv.GetClassSignature not found\");\n\n\n\n let r = unsafe { f(self.internal, *class, &mut signature, &mut generic) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to get class signature: {}\", r);\n\n }\n\n\n\n let s = String::from(unsafe { CStr::from_ptr(signature) }\n\n .to_string_lossy());\n\n\n\n self.deallocate(signature as *mut c_uchar);\n\n\n\n if generic == 
ptr::null_mut() {\n\n return (s, String::new());\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 22, "score": 33718.32067849364 }, { "content": " }\n\n\n\n let g = String::from(unsafe { CStr::from_ptr(generic) }\n\n .to_string_lossy());\n\n\n\n return (s, g);\n\n }\n\n\n\n fn get_loaded_classes(&self) -> Self::LoadedClassesIterator {\n\n let f = unsafe { (**self.internal).GetLoadedClasses }\n\n .expect(\"jvmtiEnv.GetLoadedClasses not found\");\n\n\n\n let mut count = 0;\n\n let mut classes = ptr::null_mut();\n\n\n\n let r = unsafe { f(self.internal, &mut count, &mut classes) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to get loaded classes: {}\", r);\n\n }\n\n\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 23, "score": 33717.468976663 }, { "content": " fn default() -> Self {\n\n return Self { count: 0, classes: ptr::null_mut() };\n\n }\n\n}\n\n\n\nimpl Iterator for ArrayPointerLoadedClassesIterator {\n\n type Item = *mut jclass;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.count == 0 {\n\n return None;\n\n }\n\n\n\n let r = Some(self.classes);\n\n\n\n self.count -= 1;\n\n self.classes = unsafe { self.classes.offset(1) };\n\n\n\n return r;\n\n }\n\n}\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 24, "score": 33711.77220282067 }, { "content": "}\n\n\n\nimpl DefaultJVMTI {\n\n pub fn new(jvmti_env: *mut jvmtiEnv) -> Self {\n\n return Self { internal: jvmti_env };\n\n }\n\n\n\n pub fn from(vm: *mut JavaVM) -> Self {\n\n let p = Self::get_jvmti(vm);\n\n return Self::new(p as *mut jvmtiEnv);\n\n }\n\n\n\n fn deallocate(&self, mem: *mut c_uchar) {\n\n let f = unsafe { (**self.internal).Deallocate }\n\n .expect(\"jvmtiEnv.Deallocate not found\");\n\n\n\n let r = unsafe { f(self.internal, mem) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to deallocate: {}\", r);\n\n }\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 25, "score": 33710.054007673476 }, { "content": " }\n\n\n\n fn get_jvmti(vm: *mut JavaVM) -> 
*mut c_void {\n\n let f = unsafe { (**vm).GetEnv }\n\n .expect(\"jvmtiEnv.GetEnv method not found\");\n\n\n\n for c in vec!(JVMTI_VERSION_11, JVMTI_VERSION_9, JVMTI_VERSION_1_2, JVMTI_VERSION_1_1, JVMTI_VERSION_1_0, JVMTI_VERSION_1) {\n\n let mut p = ptr::null_mut();\n\n let r = unsafe { f(vm, &mut p, c as jint) };\n\n if r == JNI_OK as i32 {\n\n return p;\n\n }\n\n }\n\n\n\n panic!(\"JVMTI not available\");\n\n }\n\n}\n\n\n\nimpl JVMTI for DefaultJVMTI {\n\n type LoadedClassesIterator = ArrayPointerLoadedClassesIterator;\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 26, "score": 33709.87324704032 }, { "content": " return ArrayPointerLoadedClassesIterator { count, classes };\n\n }\n\n\n\n fn set_event_callbacks(&self, callbacks: *const jvmtiEventCallbacks) {\n\n let f = unsafe { (**self.internal).SetEventCallbacks }\n\n .expect(\"jvmtiEnv.SetEventCallbacks method not found\");\n\n\n\n let r = unsafe { f(self.internal, callbacks, mem::size_of::<jvmtiEventCallbacks>() as jint) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to set event callbacks: {}\", r);\n\n }\n\n }\n\n\n\n fn set_event_notification_mode(&self, mode: jvmtiEventMode, event_type: jvmtiEvent, event_thread: jthread) {\n\n let f = unsafe { (**self.internal).SetEventNotificationMode }\n\n .expect(\"jvmti.SetEventNotificationMode method not found\");\n\n\n\n let r = unsafe { f(self.internal, mode, event_type, event_thread) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to set event notification mode: {}\", r);\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 27, "score": 33704.85981308362 }, { "content": "\n\n fn add_capabilities(&self, capabilities: jvmtiCapabilities) {\n\n let f = unsafe { (**self.internal).AddCapabilities }\n\n .expect(\"jvmtiEnv.AddCapabilities not found\");\n\n\n\n let r = unsafe { f(self.internal, &capabilities) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to add callbacks: {}\", r);\n\n }\n\n }\n\n\n\n fn 
follow_references(&self, heap_filter: jint, class: jclass, initial_object: jclass, callbacks: *const jvmtiHeapCallbacks, user_data: *const c_void) {\n\n let f = unsafe { (**self.internal).FollowReferences }\n\n .expect(\"jvmtiEnv.FollowReferences not found\");\n\n\n\n let r = unsafe { f(self.internal, heap_filter, class, initial_object, callbacks, user_data) };\n\n if r != jvmtiError_JVMTI_ERROR_NONE {\n\n panic!(\"unable to follow references: {}\", r);\n\n }\n\n }\n", "file_path": "jvmkill/src/jvmti.rs", "rank": 28, "score": 33701.56951050692 }, { "content": "/*\n\n * Copyright 2015-2020 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nmacro_rules! 
jni_type {\n\n ($size:literal, $type:ty) => {\n\n unsafe { libc::malloc($size * std::mem::size_of::<$type>() as libc::size_t) } as $type\n\n };\n", "file_path": "jvmkill/src/test_macros.rs", "rank": 35, "score": 32387.045480063658 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\npub use class_formatter::ClassFormatter;\n\npub use contents::Contents;\n\npub use types::Types;\n\n\n\nmod class_formatter;\n\nmod contents;\n\nmod types;", "file_path": "jvmkill/src/heap/mod.rs", "rank": 36, "score": 32345.72098786125 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\npub use context::Context;\n\npub use parameters::Parameters;\n\n\n\nmod context;\n\nmod events;\n\nmod parameters;", "file_path": "jvmkill/src/context/mod.rs", 
"rank": 37, "score": 32342.35751769628 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::action::heap_dump::HeapDump;\n\nuse crate::action::heap_histogram::HeapHistogram;\n\nuse crate::action::kill::Kill;\n\nuse crate::action::memory_pools::MemoryPools;\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 38, "score": 32330.613487254876 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\npub use hotspot_diagnostic_mxbean::HotspotDiagnosticMXBean;\n\npub use management_factory::ManagementFactory;\n\npub use memory_mxbean::MemoryMXBean;\n\npub use memory_pool_mxbean::MemoryPoolMXBean;\n\npub use memory_usage::MemoryUsage;\n\n\n\nmod hotspot_diagnostic_mxbean;\n\nmod management_factory;\n\nmod 
memory_mxbean;\n\nmod memory_pool_mxbean;\n\nmod memory_usage;\n\n\n", "file_path": "jvmkill/src/jmx/mod.rs", "rank": 39, "score": 32328.415888272626 }, { "content": "\n\n ($type:ty) => {\n\n jni_type!(1, $type)\n\n };\n\n}\n\n\n\nmacro_rules! jni_type_const {\n\n ($size:literal, $type:ty) => {\n\n unsafe { libc::malloc($size * std::mem::size_of::<$type>() as libc::size_t) } as *const $type\n\n };\n\n\n\n ($type:ty) => {\n\n jni_type_const!(1, $type)\n\n };\n\n}\n", "file_path": "jvmkill/src/test_macros.rs", "rank": 40, "score": 32314.15333120651 }, { "content": " }\n\n\n\n #[test]\n\n fn execute_print_memory_usage_false() {\n\n let jvmti = MockJVMTI::new();\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_management_factory = jni_type!(jclass);\n\n jni\n\n .expect_find_class()\n\n .withf_st(move |a_class| a_class == \"java/lang/management/ManagementFactory\")\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move |_| Option::Some(c_management_factory));\n\n\n\n let factory = ManagementFactory::new(&jni);\n\n\n\n let p = Parameters { print_memory_usage: false, ..Default::default() };\n\n let a = Actions::new(&p, &jvmti, &factory);\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 41, "score": 32278.18541776484 }, { "content": "\n\n assert_eq!(a.actions.len(), 2);\n\n }\n\n\n\n #[test]\n\n fn execute_heap_dump_path() {\n\n let jvmti = MockJVMTI::new();\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_management_factory = jni_type!(jclass);\n\n jni\n\n .expect_find_class()\n\n .withf_st(move |a_class| a_class == \"java/lang/management/ManagementFactory\")\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move |_| Option::Some(c_management_factory));\n\n\n\n let factory = ManagementFactory::new(&jni);\n\n\n\n let p = Parameters { heap_dump_path: Some(PathBuf::from(\"test-dir\")), ..Default::default() };\n\n let a = Actions::new(&p, &jvmti, &factory);\n\n\n\n 
assert_eq!(a.actions.len(), 4);\n\n }\n\n}\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 42, "score": 32278.137057669734 }, { "content": " #[test]\n\n fn execute_print_heap_histogram_true() {\n\n let jvmti = MockJVMTI::new();\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_management_factory = jni_type!(jclass);\n\n jni\n\n .expect_find_class()\n\n .withf_st(move |a_class| a_class == \"java/lang/management/ManagementFactory\")\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move |_| Option::Some(c_management_factory));\n\n\n\n let factory = ManagementFactory::new(&jni);\n\n\n\n let p = Parameters { print_heap_histogram: true, ..Default::default() };\n\n let a = Actions::new(&p, &jvmti, &factory);\n\n\n\n assert_eq!(a.actions.len(), 4);\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 43, "score": 32277.806831820562 }, { "content": " let jvmti = MockJVMTI::new();\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_management_factory = jni_type!(jclass);\n\n jni\n\n .expect_find_class()\n\n .withf_st(move |a_class| a_class == \"java/lang/management/ManagementFactory\")\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move |_| Option::Some(c_management_factory));\n\n\n\n let factory = ManagementFactory::new(&jni);\n\n\n\n let p = Parameters { ..Default::default() };\n\n let a = Actions::new(&p, &jvmti, &factory);\n\n\n\n assert_eq!(a.actions.len(), 3);\n\n }\n\n\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 44, "score": 32276.47372956433 }, { "content": "use crate::action::thread_dump::ThreadDump;\n\nuse crate::bindings::{jint, JVMTI_RESOURCE_EXHAUSTED_THREADS};\n\nuse crate::context::Parameters;\n\nuse crate::jmx::ManagementFactory;\n\nuse crate::jni::JNI;\n\nuse crate::jvmti::JVMTI;\n\n\n\nmod heap_dump;\n\nmod heap_histogram;\n\nmod kill;\n\nmod memory_pools;\n\nmod signal;\n\nmod thread_dump;\n\n\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 45, 
"score": 32271.03436734472 }, { "content": " actions.push(Box::new(ThreadDump::new()));\n\n\n\n match &parameters.heap_dump_path {\n\n Some(p) => actions.push(Box::new(HeapDump::new(factory, p))),\n\n None => {}\n\n };\n\n\n\n actions.push(Box::new(Kill::new()));\n\n\n\n return Self { actions };\n\n }\n\n\n\n pub fn execute(&self, flags: jint) {\n\n for action in &self.actions {\n\n action.execute(flags);\n\n }\n\n }\n\n}\n\n\n", "file_path": "jvmkill/src/action/mod.rs", "rank": 46, "score": 32264.00455226082 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse regex::Regex;\n\n\n\npub struct ClassFormatter {\n\n pattern: Regex\n", "file_path": "jvmkill/src/heap/class_formatter.rs", "rank": 47, "score": 31405.93281503029 }, { "content": "}\n\n\n\nimpl ClassFormatter {\n\n pub fn new() -> Self {\n\n let pattern = Regex::new(r\"(\\[*)([BCDFIJLSZ])(?:([a-zA-z/$0-9]+);)?\").unwrap();\n\n return Self { pattern };\n\n }\n\n\n\n pub fn format(&self, raw: &String) -> String {\n\n let c = self.pattern.captures(raw.as_str())\n\n .expect(format!(\"invalid class name: {}\", raw).as_str());\n\n\n\n let mut s = String::new();\n\n\n\n match &c[2] {\n\n \"Z\" => s.push_str(\"boolean\"),\n\n \"B\" => s.push_str(\"byte\"),\n\n \"C\" => s.push_str(\"char\"),\n\n \"D\" => s.push_str(\"double\"),\n\n \"F\" => s.push_str(\"float\"),\n", "file_path": 
"jvmkill/src/heap/class_formatter.rs", "rank": 48, "score": 31339.317968494375 }, { "content": " \"I\" => s.push_str(\"int\"),\n\n \"J\" => s.push_str(\"long\"),\n\n \"S\" => s.push_str(\"short\"),\n\n \"L\" => s.push_str(&c[3].replace(\"/\", \".\")),\n\n _ => panic!(\"unknown type\"),\n\n };\n\n\n\n for _ in 0..c[1].len() {\n\n s.push_str(\"[]\");\n\n }\n\n\n\n return s;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::heap::ClassFormatter;\n\n\n\n #[test]\n", "file_path": "jvmkill/src/heap/class_formatter.rs", "rank": 49, "score": 31337.25252852626 }, { "content": " fn arrays() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"[[Z\")), \"boolean[][]\");\n\n }\n\n\n\n #[test]\n\n fn boolean() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"Z\")), \"boolean\");\n\n }\n\n\n\n #[test]\n\n fn byte() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"B\")), \"byte\");\n\n }\n\n\n\n #[test]\n\n fn char() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"C\")), \"char\");\n\n }\n\n\n\n #[test]\n", "file_path": "jvmkill/src/heap/class_formatter.rs", "rank": 50, "score": 31336.528131913255 }, { "content": " #[should_panic(expected = \"invalid class name\")]\n\n fn invalid() {\n\n ClassFormatter::new().format(&String::from(\"Q\"));\n\n }\n\n\n\n #[test]\n\n fn long() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"J\")), \"long\");\n\n }\n\n\n\n #[test]\n\n fn short() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"S\")), \"short\");\n\n }\n\n}\n", "file_path": "jvmkill/src/heap/class_formatter.rs", "rank": 51, "score": 31336.08903320011 }, { "content": " fn class() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"Lorg/cloudfoundry/MyClass;\")), \"org.cloudfoundry.MyClass\");\n\n }\n\n\n\n #[test]\n\n fn double() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"D\")), \"double\");\n\n }\n\n\n\n #[test]\n\n fn float() {\n\n 
assert_eq!(ClassFormatter::new().format(&String::from(\"F\")), \"float\");\n\n }\n\n\n\n #[test]\n\n fn int() {\n\n assert_eq!(ClassFormatter::new().format(&String::from(\"I\")), \"int\");\n\n }\n\n\n\n #[test]\n", "file_path": "jvmkill/src/heap/class_formatter.rs", "rank": 52, "score": 31334.049490733527 }, { "content": " private static final int SPAWNER_COUNT = 20;\n", "file_path": "resource-exhaustion-generator/src/main/java/org/cloudfoundry/jvmkill/ParallelThreadExhaustion.java", "rank": 53, "score": 20227.05216346904 }, { "content": "package org.cloudfoundry.jvmkill;\n", "file_path": "resource-exhaustion-generator/src/main/java/org/cloudfoundry/jvmkill/Sleeper.java", "rank": 54, "score": 85.83175656107379 }, { "content": "package org.cloudfoundry.jvmkill;\n", "file_path": "resource-exhaustion-generator/src/main/java/org/cloudfoundry/jvmkill/Spawner.java", "rank": 55, "score": 85.83175656107379 }, { "content": "package org.cloudfoundry.jvmkill;\n", "file_path": "resource-exhaustion-generator/src/main/java/org/cloudfoundry/jvmkill/ThreadExhaustion.java", "rank": 56, "score": 85.83175656107379 }, { "content": "package org.cloudfoundry.jvmkill;\n", "file_path": "resource-exhaustion-generator/src/main/java/org/cloudfoundry/jvmkill/MemoryExhaustion.java", "rank": 57, "score": 85.83175656107379 }, { "content": "package org.cloudfoundry.jvmkill;\n", "file_path": "resource-exhaustion-generator/src/main/java/org/cloudfoundry/jvmkill/ParallelThreadExhaustion.java", "rank": 58, "score": 85.83175656107382 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" 
BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::bindings::{jclass, jobject};\n\nuse crate::jni::JNI;\n\n\n\npub struct MemoryUsage<'m, J: JNI> {\n", "file_path": "jvmkill/src/jmx/memory_usage.rs", "rank": 59, "score": 83.79159987399471 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n", "file_path": "resource-exhaustion-generator/src/lib.rs", "rank": 60, "score": 83.26705036605603 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::process::Command;\n\n\n", "file_path": "resource-exhaustion-generator/build.rs", "rank": 61, "score": 
81.82083444481225 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::bindings::{jclass, JNI_TRUE, jobject, jvalue};\n\nuse crate::jni::JNI;\n\n\n\npub struct HotspotDiagnosticMXBean<'h, J: JNI> {\n", "file_path": "jvmkill/src/jmx/hotspot_diagnostic_mxbean.rs", "rank": 62, "score": 81.22467724962684 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::cmp;\n\n\n\nuse crate::action::Action;\n\nuse crate::bindings::{jint, jlong, JNI_TRUE, jvmtiCapabilities};\n", "file_path": "jvmkill/src/action/heap_histogram.rs", "rank": 63, "score": 80.9098656737787 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the 
\"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::env;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "jvmkill/build.rs", "rank": 64, "score": 80.10476815535237 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::time::Instant;\n\n\n\npub struct Events {\n\n events: circular_queue::CircularQueue<Instant>\n", "file_path": "jvmkill/src/context/events.rs", "rank": 65, "score": 79.90197931335501 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is 
distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::ffi::{CStr, CString};\n\nuse std::os::raw::c_char;\n\nuse std::ptr;\n\n\n\nuse mockall::automock;\n\n\n\nuse crate::bindings::{jclass, jint, jlong, jmethodID, JNI_TRUE, JNIEnv, jobject, jstring, jvalue};\n\n\n\n#[cfg_attr(test, automock)]\n", "file_path": "jvmkill/src/jni.rs", "rank": 66, "score": 78.71028393421884 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::bindings::{jclass, jobject};\n\nuse crate::jmx::memory_usage::MemoryUsage;\n\nuse crate::jni::JNI;\n\n\n", "file_path": "jvmkill/src/jmx/memory_mxbean.rs", "rank": 67, "score": 78.5869776858642 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::bindings::{jclass, jobject};\n\nuse crate::jmx::memory_usage::MemoryUsage;\n\nuse crate::jni::JNI;\n\n\n", "file_path": "jvmkill/src/jmx/memory_pool_mxbean.rs", "rank": 68, "score": 78.58697768586417 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::ffi::CStr;\n\nuse std::os::raw::c_char;\n\nuse std::path::PathBuf;\n\nuse std::ptr;\n", "file_path": "jvmkill/src/context/parameters.rs", "rank": 69, "score": 77.92098117137552 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::fmt::{Display, Error, Formatter};\n\n\n\nuse crate::action;\n\nuse 
crate::action::Action;\n", "file_path": "jvmkill/src/action/memory_pools.rs", "rank": 70, "score": 77.85208535106314 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::os::raw::{c_char, c_void};\n\nuse std::ptr;\n\nuse std::sync::Mutex;\n\n\n", "file_path": "jvmkill/src/lib.rs", "rank": 71, "score": 77.85208535106312 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::{env, fs};\n\nuse std::path::PathBuf;\n\n\n\nuse time::OffsetDateTime;\n", "file_path": "jvmkill/src/action/heap_dump.rs", "rank": 72, "score": 77.36589427963808 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use 
this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::ops::Sub;\n\nuse std::os::raw::c_char;\n\nuse std::time::{Duration, Instant};\n\n\n", "file_path": "jvmkill/src/context/context.rs", "rank": 73, "score": 77.36589427963808 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::{mem, ptr};\n\nuse std::collections::HashMap;\n\nuse std::os::raw::c_void;\n\n\n", "file_path": "jvmkill/src/heap/contents.rs", "rank": 74, "score": 77.36589427963808 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is 
distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse libc::{c_int, getpid, kill};\n\nuse mockall::automock;\n\n\n\n#[cfg_attr(test, automock)]\n", "file_path": "jvmkill/src/action/signal.rs", "rank": 75, "score": 77.34111897750432 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::action::Action;\n\nuse crate::action::signal::{DefaultSignal, Signal};\n\nuse crate::bindings::jint;\n\n\n", "file_path": "jvmkill/src/action/thread_dump.rs", "rank": 76, "score": 76.885908089285 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions 
and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::action::Action;\n\nuse crate::action::signal::{DefaultSignal, Signal};\n\nuse crate::bindings::jint;\n\n\n", "file_path": "jvmkill/src/action/kill.rs", "rank": 77, "score": 76.88590808928501 }, { "content": "/*\n\n * Copyright 2015-2019 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::bindings::{jclass, jobject, jvalue};\n\nuse crate::jmx::hotspot_diagnostic_mxbean::HotspotDiagnosticMXBean;\n\nuse crate::jmx::memory_mxbean::MemoryMXBean;\n\nuse crate::jmx::memory_pool_mxbean::MemoryPoolMXBean;\n", "file_path": "jvmkill/src/jmx/management_factory.rs", "rank": 78, "score": 70.96118189301977 }, { "content": "## Use short branch names\n\nBranches used when submitting pull requests should preferably using succinct, lower-case, dash (-) delimited names, such as 'fix-warnings', 'fix-typo', etc. In [fork-and-edit][] cases, the GitHub default 'patch-1' is fine as well. This is important, because branch names show up in the merge commits that result from accepting pull requests, and should be as expressive and concise as possible.\n\n\n\n[fork-and-edit]: https://github.com/blog/844-forking-with-the-edit-button\n\n\n\n## Mind the whitespace\n\nPlease carefully follow the whitespace and formatting conventions already present in the code.\n\n\n\n1. Tabs, not spaces\n\n1. Unix (LF), not DOS (CRLF) line endings\n\n1. 
Eliminate all trailing whitespace\n\n1. Aim to wrap code at 120 characters, but favor readability over wrapping\n\n1. Preserve existing formatting; i.e. do not reformat code for its own sake\n\n1. Search the codebase using `git grep` and other tools to discover common naming conventions, etc.\n\n1. Latin-1 (ISO-8859-1) encoding for sources; use `native2ascii` to convert if necessary\n\n\n\n## Add Apache license header to all new files\n\n```C\n\n/*\n\n * Copyright (c) 2017 the original author or authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n```\n", "file_path": "docs/CONTRIBUTING.md", "rank": 79, "score": 53.225918363583645 }, { "content": " .expect_get_class_signature()\n\n .withf_st(move |&a_class| ptr::eq(a_class, unsafe { classes.offset(2) }))\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_const((String::from(\"Lcharlie;\"), String::from(\"charlie-generic\")));\n\n\n\n let reference_kind = jvmtiHeapReferenceKind_JVMTI_HEAP_REFERENCE_CLASS;\n\n let reference_info = jni_type_const!(jvmtiHeapReferenceInfo);\n\n let referrer_class_tag = 100 as jlong;\n\n let tag_ptr = jni_type!(jlong) as *mut jlong;\n\n let referrer_tag_ptr = jni_type!(jlong) as *mut jlong;\n\n let length = 100 as jint;\n\n\n\n jvmti\n\n .expect_follow_references()\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move |_, _, _, c: *const jvmtiHeapCallbacks, u: *const c_void| {\n\n unsafe {\n\n let h = 
(*c).heap_reference_callback\n", "file_path": "jvmkill/src/action/heap_histogram.rs", "rank": 80, "score": 32.144000812066984 }, { "content": "use crate::bindings::{jint, jlong, JVMTI_VISIT_OBJECTS, jvmtiHeapCallbacks, jvmtiHeapReferenceInfo, jvmtiHeapReferenceKind};\n\nuse crate::jvmti::JVMTI;\n\n\n\npub struct Contents<'c, J: JVMTI> {\n\n jvmti: &'c J,\n\n contents: Vec<Statistics>,\n\n}\n\n\n\nimpl<'c, J: JVMTI> Contents<'c, J> {\n\n pub fn new(jvmti: &'c J) -> Self {\n\n return Self { jvmti, contents: Vec::new() };\n\n }\n\n\n\n pub fn analyze_heap(&mut self) {\n\n let mut contents: HashMap<jlong, Statistics> = HashMap::new();\n\n\n\n let mut c = |tag, size| {\n\n let s = contents.entry(tag).or_insert(Statistics { tag, ..Default::default() });\n\n s.count += 1;\n\n s.total_size += size;\n", "file_path": "jvmkill/src/heap/contents.rs", "rank": 81, "score": 32.05616851605444 }, { "content": " let tag = class_tag & !TAG_VISITED_MASK;\n\n\n\n let c: &mut &mut dyn FnMut(jlong, jlong) = mem::transmute(user_data);\n\n c(tag, size);\n\n\n\n return JVMTI_VISIT_OBJECTS as jint;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::os::raw::c_void;\n\n\n\n use mockall::Sequence;\n\n\n\n use crate::bindings::{jint, jlong, jvmtiHeapCallbacks, jvmtiHeapReferenceInfo, jvmtiHeapReferenceKind_JVMTI_HEAP_REFERENCE_CLASS};\n\n use crate::heap::Contents;\n\n use crate::heap::contents::Statistics;\n\n use crate::jvmti::MockJVMTI;\n\n\n\n #[test]\n", "file_path": "jvmkill/src/heap/contents.rs", "rank": 82, "score": 31.33939383891652 }, { "content": "use crate::heap::{ClassFormatter, Contents, Types};\n\nuse crate::jvmti::JVMTI;\n\n\n\npub struct HeapHistogram<'h, J: JVMTI> {\n\n jvmti: &'h J,\n\n max_entries: usize,\n\n}\n\n\n\nimpl<'h, J: JVMTI> HeapHistogram<'h, J> {\n\n pub fn new(jvmti: &'h J, max_entries: usize) -> Self {\n\n return Self { jvmti, max_entries };\n\n }\n\n}\n\n\n\nimpl<'h, J: JVMTI> Action for HeapHistogram<'h, J> {\n\n fn execute(&self, _flags: jint) {\n\n let 
mut c: jvmtiCapabilities = Default::default();\n\n c.set_can_tag_objects(JNI_TRUE);\n\n self.jvmti.add_capabilities(c);\n\n\n", "file_path": "jvmkill/src/action/heap_histogram.rs", "rank": 83, "score": 30.33250007666133 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Statistics {\n\n pub count: usize,\n\n pub total_size: jlong,\n\n pub tag: jlong,\n\n}\n\n\n\nconst TAG_VISITED_MASK: jlong = 1 << 31;\n\n\n\n#[allow(non_snake_case)]\n\nunsafe extern \"C\" fn heapReferenceCallback(_reference_kind: jvmtiHeapReferenceKind, _reference_info: *const jvmtiHeapReferenceInfo, class_tag: jlong, _referrer_class_tag: jlong, size: jlong,\n\n tag_ptr: *mut jlong, _referrer_tag_ptr: *mut jlong, _length: jint, user_data: *mut c_void) -> jint {\n\n if *tag_ptr & TAG_VISITED_MASK == TAG_VISITED_MASK {\n\n return 0;\n\n }\n\n\n\n *tag_ptr |= TAG_VISITED_MASK;\n\n\n", "file_path": "jvmkill/src/heap/contents.rs", "rank": 84, "score": 29.874424069502762 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::os::raw::c_void;\n\n use std::ptr;\n\n\n\n use mockall::Sequence;\n\n\n\n use crate::action::Action;\n\n use crate::action::heap_histogram::HeapHistogram;\n\n use crate::bindings::{jclass, jint, jlong, JNI_TRUE, JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP, jvmtiHeapCallbacks, jvmtiHeapReferenceInfo, jvmtiHeapReferenceKind_JVMTI_HEAP_REFERENCE_CLASS};\n\n use crate::jvmti::{ArrayPointerLoadedClassesIterator, MockJVMTI};\n\n\n\n #[test]\n\n fn execute() {\n\n let mut jvmti = MockJVMTI::new();\n\n let mut seq = Sequence::new();\n\n\n", "file_path": "jvmkill/src/action/heap_histogram.rs", "rank": 85, "score": 27.81594376073165 }, { "content": " fn analyze_heap_and_get_contents() {\n\n let mut jvmti = MockJVMTI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let reference_kind = jvmtiHeapReferenceKind_JVMTI_HEAP_REFERENCE_CLASS;\n\n let reference_info = jni_type_const!(jvmtiHeapReferenceInfo);\n\n let referrer_class_tag = 100 as jlong;\n\n let 
tag_ptr = jni_type!(jlong) as *mut jlong;\n\n let referrer_tag_ptr = jni_type!(jlong) as *mut jlong;\n\n let length = 100 as jint;\n\n\n\n jvmti\n\n .expect_follow_references()\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move |_, _, _, c: *const jvmtiHeapCallbacks, u: *const c_void| {\n\n unsafe {\n\n let h = (*c).heap_reference_callback\n\n .unwrap();\n\n\n", "file_path": "jvmkill/src/heap/contents.rs", "rank": 86, "score": 27.67361982448559 }, { "content": " jvmti\n\n .expect_add_capabilities()\n\n .withf_st(|&a_capabilities| a_capabilities.can_tag_objects() == JNI_TRUE)\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_const(());\n\n\n\n let classes = jni_type!(3, jclass) as *mut jclass;\n\n let loaded_classes = ArrayPointerLoadedClassesIterator { count: 3, classes };\n\n jvmti\n\n .expect_get_loaded_classes()\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move || loaded_classes);\n\n\n\n jvmti\n\n .expect_set_tag()\n\n .withf_st(move |&a_class, &a_tag| {\n\n ptr::eq(a_class, classes)\n\n && a_tag == 0\n", "file_path": "jvmkill/src/action/heap_histogram.rs", "rank": 87, "score": 25.709357786640673 }, { "content": " let mut types = Types::new(self.jvmti);\n\n types.tag_classes();\n\n\n\n let mut contents = Contents::new(self.jvmti);\n\n contents.analyze_heap();\n\n\n\n let f = ClassFormatter::new();\n\n\n\n let mut max = 10;\n\n let formatted: Vec<(usize, jlong, String)> = contents.get_contents(self.max_entries).iter()\n\n .map(|s| (s.count, s.total_size, f.format(types.get(s.tag))))\n\n .inspect(|(_c, _s, n)| max = cmp::max(max, n.len()))\n\n .collect();\n\n\n\n println!(\"\\n>>> Heap Histogram\");\n\n println!(\"| Instance Count | Total Bytes | Class Name{} |\", \" \".repeat(max - 10));\n\n println!(\"| -------------- | ----------- | {} |\", \"-\".repeat(max));\n\n for (c, s, n) in formatted {\n\n println!(\"| {:<14} | {:<11} | {}{} |\", c, s, n, \" \".repeat(max - n.len()));\n\n }\n", "file_path": 
"jvmkill/src/action/heap_histogram.rs", "rank": 88, "score": 25.216312664839677 }, { "content": " })\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_const(());\n\n\n\n jvmti\n\n .expect_get_class_signature()\n\n .withf_st(move |&a_class| ptr::eq(a_class, classes))\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_const((String::from(\"Lalpha;\"), String::from(\"alpha-generic\")));\n\n\n\n jvmti\n\n .expect_set_tag()\n\n .withf_st(move |&a_class, &a_tag| {\n\n ptr::eq(a_class, unsafe { classes.offset(1) })\n\n && a_tag == 1\n\n })\n\n .times(1)\n\n .in_sequence(&mut seq)\n", "file_path": "jvmkill/src/action/heap_histogram.rs", "rank": 89, "score": 24.21918441078261 }, { "content": " .return_const(());\n\n\n\n jvmti\n\n .expect_get_class_signature()\n\n .withf_st(move |&a_class| ptr::eq(a_class, unsafe { classes.offset(1) }))\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_const((String::from(\"Lbravo;\"), String::from(\"bravo-generic\")));\n\n\n\n jvmti\n\n .expect_set_tag()\n\n .withf_st(move |&a_class, &a_tag| {\n\n ptr::eq(a_class, unsafe { classes.offset(2) })\n\n && a_tag == 2\n\n })\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_const(());\n\n\n\n jvmti\n", "file_path": "jvmkill/src/action/heap_histogram.rs", "rank": 90, "score": 23.85798290304304 }, { "content": "}\n\n\n\nimpl Events {\n\n pub fn new(limit: usize) -> Events {\n\n return Events { events: circular_queue::CircularQueue::with_capacity(limit + 1) };\n\n }\n\n\n\n pub fn events_since(&mut self, since: Instant) -> usize {\n\n return self.events.iter()\n\n .filter(|&&i| i > since)\n\n .count();\n\n }\n\n\n\n pub fn record(&mut self) {\n\n self.events.push(Instant::now());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "jvmkill/src/context/events.rs", "rank": 91, "score": 23.65334820719517 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use std::ptr;\n\n\n\n use mockall::Sequence;\n\n\n\n use crate::bindings::{jclass, jint, jmethodID, jobject};\n\n use 
crate::jmx::ManagementFactory;\n\n use crate::jni::MockJNI;\n\n\n\n #[test]\n\n fn get_hotspot_diagnostic_mxbean() {\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_management_factory = jni_type!(jclass);\n\n jni\n\n .expect_find_class()\n\n .withf_st(move |a_class| a_class == \"java/lang/management/ManagementFactory\")\n", "file_path": "jvmkill/src/jmx/management_factory.rs", "rank": 92, "score": 23.563968625588917 }, { "content": " .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once(move |_| Option::Some(String::from(\"test-name\")));\n\n\n\n let m = MemoryPoolMXBean::new(c_memory_pool_mxbean, i_memory_pool_mxbean, &jni);\n\n assert_eq!(m.get_name(), String::from(\"test-name\"));\n\n }\n\n\n\n #[test]\n\n fn get_usage() {\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_memory_pool_mxbean = jni_type!(jclass);\n\n let i_memory_pool_mxbean = jni_type!(jobject);\n\n\n\n let m_get_usage = jni_type!(jmethodID);\n\n jni\n\n .expect_get_method()\n\n .withf_st(move |&a_class, a_method, a_signature| {\n", "file_path": "jvmkill/src/jmx/memory_pool_mxbean.rs", "rank": 93, "score": 22.335537030517287 }, { "content": " assert_eq!(m.get_committed(), 42);\n\n }\n\n\n\n #[test]\n\n fn get_init() {\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_memory_usage = jni_type!(jclass);\n\n let i_memory_usage = jni_type!(jobject);\n\n\n\n let m_get_init = jni_type!(jmethodID);\n\n jni\n\n .expect_get_method()\n\n .withf_st(move |&a_class, a_method, a_signature| {\n\n ptr::eq(a_class, c_memory_usage)\n\n && a_method == \"getInit\"\n\n && a_signature == \"()J\"\n\n })\n\n .times(1)\n", "file_path": "jvmkill/src/jmx/memory_usage.rs", "rank": 94, "score": 22.303331296752653 }, { "content": " return count > self.parameters.count_threshold;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::ffi::CString;\n\n\n\n use crate::context::Context;\n\n\n\n #[test]\n\n fn does_not_trigger() 
{\n\n assert_eq!(create(\"count=100,time=100\").record(), false);\n\n }\n\n\n\n #[test]\n\n fn triggers() {\n\n assert_eq!(create(\"count=0\").record(), true);\n\n }\n\n\n\n fn create(s: &str) -> Context {\n\n let options = CString::new(s)\n\n .expect(\"cannot convert to CString\");\n\n\n\n return Context::new(options.as_ptr());\n\n }\n\n}\n", "file_path": "jvmkill/src/context/context.rs", "rank": 95, "score": 21.71552572727976 }, { "content": " use crate::action::heap_dump::HeapDump;\n\n use crate::bindings::{jclass, jint, jmethodID, JNI_TRUE, jobject, jstring, JVMTI_RESOURCE_EXHAUSTED_JAVA_HEAP, JVMTI_RESOURCE_EXHAUSTED_THREADS};\n\n use crate::jmx::ManagementFactory;\n\n use crate::jni::MockJNI;\n\n\n\n #[test]\n\n fn execute() {\n\n let t = PathBuf::from(tempdir::TempDir::new(\"jvmkill\").unwrap().path());\n\n let u = t.clone();\n\n\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_management_factory = jni_type!(jclass);\n\n jni\n\n .expect_find_class()\n\n .withf_st(move |a_class| a_class == \"java/lang/management/ManagementFactory\")\n\n .times(1)\n\n .in_sequence(&mut seq)\n\n .return_once_st(move |_| Option::Some(c_management_factory));\n", "file_path": "jvmkill/src/action/heap_dump.rs", "rank": 96, "score": 21.66631197708912 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use std::ptr;\n\n\n\n use mockall::Sequence;\n\n\n\n use crate::bindings::{jclass, jmethodID, JNI_TRUE, jobject, jstring};\n\n use crate::jmx::HotspotDiagnosticMXBean;\n\n use crate::jni::MockJNI;\n\n\n\n #[test]\n\n fn dump_heap() {\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_hot_spot_diagnostic_mxbean = jni_type!(jclass);\n\n let i_hot_spot_diagnostic_mxbean = jni_type!(jclass);\n\n\n\n let m_dump_heap = jni_type!(jmethodID);\n", "file_path": "jvmkill/src/jmx/hotspot_diagnostic_mxbean.rs", "rank": 97, "score": 21.66591737398677 }, { "content": " use std::ptr;\n\n\n\n use mockall::Sequence;\n\n\n\n use 
crate::bindings::{jclass, jmethodID, jobject};\n\n use crate::jmx::MemoryMXBean;\n\n use crate::jni::MockJNI;\n\n\n\n #[test]\n\n fn get_heap_memory_usage() {\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_memory_mxbean = jni_type!(jclass);\n\n let i_memory_mxbean = jni_type!(jobject);\n\n\n\n let m_get_heap_memory_usage = jni_type!(jmethodID);\n\n jni\n\n .expect_get_method()\n\n .withf_st(move |&a_class, a_method, a_signature| {\n", "file_path": "jvmkill/src/jmx/memory_mxbean.rs", "rank": 98, "score": 21.621286479813428 }, { "content": " use mockall::Sequence;\n\n\n\n use crate::bindings::{jclass, jmethodID, jobject, jstring};\n\n use crate::jmx::MemoryPoolMXBean;\n\n use crate::jni::MockJNI;\n\n\n\n #[test]\n\n fn get_name() {\n\n let mut jni = MockJNI::new();\n\n let mut seq = Sequence::new();\n\n\n\n let c_memory_pool_mxbean = jni_type!(jclass);\n\n let i_memory_pool_mxbean = jni_type!(jobject);\n\n\n\n let m_get_name = jni_type!(jmethodID);\n\n jni\n\n .expect_get_method()\n\n .withf_st(move |&a_class, a_method, a_signature| {\n\n ptr::eq(a_class, c_memory_pool_mxbean)\n\n && a_method == \"getName\"\n", "file_path": "jvmkill/src/jmx/memory_pool_mxbean.rs", "rank": 99, "score": 21.620087391529314 } ]
Rust
src/round.rs
xoac/chrono
37fb8005f196e9e67629d28c0ae84a3b9d31926a
use oldtime::Duration; use std::ops::{Add, Sub}; use Timelike; pub trait SubsecRound { fn round_subsecs(self, digits: u16) -> Self; fn trunc_subsecs(self, digits: u16) -> Self; } impl<T> SubsecRound for T where T: Timelike + Add<Duration, Output = T> + Sub<Duration, Output = T>, { fn round_subsecs(self, digits: u16) -> T { let span = span_for_digits(digits); let delta_down = self.nanosecond() % span; if delta_down > 0 { let delta_up = span - delta_down; if delta_up <= delta_down { self + Duration::nanoseconds(delta_up.into()) } else { self - Duration::nanoseconds(delta_down.into()) } } else { self } } fn trunc_subsecs(self, digits: u16) -> T { let span = span_for_digits(digits); let delta_down = self.nanosecond() % span; if delta_down > 0 { self - Duration::nanoseconds(delta_down.into()) } else { self } } } fn span_for_digits(digits: u16) -> u32 { match digits { 0 => 1_000_000_000, 1 => 100_000_000, 2 => 10_000_000, 3 => 1_000_000, 4 => 100_000, 5 => 10_000, 6 => 1_000, 7 => 100, 8 => 10, _ => 1, } } #[cfg(test)] mod tests { use super::SubsecRound; use offset::{FixedOffset, TimeZone, Utc}; use Timelike; #[test] fn test_round() { let pst = FixedOffset::east(8 * 60 * 60); let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 13, 084_660_684); assert_eq!(dt.round_subsecs(10), dt); assert_eq!(dt.round_subsecs(9), dt); assert_eq!(dt.round_subsecs(8).nanosecond(), 084_660_680); assert_eq!(dt.round_subsecs(7).nanosecond(), 084_660_700); assert_eq!(dt.round_subsecs(6).nanosecond(), 084_661_000); assert_eq!(dt.round_subsecs(5).nanosecond(), 084_660_000); assert_eq!(dt.round_subsecs(4).nanosecond(), 084_700_000); assert_eq!(dt.round_subsecs(3).nanosecond(), 085_000_000); assert_eq!(dt.round_subsecs(2).nanosecond(), 080_000_000); assert_eq!(dt.round_subsecs(1).nanosecond(), 100_000_000); assert_eq!(dt.round_subsecs(0).nanosecond(), 0); assert_eq!(dt.round_subsecs(0).second(), 13); let dt = Utc.ymd(2018, 1, 11).and_hms_nano(10, 5, 27, 750_500_000); assert_eq!(dt.round_subsecs(9), 
dt); assert_eq!(dt.round_subsecs(4), dt); assert_eq!(dt.round_subsecs(3).nanosecond(), 751_000_000); assert_eq!(dt.round_subsecs(2).nanosecond(), 750_000_000); assert_eq!(dt.round_subsecs(1).nanosecond(), 800_000_000); assert_eq!(dt.round_subsecs(0).nanosecond(), 0); assert_eq!(dt.round_subsecs(0).second(), 28); } #[test] fn test_round_leap_nanos() { let dt = Utc .ymd(2016, 12, 31) .and_hms_nano(23, 59, 59, 1_750_500_000); assert_eq!(dt.round_subsecs(9), dt); assert_eq!(dt.round_subsecs(4), dt); assert_eq!(dt.round_subsecs(2).nanosecond(), 1_750_000_000); assert_eq!(dt.round_subsecs(1).nanosecond(), 1_800_000_000); assert_eq!(dt.round_subsecs(1).second(), 59); assert_eq!(dt.round_subsecs(0).nanosecond(), 0); assert_eq!(dt.round_subsecs(0).second(), 0); } #[test] fn test_trunc() { let pst = FixedOffset::east(8 * 60 * 60); let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 13, 084_660_684); assert_eq!(dt.trunc_subsecs(10), dt); assert_eq!(dt.trunc_subsecs(9), dt); assert_eq!(dt.trunc_subsecs(8).nanosecond(), 084_660_680); assert_eq!(dt.trunc_subsecs(7).nanosecond(), 084_660_600); assert_eq!(dt.trunc_subsecs(6).nanosecond(), 084_660_000); assert_eq!(dt.trunc_subsecs(5).nanosecond(), 084_660_000); assert_eq!(dt.trunc_subsecs(4).nanosecond(), 084_600_000); assert_eq!(dt.trunc_subsecs(3).nanosecond(), 084_000_000); assert_eq!(dt.trunc_subsecs(2).nanosecond(), 080_000_000); assert_eq!(dt.trunc_subsecs(1).nanosecond(), 0); assert_eq!(dt.trunc_subsecs(0).nanosecond(), 0); assert_eq!(dt.trunc_subsecs(0).second(), 13); let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 27, 750_500_000); assert_eq!(dt.trunc_subsecs(9), dt); assert_eq!(dt.trunc_subsecs(4), dt); assert_eq!(dt.trunc_subsecs(3).nanosecond(), 750_000_000); assert_eq!(dt.trunc_subsecs(2).nanosecond(), 750_000_000); assert_eq!(dt.trunc_subsecs(1).nanosecond(), 700_000_000); assert_eq!(dt.trunc_subsecs(0).nanosecond(), 0); assert_eq!(dt.trunc_subsecs(0).second(), 27); } #[test] fn test_trunc_leap_nanos() { let dt = Utc 
.ymd(2016, 12, 31) .and_hms_nano(23, 59, 59, 1_750_500_000); assert_eq!(dt.trunc_subsecs(9), dt); assert_eq!(dt.trunc_subsecs(4), dt); assert_eq!(dt.trunc_subsecs(2).nanosecond(), 1_750_000_000); assert_eq!(dt.trunc_subsecs(1).nanosecond(), 1_700_000_000); assert_eq!(dt.trunc_subsecs(1).second(), 59); assert_eq!(dt.trunc_subsecs(0).nanosecond(), 1_000_000_000); assert_eq!(dt.trunc_subsecs(0).second(), 59); } }
use oldtime::Duration; use std::ops::{Add, Sub}; use Timelike; pub trait SubsecRound { fn round_subsecs(self, digits: u16) -> Self; fn trunc_subsecs(self, digits: u16) -> Self; } impl<T> SubsecRound for T where T: Timelike + Add<Duration, Output = T> + Sub<Duration, Output = T>, { fn round_subsecs(self, digits: u16) -> T { let span = span_for_digits(digits); let delta_down = self.nanosecond() % span; if delta_down > 0 { let delta_up = span - delta_down; if delta_up <= delta_down { self + Duration::nanoseconds(delta_up.into()) } else { self - Duration::nanoseconds(delta_down.into()) } } else { self } } fn trunc_subsecs(self, digits: u16) -> T { let span = span_for_digits(digits); let delta_down = self.nanosecond() % span; if delta_down > 0 { self - Duration::nanoseconds(delta_down.into()) } else { self } } } fn span_for_digits(digits: u16) -> u32 { match digits {
#[cfg(test)] mod tests { use super::SubsecRound; use offset::{FixedOffset, TimeZone, Utc}; use Timelike; #[test] fn test_round() { let pst = FixedOffset::east(8 * 60 * 60); let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 13, 084_660_684); assert_eq!(dt.round_subsecs(10), dt); assert_eq!(dt.round_subsecs(9), dt); assert_eq!(dt.round_subsecs(8).nanosecond(), 084_660_680); assert_eq!(dt.round_subsecs(7).nanosecond(), 084_660_700); assert_eq!(dt.round_subsecs(6).nanosecond(), 084_661_000); assert_eq!(dt.round_subsecs(5).nanosecond(), 084_660_000); assert_eq!(dt.round_subsecs(4).nanosecond(), 084_700_000); assert_eq!(dt.round_subsecs(3).nanosecond(), 085_000_000); assert_eq!(dt.round_subsecs(2).nanosecond(), 080_000_000); assert_eq!(dt.round_subsecs(1).nanosecond(), 100_000_000); assert_eq!(dt.round_subsecs(0).nanosecond(), 0); assert_eq!(dt.round_subsecs(0).second(), 13); let dt = Utc.ymd(2018, 1, 11).and_hms_nano(10, 5, 27, 750_500_000); assert_eq!(dt.round_subsecs(9), dt); assert_eq!(dt.round_subsecs(4), dt); assert_eq!(dt.round_subsecs(3).nanosecond(), 751_000_000); assert_eq!(dt.round_subsecs(2).nanosecond(), 750_000_000); assert_eq!(dt.round_subsecs(1).nanosecond(), 800_000_000); assert_eq!(dt.round_subsecs(0).nanosecond(), 0); assert_eq!(dt.round_subsecs(0).second(), 28); } #[test] fn test_round_leap_nanos() { let dt = Utc .ymd(2016, 12, 31) .and_hms_nano(23, 59, 59, 1_750_500_000); assert_eq!(dt.round_subsecs(9), dt); assert_eq!(dt.round_subsecs(4), dt); assert_eq!(dt.round_subsecs(2).nanosecond(), 1_750_000_000); assert_eq!(dt.round_subsecs(1).nanosecond(), 1_800_000_000); assert_eq!(dt.round_subsecs(1).second(), 59); assert_eq!(dt.round_subsecs(0).nanosecond(), 0); assert_eq!(dt.round_subsecs(0).second(), 0); } #[test] fn test_trunc() { let pst = FixedOffset::east(8 * 60 * 60); let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 13, 084_660_684); assert_eq!(dt.trunc_subsecs(10), dt); assert_eq!(dt.trunc_subsecs(9), dt); 
assert_eq!(dt.trunc_subsecs(8).nanosecond(), 084_660_680); assert_eq!(dt.trunc_subsecs(7).nanosecond(), 084_660_600); assert_eq!(dt.trunc_subsecs(6).nanosecond(), 084_660_000); assert_eq!(dt.trunc_subsecs(5).nanosecond(), 084_660_000); assert_eq!(dt.trunc_subsecs(4).nanosecond(), 084_600_000); assert_eq!(dt.trunc_subsecs(3).nanosecond(), 084_000_000); assert_eq!(dt.trunc_subsecs(2).nanosecond(), 080_000_000); assert_eq!(dt.trunc_subsecs(1).nanosecond(), 0); assert_eq!(dt.trunc_subsecs(0).nanosecond(), 0); assert_eq!(dt.trunc_subsecs(0).second(), 13); let dt = pst.ymd(2018, 1, 11).and_hms_nano(10, 5, 27, 750_500_000); assert_eq!(dt.trunc_subsecs(9), dt); assert_eq!(dt.trunc_subsecs(4), dt); assert_eq!(dt.trunc_subsecs(3).nanosecond(), 750_000_000); assert_eq!(dt.trunc_subsecs(2).nanosecond(), 750_000_000); assert_eq!(dt.trunc_subsecs(1).nanosecond(), 700_000_000); assert_eq!(dt.trunc_subsecs(0).nanosecond(), 0); assert_eq!(dt.trunc_subsecs(0).second(), 27); } #[test] fn test_trunc_leap_nanos() { let dt = Utc .ymd(2016, 12, 31) .and_hms_nano(23, 59, 59, 1_750_500_000); assert_eq!(dt.trunc_subsecs(9), dt); assert_eq!(dt.trunc_subsecs(4), dt); assert_eq!(dt.trunc_subsecs(2).nanosecond(), 1_750_000_000); assert_eq!(dt.trunc_subsecs(1).nanosecond(), 1_700_000_000); assert_eq!(dt.trunc_subsecs(1).second(), 59); assert_eq!(dt.trunc_subsecs(0).nanosecond(), 1_000_000_000); assert_eq!(dt.trunc_subsecs(0).second(), 59); } }
0 => 1_000_000_000, 1 => 100_000_000, 2 => 10_000_000, 3 => 1_000_000, 4 => 100_000, 5 => 10_000, 6 => 1_000, 7 => 100, 8 => 10, _ => 1, } }
function_block-function_prefix_line
[ { "content": "pub fn cycle_to_yo(cycle: u32) -> (u32, u32) {\n\n let (mut year_mod_400, mut ordinal0) = div_rem(cycle, 365);\n\n let delta = u32::from(YEAR_DELTAS[year_mod_400 as usize]);\n\n if ordinal0 < delta {\n\n year_mod_400 -= 1;\n\n ordinal0 += 365 - u32::from(YEAR_DELTAS[year_mod_400 as usize]);\n\n } else {\n\n ordinal0 -= delta;\n\n }\n\n (year_mod_400, ordinal0 + 1)\n\n}\n\n\n", "file_path": "src/naive/internals.rs", "rank": 1, "score": 153751.96330459695 }, { "content": "pub fn yo_to_cycle(year_mod_400: u32, ordinal: u32) -> u32 {\n\n year_mod_400 * 365 + u32::from(YEAR_DELTAS[year_mod_400 as usize]) + ordinal - 1\n\n}\n\n\n\nimpl YearFlags {\n\n #[inline]\n\n pub fn from_year(year: i32) -> YearFlags {\n\n let year = mod_floor(year, 400);\n\n YearFlags::from_year_mod_400(year)\n\n }\n\n\n\n #[inline]\n\n pub fn from_year_mod_400(year: i32) -> YearFlags {\n\n YEAR_TO_FLAGS[year as usize]\n\n }\n\n\n\n #[inline]\n\n pub fn ndays(&self) -> u32 {\n\n let YearFlags(flags) = *self;\n\n 366 - u32::from(flags >> 3)\n", "file_path": "src/naive/internals.rs", "rank": 2, "score": 145365.46317970392 }, { "content": "/// The common set of methods for time component.\n\npub trait Timelike: Sized {\n\n /// Returns the hour number from 0 to 23.\n\n fn hour(&self) -> u32;\n\n\n\n /// Returns the hour number from 1 to 12 with a boolean flag,\n\n /// which is false for AM and true for PM.\n\n #[inline]\n\n fn hour12(&self) -> (bool, u32) {\n\n let hour = self.hour();\n\n let mut hour12 = hour % 12;\n\n if hour12 == 0 {\n\n hour12 = 12;\n\n }\n\n (hour >= 12, hour12)\n\n }\n\n\n\n /// Returns the minute number from 0 to 59.\n\n fn minute(&self) -> u32;\n\n\n\n /// Returns the second number from 0 to 59.\n", "file_path": "src/lib.rs", "rank": 3, "score": 142521.68956586244 }, { "content": "/// The common set of methods for date component.\n\npub trait Datelike: Sized {\n\n /// Returns the year number in the [calendar 
date](./naive/struct.NaiveDate.html#calendar-date).\n\n fn year(&self) -> i32;\n\n\n\n /// Returns the absolute year number starting from 1 with a boolean flag,\n\n /// which is false when the year predates the epoch (BCE/BC) and true otherwise (CE/AD).\n\n #[inline]\n\n fn year_ce(&self) -> (bool, u32) {\n\n let year = self.year();\n\n if year < 1 {\n\n (false, (1 - year) as u32)\n\n } else {\n\n (true, year as u32)\n\n }\n\n }\n\n\n\n /// Returns the month number starting from 1.\n\n ///\n\n /// The return value ranges from 1 to 12.\n\n fn month(&self) -> u32;\n", "file_path": "src/lib.rs", "rank": 5, "score": 109385.99890921396 }, { "content": "/// The time zone.\n\n///\n\n/// The methods here are the primarily constructors for [`Date`](../struct.Date.html) and\n\n/// [`DateTime`](../struct.DateTime.html) types.\n\npub trait TimeZone: Sized + Clone {\n\n /// An associated offset type.\n\n /// This type is used to store the actual offset in date and time types.\n\n /// The original `TimeZone` value can be recovered via `TimeZone::from_offset`.\n\n type Offset: Offset;\n\n\n\n /// Makes a new `Date` from year, month, day and the current time zone.\n\n /// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.\n\n ///\n\n /// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),\n\n /// but it will propagate to the `DateTime` values constructed via this date.\n\n ///\n\n /// Panics on the out-of-range date, invalid month and/or day.\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{Utc, TimeZone};\n\n ///\n\n /// assert_eq!(Utc.ymd(2015, 5, 15).to_string(), \"2015-05-15UTC\");\n", "file_path": "src/offset/mod.rs", "rank": 6, "score": 97535.79789727365 }, { "content": "/// Tries to format given arguments with given formatting items.\n\n/// Internally used by `DelayedFormat`.\n\npub fn format<'a, I>(\n\n w: &mut fmt::Formatter,\n\n date: Option<&NaiveDate>,\n\n time: Option<&NaiveTime>,\n\n 
off: Option<&(String, FixedOffset)>,\n\n items: I,\n\n) -> fmt::Result\n\nwhere\n\n I: Iterator<Item = Item<'a>>,\n\n{\n\n // full and abbreviated month and weekday names\n\n static SHORT_MONTHS: [&'static str; 12] = [\n\n \"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\",\n\n ];\n\n static LONG_MONTHS: [&'static str; 12] = [\n\n \"January\",\n\n \"February\",\n\n \"March\",\n\n \"April\",\n\n \"May\",\n", "file_path": "src/format/mod.rs", "rank": 7, "score": 97008.02123777449 }, { "content": "/// Tries to consume a fixed number of digits as a fractional second.\n\n/// Returns the number of whole nanoseconds (0--999,999,999).\n\npub fn nanosecond_fixed(s: &str, digits: usize) -> ParseResult<(&str, i64)> {\n\n // record the number of digits consumed for later scaling.\n\n let (s, v) = try!(number(s, digits, digits));\n\n\n\n // scale the number accordingly.\n\n static SCALE: [i64; 10] = [\n\n 0,\n\n 100_000_000,\n\n 10_000_000,\n\n 1_000_000,\n\n 100_000,\n\n 10_000,\n\n 1_000,\n\n 100,\n\n 10,\n\n 1,\n\n ];\n\n let v = try!(v.checked_mul(SCALE[digits]).ok_or(OUT_OF_RANGE));\n\n\n\n Ok((s, v))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 8, "score": 93123.11738254587 }, { "content": "/// The offset from the local time to UTC.\n\npub trait Offset: Sized + Clone + fmt::Debug {\n\n /// Returns the fixed offset from UTC to the local time stored.\n\n fn fix(&self) -> FixedOffset;\n\n}\n\n\n", "file_path": "src/offset/mod.rs", "rank": 9, "score": 90375.39093708302 }, { "content": "/// Tries to consume one or more whitespace.\n\npub fn space(s: &str) -> ParseResult<&str> {\n\n let s_ = s.trim_left();\n\n if s_.len() < s.len() {\n\n Ok(s_)\n\n } else if s.is_empty() {\n\n Err(TOO_SHORT)\n\n } else {\n\n Err(INVALID)\n\n }\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 10, "score": 81943.2965108444 }, { "content": "/// Consumes any number (including zero) of colon or spaces.\n\npub fn 
colon_or_space(s: &str) -> ParseResult<&str> {\n\n Ok(s.trim_left_matches(|c: char| c == ':' || c.is_whitespace()))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 11, "score": 79675.95920017613 }, { "content": "/// Tries to consume at least one digits as a fractional second.\n\n/// Returns the number of whole nanoseconds (0--999,999,999).\n\npub fn nanosecond(s: &str) -> ParseResult<(&str, i64)> {\n\n // record the number of digits consumed for later scaling.\n\n let origlen = s.len();\n\n let (s, v) = try!(number(s, 1, 9));\n\n let consumed = origlen - s.len();\n\n\n\n // scale the number accordingly.\n\n static SCALE: [i64; 10] = [\n\n 0,\n\n 100_000_000,\n\n 10_000_000,\n\n 1_000_000,\n\n 100_000,\n\n 10_000,\n\n 1_000,\n\n 100,\n\n 10,\n\n 1,\n\n ];\n\n let v = try!(v.checked_mul(SCALE[consumed]).ok_or(OUT_OF_RANGE));\n\n\n\n // if there are more than 9 digits, skip next digits.\n\n let s = s.trim_left_matches(|c: char| '0' <= c && c <= '9');\n\n\n\n Ok((s, v))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 12, "score": 78174.2281126159 }, { "content": "/// Returns the corresponding `IsoWeek` from the year and the `Of` internal value.\n\n//\n\n// internal use only. 
we don't expose the public constructor for `IsoWeek` for now,\n\n// because the year range for the week date and the calendar date do not match and\n\n// it is confusing to have a date that is out of range in one and not in another.\n\n// currently we sidestep this issue by making `IsoWeek` fully dependent of `Datelike`.\n\npub fn iso_week_from_yof(year: i32, of: Of) -> IsoWeek {\n\n let (rawweek, _) = of.isoweekdate_raw();\n\n let (year, week) = if rawweek < 1 {\n\n // previous year\n\n let prevlastweek = YearFlags::from_year(year - 1).nisoweeks();\n\n (year - 1, prevlastweek)\n\n } else {\n\n let lastweek = of.flags().nisoweeks();\n\n if rawweek > lastweek {\n\n // next year\n\n (year + 1, 1)\n\n } else {\n\n (year, rawweek)\n\n }\n\n };\n\n IsoWeek {\n\n ywf: (year << 10) | (week << 4) as DateImpl | DateImpl::from(of.flags().0),\n\n }\n\n}\n\n\n", "file_path": "src/naive/isoweek.rs", "rank": 13, "score": 76090.21979166522 }, { "content": "/// Tries to parse the month index (0 through 11) with the first three ASCII letters.\n\npub fn short_month0(s: &str) -> ParseResult<(&str, u8)> {\n\n if s.len() < 3 {\n\n return Err(TOO_SHORT);\n\n }\n\n let buf = s.as_bytes();\n\n let month0 = match (buf[0] | 32, buf[1] | 32, buf[2] | 32) {\n\n (b'j', b'a', b'n') => 0,\n\n (b'f', b'e', b'b') => 1,\n\n (b'm', b'a', b'r') => 2,\n\n (b'a', b'p', b'r') => 3,\n\n (b'm', b'a', b'y') => 4,\n\n (b'j', b'u', b'n') => 5,\n\n (b'j', b'u', b'l') => 6,\n\n (b'a', b'u', b'g') => 7,\n\n (b's', b'e', b'p') => 8,\n\n (b'o', b'c', b't') => 9,\n\n (b'n', b'o', b'v') => 10,\n\n (b'd', b'e', b'c') => 11,\n\n _ => return Err(INVALID),\n\n };\n\n Ok((&s[3..], month0))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 14, "score": 76083.91849023243 }, { "content": "/// Tries to parse the weekday with the first three ASCII letters.\n\npub fn short_weekday(s: &str) -> ParseResult<(&str, Weekday)> {\n\n if s.len() < 3 {\n\n return Err(TOO_SHORT);\n\n }\n\n let buf = s.as_bytes();\n\n let weekday 
= match (buf[0] | 32, buf[1] | 32, buf[2] | 32) {\n\n (b'm', b'o', b'n') => Weekday::Mon,\n\n (b't', b'u', b'e') => Weekday::Tue,\n\n (b'w', b'e', b'd') => Weekday::Wed,\n\n (b't', b'h', b'u') => Weekday::Thu,\n\n (b'f', b'r', b'i') => Weekday::Fri,\n\n (b's', b'a', b't') => Weekday::Sat,\n\n (b's', b'u', b'n') => Weekday::Sun,\n\n _ => return Err(INVALID),\n\n };\n\n Ok((&s[3..], weekday))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 15, "score": 76083.91849023243 }, { "content": "/// Tries to consume exactly one given character.\n\npub fn char(s: &str, c1: u8) -> ParseResult<&str> {\n\n match s.as_bytes().first() {\n\n Some(&c) if c == c1 => Ok(&s[1..]),\n\n Some(_) => Err(INVALID),\n\n None => Err(TOO_SHORT),\n\n }\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 16, "score": 74733.90091995487 }, { "content": "/// Tries to parse the weekday with short or long weekday names.\n\n/// It prefers long weekday names to short weekday names when both are possible.\n\npub fn short_or_long_weekday(s: &str) -> ParseResult<(&str, Weekday)> {\n\n // lowercased weekday names, minus first three chars\n\n static LONG_WEEKDAY_SUFFIXES: [&'static str; 7] =\n\n [\"day\", \"sday\", \"nesday\", \"rsday\", \"day\", \"urday\", \"day\"];\n\n\n\n let (mut s, weekday) = try!(short_weekday(s));\n\n\n\n // tries to consume the suffix if possible\n\n let suffix = LONG_WEEKDAY_SUFFIXES[weekday.num_days_from_monday() as usize];\n\n if s.len() >= suffix.len() && equals(&s[..suffix.len()], suffix) {\n\n s = &s[suffix.len()..];\n\n }\n\n\n\n Ok((s, weekday))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 17, "score": 74158.55676945407 }, { "content": "/// Tries to parse the month index (0 through 11) with short or long month names.\n\n/// It prefers long month names to short month names when both are possible.\n\npub fn short_or_long_month0(s: &str) -> ParseResult<(&str, u8)> {\n\n // lowercased month names, minus first three chars\n\n static LONG_MONTH_SUFFIXES: 
[&'static str; 12] = [\n\n \"uary\", \"ruary\", \"ch\", \"il\", \"\", \"e\", \"y\", \"ust\", \"tember\", \"ober\", \"ember\", \"ember\",\n\n ];\n\n\n\n let (mut s, month0) = try!(short_month0(s));\n\n\n\n // tries to consume the suffix if possible\n\n let suffix = LONG_MONTH_SUFFIXES[month0 as usize];\n\n if s.len() >= suffix.len() && equals(&s[..suffix.len()], suffix) {\n\n s = &s[suffix.len()..];\n\n }\n\n\n\n Ok((s, month0))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 18, "score": 74158.55676945407 }, { "content": "/// Same to `timezone_offset` but also allows for RFC 2822 legacy timezones.\n\n/// May return `None` which indicates an insufficient offset data (i.e. `-0000`).\n\npub fn timezone_offset_2822(s: &str) -> ParseResult<(&str, Option<i32>)> {\n\n // tries to parse legacy time zone names\n\n let upto = s\n\n .as_bytes()\n\n .iter()\n\n .position(|&c| match c {\n\n b'a'...b'z' | b'A'...b'Z' => false,\n\n _ => true,\n\n })\n\n .unwrap_or_else(|| s.len());\n\n if upto > 0 {\n\n let name = &s[..upto];\n\n let s = &s[upto..];\n\n let offset_hours = |o| Ok((s, Some(o * 3600)));\n\n if equals(name, \"gmt\") || equals(name, \"ut\") {\n\n offset_hours(0)\n\n } else if equals(name, \"edt\") {\n\n offset_hours(-4)\n\n } else if equals(name, \"est\") || equals(name, \"cdt\") {\n\n offset_hours(-5)\n", "file_path": "src/format/scan.rs", "rank": 19, "score": 72808.53919917652 }, { "content": "/// Tries to parse the non-negative number from `min` to `max` digits.\n\n///\n\n/// The absence of digits at all is an unconditional error.\n\n/// More than `max` digits are consumed up to the first `max` digits.\n\n/// Any number that does not fit in `i64` is an error.\n\npub fn number(s: &str, min: usize, max: usize) -> ParseResult<(&str, i64)> {\n\n assert!(min <= max);\n\n\n\n // limit `s` to given number of digits\n\n let mut window = s.as_bytes();\n\n if window.len() > max {\n\n window = &window[..max];\n\n }\n\n\n\n // scan digits\n\n let upto = window\n\n 
.iter()\n\n .position(|&c| c < b'0' || b'9' < c)\n\n .unwrap_or_else(|| window.len());\n\n if upto < min {\n\n return Err(if window.is_empty() {\n\n TOO_SHORT\n\n } else {\n\n INVALID\n\n });\n\n }\n\n\n\n // we can overflow here, which is the only possible cause of error from `parse`.\n\n let v: i64 = try!(s[..upto].parse().map_err(|_| OUT_OF_RANGE));\n\n Ok((&s[upto..], v))\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 20, "score": 66052.92568031265 }, { "content": "/// Tries to parse given string into `parsed` with given formatting items.\n\n/// Returns `Ok` when the entire string has been parsed (otherwise `parsed` should not be used).\n\n/// There should be no trailing string after parsing;\n\n/// use a stray [`Item::Space`](./enum.Item.html#variant.Space) to trim whitespaces.\n\n///\n\n/// This particular date and time parser is:\n\n///\n\n/// - Greedy. It will consume the longest possible prefix.\n\n/// For example, `April` is always consumed entirely when the long month name is requested;\n\n/// it equally accepts `Apr`, but prefers the longer prefix in this case.\n\n///\n\n/// - Padding-agnostic (for numeric items).\n\n/// The [`Pad`](./enum.Pad.html) field is completely ignored,\n\n/// so one can prepend any number of whitespace then any number of zeroes before numbers.\n\n///\n\n/// - (Still) obeying the intrinsic parsing width. This allows, for example, parsing `HHMMSS`.\n\npub fn parse<'a, I>(parsed: &mut Parsed, mut s: &str, items: I) -> ParseResult<()>\n\nwhere\n\n I: Iterator<Item = Item<'a>>,\n\n{\n\n macro_rules! 
try_consume {\n\n ($e:expr) => {{\n\n let (s_, v) = try!($e);\n\n s = s_;\n\n v\n\n }};\n\n }\n\n\n\n for item in items {\n\n match item {\n\n Item::Literal(prefix) => {\n\n if s.len() < prefix.len() {\n\n return Err(TOO_SHORT);\n\n }\n\n if !s.starts_with(prefix) {\n\n return Err(INVALID);\n", "file_path": "src/format/parse.rs", "rank": 21, "score": 65946.14210967586 }, { "content": "/// Tries to parse `[-+]\\d\\d` continued by `\\d\\d`. Return an offset in seconds if possible.\n\n///\n\n/// The additional `colon` may be used to parse a mandatory or optional `:`\n\n/// between hours and minutes, and should return either a new suffix or `Err` when parsing fails.\n\npub fn timezone_offset<F>(s: &str, consume_colon: F) -> ParseResult<(&str, i32)>\n\nwhere\n\n F: FnMut(&str) -> ParseResult<&str>,\n\n{\n\n timezone_offset_internal(s, consume_colon, false)\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 22, "score": 65513.38210836131 }, { "content": "/// Same to `timezone_offset` but also allows for `z`/`Z` which is same to `+00:00`.\n\npub fn timezone_offset_zulu<F>(s: &str, colon: F) -> ParseResult<(&str, i32)>\n\nwhere\n\n F: FnMut(&str) -> ParseResult<&str>,\n\n{\n\n match s.as_bytes().first() {\n\n Some(&b'z') | Some(&b'Z') => Ok((&s[1..], 0)),\n\n _ => timezone_offset(s, colon),\n\n }\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 23, "score": 65509.835188973055 }, { "content": "/// Same to `timezone_offset` but also allows for `z`/`Z` which is same to\n\n/// `+00:00`, and allows missing minutes entirely.\n\npub fn timezone_offset_permissive<F>(s: &str, colon: F) -> ParseResult<(&str, i32)>\n\nwhere\n\n F: FnMut(&str) -> ParseResult<&str>,\n\n{\n\n match s.as_bytes().first() {\n\n Some(&b'z') | Some(&b'Z') => Ok((&s[1..], 0)),\n\n _ => timezone_offset_internal(s, colon, true),\n\n }\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 24, "score": 65509.835188973055 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_parse() {\n\n use 
super::IMPOSSIBLE;\n\n use super::*;\n\n\n\n // workaround for Rust issue #22255\n\n fn parse_all(s: &str, items: &[Item]) -> ParseResult<Parsed> {\n\n let mut parsed = Parsed::new();\n\n try!(parse(&mut parsed, s, items.iter().cloned()));\n\n Ok(parsed)\n\n }\n\n\n\n macro_rules! check {\n\n ($fmt:expr, $items:expr; $err:tt) => (\n\n assert_eq!(parse_all($fmt, &$items), Err($err))\n\n );\n\n ($fmt:expr, $items:expr; $($k:ident: $v:expr),*) => (#[allow(unused_mut)] {\n\n let mut expected = Parsed::new();\n\n $(expected.$k = Some($v);)*\n\n assert_eq!(parse_all($fmt, &$items), Ok(expected))\n\n });\n", "file_path": "src/format/parse.rs", "rank": 25, "score": 47675.546365735725 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn parse_rfc850() {\n\n use {TimeZone, Utc};\n\n\n\n static RFC850_FMT: &'static str = \"%A, %d-%b-%y %T GMT\";\n\n\n\n let dt_str = \"Sunday, 06-Nov-94 08:49:37 GMT\";\n\n let dt = Utc.ymd(1994, 11, 6).and_hms(8, 49, 37);\n\n\n\n // Check that the format is what we expect\n\n assert_eq!(dt.format(RFC850_FMT).to_string(), dt_str);\n\n\n\n // Check that it parses correctly\n\n assert_eq!(\n\n Ok(dt),\n\n Utc.datetime_from_str(\"Sunday, 06-Nov-94 08:49:37 GMT\", RFC850_FMT)\n\n );\n\n\n\n // Check that the rest of the weekdays parse correctly (this test originally failed because\n\n // Sunday parsed incorrectly).\n\n let testdates = [\n", "file_path": "src/format/parse.rs", "rank": 26, "score": 47675.546365735725 }, { "content": "#[test]\n\nfn test_auto_conversion() {\n\n let utc_dt = Utc.ymd(2018, 9, 5).and_hms(23, 58, 0);\n\n let cdt_dt = FixedOffset::west(5 * 60 * 60)\n\n .ymd(2018, 9, 5)\n\n .and_hms(18, 58, 0);\n\n let utc_dt2: DateTime<Utc> = cdt_dt.into();\n\n assert_eq!(utc_dt, utc_dt2);\n\n}\n\n\n", "file_path": "src/datetime.rs", "rank": 27, "score": 47675.546365735725 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_rfc2822() {\n\n use super::NOT_ENOUGH;\n\n use super::*;\n\n use offset::FixedOffset;\n\n use DateTime;\n\n\n\n // Test data 
- (input, Ok(expected result after parse and format) or Err(error code))\n\n let testdates = [\n\n (\n\n \"Tue, 20 Jan 2015 17:35:20 -0800\",\n\n Ok(\"Tue, 20 Jan 2015 17:35:20 -0800\"),\n\n ), // normal case\n\n (\n\n \"Fri, 2 Jan 2015 17:35:20 -0800\",\n\n Ok(\"Fri, 02 Jan 2015 17:35:20 -0800\"),\n\n ), // folding whitespace\n\n (\n\n \"Fri, 02 Jan 2015 17:35:20 -0800\",\n\n Ok(\"Fri, 02 Jan 2015 17:35:20 -0800\"),\n\n ), // leading zero\n", "file_path": "src/format/parse.rs", "rank": 28, "score": 47675.546365735725 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_rfc3339() {\n\n use super::*;\n\n use offset::FixedOffset;\n\n use DateTime;\n\n\n\n // Test data - (input, Ok(expected result after parse and format) or Err(error code))\n\n let testdates = [\n\n (\"2015-01-20T17:35:20-08:00\", Ok(\"2015-01-20T17:35:20-08:00\")), // normal case\n\n (\"1944-06-06T04:04:00Z\", Ok(\"1944-06-06T04:04:00+00:00\")), // D-day\n\n (\"2001-09-11T09:45:00-08:00\", Ok(\"2001-09-11T09:45:00-08:00\")),\n\n (\n\n \"2015-01-20T17:35:20.001-08:00\",\n\n Ok(\"2015-01-20T17:35:20.001-08:00\"),\n\n ),\n\n (\n\n \"2015-01-20T17:35:20.000031-08:00\",\n\n Ok(\"2015-01-20T17:35:20.000031-08:00\"),\n\n ),\n\n (\n\n \"2015-01-20T17:35:20.000000004-08:00\",\n", "file_path": "src/format/parse.rs", "rank": 29, "score": 47675.546365735725 }, { "content": "#[test]\n\nfn test_readme_doomsday() {\n\n use num_iter::range_inclusive;\n\n\n\n for y in range_inclusive(naive::MIN_DATE.year(), naive::MAX_DATE.year()) {\n\n // even months\n\n let d4 = NaiveDate::from_ymd(y, 4, 4);\n\n let d6 = NaiveDate::from_ymd(y, 6, 6);\n\n let d8 = NaiveDate::from_ymd(y, 8, 8);\n\n let d10 = NaiveDate::from_ymd(y, 10, 10);\n\n let d12 = NaiveDate::from_ymd(y, 12, 12);\n\n\n\n // nine to five, seven-eleven\n\n let d59 = NaiveDate::from_ymd(y, 5, 9);\n\n let d95 = NaiveDate::from_ymd(y, 9, 5);\n\n let d711 = NaiveDate::from_ymd(y, 7, 11);\n\n let d117 = NaiveDate::from_ymd(y, 11, 7);\n\n\n\n // \"March 0\"\n\n let d30 = 
NaiveDate::from_ymd(y, 3, 1).pred();\n\n\n\n let weekday = d30.weekday();\n\n let other_dates = [d4, d6, d8, d10, d12, d59, d95, d711, d117];\n\n assert!(other_dates.iter().all(|d| d.weekday() == weekday));\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 30, "score": 47675.546365735725 }, { "content": "#[test]\n\nfn test_date_bounds() {\n\n let calculated_min = NaiveDate::from_ymd(MIN_YEAR, 1, 1);\n\n let calculated_max = NaiveDate::from_ymd(MAX_YEAR, 12, 31);\n\n assert!(\n\n MIN_DATE == calculated_min,\n\n \"`MIN_DATE` should have a year flag {:?}\",\n\n calculated_min.of().flags()\n\n );\n\n assert!(\n\n MAX_DATE == calculated_max,\n\n \"`MAX_DATE` should have a year flag {:?}\",\n\n calculated_max.of().flags()\n\n );\n\n\n\n // let's also check that the entire range do not exceed 2^44 seconds\n\n // (sometimes used for bounding `Duration` against overflow)\n\n let maxsecs = MAX_DATE.signed_duration_since(MIN_DATE).num_seconds();\n\n let maxsecs = maxsecs + 86401; // also take care of DateTime\n\n assert!(\n\n maxsecs < (1 << MAX_BITS),\n", "file_path": "src/naive/date.rs", "rank": 31, "score": 46032.734365221375 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_strftime_items() {\n\n fn parse_and_collect<'a>(s: &'a str) -> Vec<Item<'a>> {\n\n // map any error into `[Item::Error]`. 
useful for easy testing.\n\n let items = StrftimeItems::new(s);\n\n let items = items.map(|spec| {\n\n if spec == Item::Error {\n\n None\n\n } else {\n\n Some(spec)\n\n }\n\n });\n\n items\n\n .collect::<Option<Vec<_>>>()\n\n .unwrap_or(vec![Item::Error])\n\n }\n\n\n\n assert_eq!(parse_and_collect(\"\"), []);\n\n assert_eq!(parse_and_collect(\" \\t\\n\\r \"), [sp!(\" \\t\\n\\r \")]);\n\n assert_eq!(parse_and_collect(\"hello?\"), [lit!(\"hello?\")]);\n\n assert_eq!(\n", "file_path": "src/format/strftime.rs", "rank": 32, "score": 46032.734365221375 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_strftime_docs() {\n\n use {FixedOffset, TimeZone, Timelike};\n\n\n\n let dt = FixedOffset::east(34200)\n\n .ymd(2001, 7, 8)\n\n .and_hms_nano(0, 34, 59, 1_026_490_708);\n\n\n\n // date specifiers\n\n assert_eq!(dt.format(\"%Y\").to_string(), \"2001\");\n\n assert_eq!(dt.format(\"%C\").to_string(), \"20\");\n\n assert_eq!(dt.format(\"%y\").to_string(), \"01\");\n\n assert_eq!(dt.format(\"%m\").to_string(), \"07\");\n\n assert_eq!(dt.format(\"%b\").to_string(), \"Jul\");\n\n assert_eq!(dt.format(\"%B\").to_string(), \"July\");\n\n assert_eq!(dt.format(\"%h\").to_string(), \"Jul\");\n\n assert_eq!(dt.format(\"%d\").to_string(), \"08\");\n\n assert_eq!(dt.format(\"%e\").to_string(), \" 8\");\n\n assert_eq!(dt.format(\"%e\").to_string(), dt.format(\"%_d\").to_string());\n\n assert_eq!(dt.format(\"%a\").to_string(), \"Sun\");\n\n assert_eq!(dt.format(\"%A\").to_string(), \"Sunday\");\n", "file_path": "src/format/strftime.rs", "rank": 33, "score": 46032.734365221375 }, { "content": "fn timezone_offset_internal<F>(\n\n mut s: &str,\n\n mut consume_colon: F,\n\n allow_missing_minutes: bool,\n\n) -> ParseResult<(&str, i32)>\n\nwhere\n\n F: FnMut(&str) -> ParseResult<&str>,\n\n{\n\n fn digits(s: &str) -> ParseResult<(u8, u8)> {\n\n let b = s.as_bytes();\n\n if b.len() < 2 {\n\n Err(TOO_SHORT)\n\n } else {\n\n Ok((b[0], b[1]))\n\n }\n\n }\n\n let negative = match s.as_bytes().first() 
{\n\n Some(&b'+') => false,\n\n Some(&b'-') => true,\n\n Some(_) => return Err(INVALID),\n", "file_path": "src/format/scan.rs", "rank": 34, "score": 43430.26768013595 }, { "content": "#[inline]\n\nfn div_floor_64(this: i64, other: i64) -> i64 {\n\n match div_rem_64(this, other) {\n\n (d, r) if (r > 0 && other < 0) || (r < 0 && other > 0) => d - 1,\n\n (d, _) => d,\n\n }\n\n}\n\n\n", "file_path": "src/oldtime.rs", "rank": 35, "score": 40256.52615873323 }, { "content": "#[inline]\n\nfn mod_floor_64(this: i64, other: i64) -> i64 {\n\n match this % other {\n\n r if (r > 0 && other < 0) || (r < 0 && other > 0) => r + other,\n\n r => r,\n\n }\n\n}\n\n\n", "file_path": "src/oldtime.rs", "rank": 36, "score": 40256.52615873323 }, { "content": "/// Returns true when two slices are equal case-insensitively (in ASCII).\n\n/// Assumes that the `pattern` is already converted to lower case.\n\nfn equals(s: &str, pattern: &str) -> bool {\n\n let mut xs = s.as_bytes().iter().map(|&c| match c {\n\n b'A'...b'Z' => c + 32,\n\n _ => c,\n\n });\n\n let mut ys = pattern.as_bytes().iter().cloned();\n\n loop {\n\n match (xs.next(), ys.next()) {\n\n (None, None) => return true,\n\n (None, _) | (_, None) => return false,\n\n (Some(x), Some(y)) if x != y => return false,\n\n _ => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/format/scan.rs", "rank": 37, "score": 38905.42687563268 }, { "content": "#[inline]\n\nfn div_rem_64(this: i64, other: i64) -> (i64, i64) {\n\n (this / other, this % other)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{Duration, OutOfRangeError, MAX, MIN};\n\n use std::time::Duration as StdDuration;\n\n use std::{i32, i64};\n\n\n\n #[test]\n\n fn test_duration() {\n\n assert!(Duration::seconds(1) != Duration::zero());\n\n assert_eq!(\n\n Duration::seconds(1) + Duration::seconds(2),\n\n Duration::seconds(3)\n\n );\n\n assert_eq!(\n\n Duration::seconds(86399) + Duration::seconds(4),\n\n Duration::days(1) + Duration::seconds(3)\n", "file_path": "src/oldtime.rs", 
"rank": 38, "score": 38328.875881710424 }, { "content": "#[cfg(all(test, any(feature = \"rustc-serialize\", feature = \"serde\")))]\n\nfn test_encodable_json<F, E>(to_string: F)\n\nwhere\n\n F: Fn(&NaiveDate) -> Result<String, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n assert_eq!(\n\n to_string(&NaiveDate::from_ymd(2014, 7, 24)).ok(),\n\n Some(r#\"\"2014-07-24\"\"#.into())\n\n );\n\n assert_eq!(\n\n to_string(&NaiveDate::from_ymd(0, 1, 1)).ok(),\n\n Some(r#\"\"0000-01-01\"\"#.into())\n\n );\n\n assert_eq!(\n\n to_string(&NaiveDate::from_ymd(-1, 12, 31)).ok(),\n\n Some(r#\"\"-0001-12-31\"\"#.into())\n\n );\n\n assert_eq!(to_string(&MIN_DATE).ok(), Some(r#\"\"-262144-01-01\"\"#.into()));\n\n assert_eq!(to_string(&MAX_DATE).ok(), Some(r#\"\"+262143-12-31\"\"#.into()));\n\n}\n\n\n", "file_path": "src/naive/date.rs", "rank": 39, "score": 37188.475913689006 }, { "content": "#[cfg(all(test, any(feature = \"rustc-serialize\", feature = \"serde\")))]\n\nfn test_decodable_json<F, E>(from_str: F)\n\nwhere\n\n F: Fn(&str) -> Result<NaiveDateTime, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n use naive::{MAX_DATE, MIN_DATE};\n\n\n\n assert_eq!(\n\n from_str(r#\"\"2016-07-08T09:10:48.090\"\"#).ok(),\n\n Some(NaiveDate::from_ymd(2016, 7, 8).and_hms_milli(9, 10, 48, 90))\n\n );\n\n assert_eq!(\n\n from_str(r#\"\"2016-7-8T9:10:48.09\"\"#).ok(),\n\n Some(NaiveDate::from_ymd(2016, 7, 8).and_hms_milli(9, 10, 48, 90))\n\n );\n\n assert_eq!(\n\n from_str(r#\"\"2014-07-24T12:34:06\"\"#).ok(),\n\n Some(NaiveDate::from_ymd(2014, 7, 24).and_hms(12, 34, 6))\n\n );\n\n assert_eq!(\n", "file_path": "src/naive/datetime.rs", "rank": 40, "score": 37188.475913689006 }, { "content": "#[cfg(all(test, any(feature = \"rustc-serialize\", feature = \"serde\")))]\n\nfn test_encodable_json<F, E>(to_string: F)\n\nwhere\n\n F: Fn(&NaiveTime) -> Result<String, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n assert_eq!(\n\n to_string(&NaiveTime::from_hms(0, 0, 0)).ok(),\n\n Some(r#\"\"00:00:00\"\"#.into())\n\n );\n\n 
assert_eq!(\n\n to_string(&NaiveTime::from_hms_milli(0, 0, 0, 950)).ok(),\n\n Some(r#\"\"00:00:00.950\"\"#.into())\n\n );\n\n assert_eq!(\n\n to_string(&NaiveTime::from_hms_milli(0, 0, 59, 1_000)).ok(),\n\n Some(r#\"\"00:00:60\"\"#.into())\n\n );\n\n assert_eq!(\n\n to_string(&NaiveTime::from_hms(0, 1, 2)).ok(),\n\n Some(r#\"\"00:01:02\"\"#.into())\n", "file_path": "src/naive/time.rs", "rank": 41, "score": 37188.475913689006 }, { "content": "#[cfg(all(test, any(feature = \"rustc-serialize\", feature = \"serde\")))]\n\nfn test_decodable_json<F, E>(from_str: F)\n\nwhere\n\n F: Fn(&str) -> Result<NaiveDate, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n use std::{i32, i64};\n\n\n\n assert_eq!(\n\n from_str(r#\"\"2016-07-08\"\"#).ok(),\n\n Some(NaiveDate::from_ymd(2016, 7, 8))\n\n );\n\n assert_eq!(\n\n from_str(r#\"\"2016-7-8\"\"#).ok(),\n\n Some(NaiveDate::from_ymd(2016, 7, 8))\n\n );\n\n assert_eq!(\n\n from_str(r#\"\"+002016-07-08\"\"#).ok(),\n\n Some(NaiveDate::from_ymd(2016, 7, 8))\n\n );\n\n assert_eq!(\n", "file_path": "src/naive/date.rs", "rank": 42, "score": 37188.475913689006 }, { "content": "#[cfg(all(test, any(feature = \"rustc-serialize\", feature = \"serde\")))]\n\nfn test_decodable_json<F, E>(from_str: F)\n\nwhere\n\n F: Fn(&str) -> Result<NaiveTime, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n assert_eq!(\n\n from_str(r#\"\"00:00:00\"\"#).ok(),\n\n Some(NaiveTime::from_hms(0, 0, 0))\n\n );\n\n assert_eq!(\n\n from_str(r#\"\"0:0:0\"\"#).ok(),\n\n Some(NaiveTime::from_hms(0, 0, 0))\n\n );\n\n assert_eq!(\n\n from_str(r#\"\"00:00:00.950\"\"#).ok(),\n\n Some(NaiveTime::from_hms_milli(0, 0, 0, 950))\n\n );\n\n assert_eq!(\n\n from_str(r#\"\"0:0:0.95\"\"#).ok(),\n\n Some(NaiveTime::from_hms_milli(0, 0, 0, 950))\n", "file_path": "src/naive/time.rs", "rank": 43, "score": 37188.475913689006 }, { "content": "#[cfg(all(test, any(feature = \"rustc-serialize\", feature = \"serde\")))]\n\nfn test_encodable_json<F, E>(to_string: F)\n\nwhere\n\n F: Fn(&NaiveDateTime) -> 
Result<String, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n use naive::{MAX_DATE, MIN_DATE};\n\n\n\n assert_eq!(\n\n to_string(&NaiveDate::from_ymd(2016, 7, 8).and_hms_milli(9, 10, 48, 90)).ok(),\n\n Some(r#\"\"2016-07-08T09:10:48.090\"\"#.into())\n\n );\n\n assert_eq!(\n\n to_string(&NaiveDate::from_ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),\n\n Some(r#\"\"2014-07-24T12:34:06\"\"#.into())\n\n );\n\n assert_eq!(\n\n to_string(&NaiveDate::from_ymd(0, 1, 1).and_hms_milli(0, 0, 59, 1_000)).ok(),\n\n Some(r#\"\"0000-01-01T00:00:60\"\"#.into())\n\n );\n\n assert_eq!(\n", "file_path": "src/naive/datetime.rs", "rank": 44, "score": 37188.475913689006 }, { "content": "#[inline]\n\nfn div_mod_floor_64(this: i64, other: i64) -> (i64, i64) {\n\n (div_floor_64(this, other), mod_floor_64(this, other))\n\n}\n\n\n", "file_path": "src/oldtime.rs", "rank": 45, "score": 37095.31545022749 }, { "content": "#[cfg(all(test, feature = \"rustc-serialize\"))]\n\nfn test_decodable_json_timestamp<F, E>(from_str: F)\n\nwhere\n\n F: Fn(&str) -> Result<rustc_serialize::TsSeconds, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n assert_eq!(\n\n *from_str(\"0\").unwrap(),\n\n NaiveDate::from_ymd(1970, 1, 1).and_hms(0, 0, 0),\n\n \"should parse integers as timestamps\"\n\n );\n\n assert_eq!(\n\n *from_str(\"-1\").unwrap(),\n\n NaiveDate::from_ymd(1969, 12, 31).and_hms(23, 59, 59),\n\n \"should parse integers as timestamps\"\n\n );\n\n}\n\n\n\n#[cfg(feature = \"rustc-serialize\")]\n\npub mod rustc_serialize {\n\n use super::NaiveDateTime;\n", "file_path": "src/naive/datetime.rs", "rank": 46, "score": 36148.25016608445 }, { "content": "fn test_decodable_json<FUtc, FFixed, FLocal, E>(\n\n utc_from_str: FUtc,\n\n fixed_from_str: FFixed,\n\n local_from_str: FLocal,\n\n) where\n\n FUtc: Fn(&str) -> Result<DateTime<Utc>, E>,\n\n FFixed: Fn(&str) -> Result<DateTime<FixedOffset>, E>,\n\n FLocal: Fn(&str) -> Result<DateTime<Local>, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n // should check against the offset as well (the normal 
DateTime comparison will ignore them)\n\n fn norm<Tz: TimeZone>(dt: &Option<DateTime<Tz>>) -> Option<(&DateTime<Tz>, &Tz::Offset)> {\n\n dt.as_ref().map(|dt| (dt, dt.offset()))\n\n }\n\n\n\n assert_eq!(\n\n norm(&utc_from_str(r#\"\"2014-07-24T12:34:06Z\"\"#).ok()),\n\n norm(&Some(Utc.ymd(2014, 7, 24).and_hms(12, 34, 6)))\n\n );\n\n assert_eq!(\n", "file_path": "src/datetime.rs", "rank": 47, "score": 35188.07308587965 }, { "content": "fn add_with_leapsecond<T>(lhs: &T, rhs: i32) -> T\n\nwhere\n\n T: Timelike + Add<OldDuration, Output = T>,\n\n{\n\n // extract and temporarily remove the fractional part and later recover it\n\n let nanos = lhs.nanosecond();\n\n let lhs = lhs.with_nanosecond(0).unwrap();\n\n (lhs + OldDuration::seconds(i64::from(rhs)))\n\n .with_nanosecond(nanos)\n\n .unwrap()\n\n}\n\n\n\nimpl Add<FixedOffset> for NaiveTime {\n\n type Output = NaiveTime;\n\n\n\n #[inline]\n\n fn add(self, rhs: FixedOffset) -> NaiveTime {\n\n add_with_leapsecond(&self, rhs.local_minus_utc)\n\n }\n\n}\n", "file_path": "src/offset/fixed.rs", "rank": 48, "score": 34924.36953959988 }, { "content": "#[cfg(all(test, feature = \"clock\", feature = \"rustc-serialize\"))]\n\nfn test_decodable_json_timestamps<FUtc, FFixed, FLocal, E>(\n\n utc_from_str: FUtc,\n\n fixed_from_str: FFixed,\n\n local_from_str: FLocal,\n\n) where\n\n FUtc: Fn(&str) -> Result<rustc_serialize::TsSeconds<Utc>, E>,\n\n FFixed: Fn(&str) -> Result<rustc_serialize::TsSeconds<FixedOffset>, E>,\n\n FLocal: Fn(&str) -> Result<rustc_serialize::TsSeconds<Local>, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n fn norm<Tz: TimeZone>(dt: &Option<DateTime<Tz>>) -> Option<(&DateTime<Tz>, &Tz::Offset)> {\n\n dt.as_ref().map(|dt| (dt, dt.offset()))\n\n }\n\n\n\n assert_eq!(\n\n norm(&utc_from_str(\"0\").ok().map(DateTime::from)),\n\n norm(&Some(Utc.ymd(1970, 1, 1).and_hms(0, 0, 0)))\n\n );\n\n assert_eq!(\n\n norm(&utc_from_str(\"-1\").ok().map(DateTime::from)),\n", "file_path": "src/datetime.rs", "rank": 49, "score": 
34299.04702622871 }, { "content": "/// Converts a local `NaiveDateTime` to the `time::Timespec`.\n\nfn datetime_to_timespec(d: &NaiveDateTime, local: bool) -> oldtime::Timespec {\n\n // well, this exploits an undocumented `Tm::to_timespec` behavior\n\n // to get the exact function we want (either `timegm` or `mktime`).\n\n // the number 1 is arbitrary but should be non-zero to trigger `mktime`.\n\n let tm_utcoff = if local { 1 } else { 0 };\n\n\n\n let tm = oldtime::Tm {\n\n tm_sec: d.second() as i32,\n\n tm_min: d.minute() as i32,\n\n tm_hour: d.hour() as i32,\n\n tm_mday: d.day() as i32,\n\n tm_mon: d.month0() as i32, // yes, C is that strange...\n\n tm_year: d.year() - 1900, // this doesn't underflow, we know that d is `NaiveDateTime`.\n\n tm_wday: 0, // to_local ignores this\n\n tm_yday: 0, // and this\n\n tm_isdst: -1,\n\n tm_utcoff: tm_utcoff,\n\n // do not set this, OS APIs are heavily inconsistent in terms of leap second handling\n\n tm_nsec: 0,\n\n };\n", "file_path": "src/offset/local.rs", "rank": 50, "score": 33964.19245939509 }, { "content": "fn set_weekday_with_number_from_monday(p: &mut Parsed, v: i64) -> ParseResult<()> {\n\n p.set_weekday(match v {\n\n 1 => Weekday::Mon,\n\n 2 => Weekday::Tue,\n\n 3 => Weekday::Wed,\n\n 4 => Weekday::Thu,\n\n 5 => Weekday::Fri,\n\n 6 => Weekday::Sat,\n\n 7 => Weekday::Sun,\n\n _ => return Err(OUT_OF_RANGE),\n\n })\n\n}\n\n\n", "file_path": "src/format/parse.rs", "rank": 51, "score": 33964.19245939509 }, { "content": "/// Converts a `time::Tm` struct into the timezone-aware `DateTime`.\n\n/// This assumes that `time` is working correctly, i.e. 
any error is fatal.\n\nfn tm_to_datetime(mut tm: oldtime::Tm) -> DateTime<Local> {\n\n if tm.tm_sec >= 60 {\n\n tm.tm_nsec += (tm.tm_sec - 59) * 1_000_000_000;\n\n tm.tm_sec = 59;\n\n }\n\n\n\n #[cfg(not(windows))]\n\n fn tm_to_naive_date(tm: &oldtime::Tm) -> NaiveDate {\n\n // from_yo is more efficient than from_ymd (since it's the internal representation).\n\n NaiveDate::from_yo(tm.tm_year + 1900, tm.tm_yday as u32 + 1)\n\n }\n\n\n\n #[cfg(windows)]\n\n fn tm_to_naive_date(tm: &oldtime::Tm) -> NaiveDate {\n\n // ...but tm_yday is broken in Windows (issue #85)\n\n NaiveDate::from_ymd(tm.tm_year + 1900, tm.tm_mon as u32 + 1, tm.tm_mday as u32)\n\n }\n\n\n\n let date = tm_to_naive_date(&tm);\n\n let time = NaiveTime::from_hms_nano(\n\n tm.tm_hour as u32,\n\n tm.tm_min as u32,\n\n tm.tm_sec as u32,\n\n tm.tm_nsec as u32,\n\n );\n\n let offset = FixedOffset::east(tm.tm_utcoff);\n\n DateTime::from_utc(date.and_time(time) - offset, offset)\n\n}\n\n\n", "file_path": "src/offset/local.rs", "rank": 52, "score": 33964.19245939509 }, { "content": "fn set_weekday_with_num_days_from_sunday(p: &mut Parsed, v: i64) -> ParseResult<()> {\n\n p.set_weekday(match v {\n\n 0 => Weekday::Sun,\n\n 1 => Weekday::Mon,\n\n 2 => Weekday::Tue,\n\n 3 => Weekday::Wed,\n\n 4 => Weekday::Thu,\n\n 5 => Weekday::Fri,\n\n 6 => Weekday::Sat,\n\n _ => return Err(OUT_OF_RANGE),\n\n })\n\n}\n\n\n", "file_path": "src/format/parse.rs", "rank": 53, "score": 33075.16639974415 }, { "content": "fn parse_rfc2822<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a str, ())> {\n\n macro_rules! 
try_consume {\n\n ($e:expr) => {{\n\n let (s_, v) = try!($e);\n\n s = s_;\n\n v\n\n }};\n\n }\n\n\n\n // an adapted RFC 2822 syntax from Section 3.3 and 4.3:\n\n //\n\n // date-time = [ day-of-week \",\" ] date 1*S time *S\n\n // day-of-week = *S day-name *S\n\n // day-name = \"Mon\" / \"Tue\" / \"Wed\" / \"Thu\" / \"Fri\" / \"Sat\" / \"Sun\"\n\n // date = day month year\n\n // day = *S 1*2DIGIT *S\n\n // month = 1*S month-name 1*S\n\n // month-name = \"Jan\" / \"Feb\" / \"Mar\" / \"Apr\" / \"May\" / \"Jun\" /\n\n // \"Jul\" / \"Aug\" / \"Sep\" / \"Oct\" / \"Nov\" / \"Dec\"\n\n // year = *S 2*DIGIT *S\n", "file_path": "src/format/parse.rs", "rank": 54, "score": 30935.01736148178 }, { "content": "fn parse_rfc3339<'a>(parsed: &mut Parsed, mut s: &'a str) -> ParseResult<(&'a str, ())> {\n\n macro_rules! try_consume {\n\n ($e:expr) => {{\n\n let (s_, v) = try!($e);\n\n s = s_;\n\n v\n\n }};\n\n }\n\n\n\n // an adapted RFC 3339 syntax from Section 5.6:\n\n //\n\n // date-fullyear = 4DIGIT\n\n // date-month = 2DIGIT ; 01-12\n\n // date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year\n\n // time-hour = 2DIGIT ; 00-23\n\n // time-minute = 2DIGIT ; 00-59\n\n // time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules\n\n // time-secfrac = \".\" 1*DIGIT\n\n // time-numoffset = (\"+\" / \"-\") time-hour \":\" time-minute\n\n // time-offset = \"Z\" / time-numoffset\n", "file_path": "src/format/parse.rs", "rank": 55, "score": 30935.01736148178 }, { "content": "/// Checks if `old` is either empty or has the same value to `new` (i.e. 
\"consistent\"),\n\n/// and if it is empty, set `old` to `new` as well.\n\nfn set_if_consistent<T: PartialEq>(old: &mut Option<T>, new: T) -> ParseResult<()> {\n\n if let Some(ref old) = *old {\n\n if *old == new {\n\n Ok(())\n\n } else {\n\n Err(IMPOSSIBLE)\n\n }\n\n } else {\n\n *old = Some(new);\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Default for Parsed {\n\n fn default() -> Parsed {\n\n Parsed {\n\n year: None,\n\n year_div_100: None,\n\n year_mod_100: None,\n\n isoyear: None,\n", "file_path": "src/format/parsed.rs", "rank": 56, "score": 29359.110811954153 }, { "content": "/// Maps the local date to other date with given conversion function.\n\nfn map_local<Tz: TimeZone, F>(d: &Date<Tz>, mut f: F) -> Option<Date<Tz>>\n\nwhere\n\n F: FnMut(NaiveDate) -> Option<NaiveDate>,\n\n{\n\n f(d.naive_local()).and_then(|date| d.timezone().from_local_date(&date).single())\n\n}\n\n\n\nimpl<Tz: TimeZone> Date<Tz>\n\nwhere\n\n Tz::Offset: fmt::Display,\n\n{\n\n /// Formats the date with the specified formatting items.\n\n #[inline]\n\n pub fn format_with_items<'a, I>(&self, items: I) -> DelayedFormat<I>\n\n where\n\n I: Iterator<Item = Item<'a>> + Clone,\n\n {\n\n DelayedFormat::new_with_offset(Some(self.naive_local()), None, &self.offset, items)\n\n }\n\n\n", "file_path": "src/date.rs", "rank": 57, "score": 29018.65899734597 }, { "content": "#[cfg(all(test, any(feature = \"rustc-serialize\", feature = \"serde\")))]\n\nfn test_encodable_json<FUtc, FFixed, E>(to_string_utc: FUtc, to_string_fixed: FFixed)\n\nwhere\n\n FUtc: Fn(&DateTime<Utc>) -> Result<String, E>,\n\n FFixed: Fn(&DateTime<FixedOffset>) -> Result<String, E>,\n\n E: ::std::fmt::Debug,\n\n{\n\n assert_eq!(\n\n to_string_utc(&Utc.ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),\n\n Some(r#\"\"2014-07-24T12:34:06Z\"\"#.into())\n\n );\n\n\n\n assert_eq!(\n\n to_string_fixed(&FixedOffset::east(3660).ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),\n\n Some(r#\"\"2014-07-24T12:34:06+01:01\"\"#.into())\n\n );\n\n assert_eq!(\n\n 
to_string_fixed(&FixedOffset::east(3650).ymd(2014, 7, 24).and_hms(12, 34, 6)).ok(),\n\n Some(r#\"\"2014-07-24T12:34:06+01:00:50\"\"#.into())\n\n );\n\n}\n\n\n\n#[cfg(all(\n\n test,\n\n feature = \"clock\",\n\n any(feature = \"rustc-serialize\", feature = \"serde\")\n\n))]\n", "file_path": "src/datetime.rs", "rank": 58, "score": 28946.983955197393 }, { "content": "/// Maps the local datetime to other datetime with given conversion function.\n\nfn map_local<Tz: TimeZone, F>(dt: &DateTime<Tz>, mut f: F) -> Option<DateTime<Tz>>\n\nwhere\n\n F: FnMut(NaiveDateTime) -> Option<NaiveDateTime>,\n\n{\n\n f(dt.naive_local()).and_then(|datetime| dt.timezone().from_local_datetime(&datetime).single())\n\n}\n\n\n\nimpl DateTime<FixedOffset> {\n\n /// Parses an RFC 2822 date and time string such as `Tue, 1 Jul 2003 10:52:37 +0200`,\n\n /// then returns a new `DateTime` with a parsed `FixedOffset`.\n\n pub fn parse_from_rfc2822(s: &str) -> ParseResult<DateTime<FixedOffset>> {\n\n const ITEMS: &'static [Item<'static>] = &[Item::Fixed(Fixed::RFC2822)];\n\n let mut parsed = Parsed::new();\n\n try!(parse(&mut parsed, s, ITEMS.iter().cloned()));\n\n parsed.to_datetime()\n\n }\n\n\n\n /// Parses an RFC 3339 and ISO 8601 date and time string such as `1996-12-19T16:39:57-08:00`,\n\n /// then returns a new `DateTime` with a parsed `FixedOffset`.\n\n ///\n", "file_path": "src/datetime.rs", "rank": 59, "score": 27001.306706222204 }, { "content": " match MDL_TO_OL.get(mdl as usize) {\n\n Some(&v) => Of(mdf.wrapping_sub((i32::from(v) as u32 & 0x3ff) << 3)),\n\n None => Of(0),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn valid(&self) -> bool {\n\n let Of(of) = *self;\n\n let ol = of >> 3;\n\n MIN_OL <= ol && ol <= MAX_OL\n\n }\n\n\n\n #[inline]\n\n pub fn ordinal(&self) -> u32 {\n\n let Of(of) = *self;\n\n of >> 4\n\n }\n\n\n\n #[inline]\n", "file_path": "src/naive/internals.rs", "rank": 62, "score": 16.148898352217408 }, { "content": "\n\n/// An opaque type representing numeric item types for 
internal uses only.\n\npub struct InternalNumeric {\n\n _dummy: Void,\n\n}\n\n\n\nimpl Clone for InternalNumeric {\n\n fn clone(&self) -> Self {\n\n match self._dummy {}\n\n }\n\n}\n\n\n\nimpl PartialEq for InternalNumeric {\n\n fn eq(&self, _other: &InternalNumeric) -> bool {\n\n match self._dummy {}\n\n }\n\n}\n\n\n\nimpl Eq for InternalNumeric {}\n\n\n", "file_path": "src/format/mod.rs", "rank": 64, "score": 14.26271019548319 }, { "content": "pub use round::SubsecRound;\n\n\n\n/// A convenience module appropriate for glob imports (`use chrono::prelude::*;`).\n\npub mod prelude {\n\n #[doc(no_inline)]\n\n pub use Date;\n\n #[cfg(feature = \"clock\")]\n\n #[doc(no_inline)]\n\n pub use Local;\n\n #[doc(no_inline)]\n\n pub use SubsecRound;\n\n #[doc(no_inline)]\n\n pub use {DateTime, SecondsFormat};\n\n #[doc(no_inline)]\n\n pub use {Datelike, Timelike, Weekday};\n\n #[doc(no_inline)]\n\n pub use {FixedOffset, Utc};\n\n #[doc(no_inline)]\n\n pub use {NaiveDate, NaiveDateTime, NaiveTime};\n\n #[doc(no_inline)]\n", "file_path": "src/lib.rs", "rank": 65, "score": 13.581785501553115 }, { "content": " match OL_TO_MDL.get(ol as usize) {\n\n Some(&v) => Mdf(of + (u32::from(v) << 3)),\n\n None => Mdf(0),\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn valid(&self) -> bool {\n\n let Mdf(mdf) = *self;\n\n let mdl = mdf >> 3;\n\n match MDL_TO_OL.get(mdl as usize) {\n\n Some(&v) => v >= 0,\n\n None => false,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn month(&self) -> u32 {\n\n let Mdf(mdf) = *self;\n\n mdf >> 9\n", "file_path": "src/naive/internals.rs", "rank": 66, "score": 13.312138019658715 }, { "content": " /// Makes a new `NaiveTime` from hour, minute and second.\n\n ///\n\n /// No [leap second](#leap-second-handling) is allowed here;\n\n /// use `NaiveTime::from_hms_*` methods with a subsecond parameter instead.\n\n ///\n\n /// Panics on invalid hour, minute and/or second.\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveTime, Timelike};\n\n ///\n\n /// let t 
= NaiveTime::from_hms(23, 56, 4);\n\n /// assert_eq!(t.hour(), 23);\n\n /// assert_eq!(t.minute(), 56);\n\n /// assert_eq!(t.second(), 4);\n\n /// assert_eq!(t.nanosecond(), 0);\n\n /// ~~~~\n\n #[inline]\n\n pub fn from_hms(hour: u32, min: u32, sec: u32) -> NaiveTime {\n", "file_path": "src/naive/time.rs", "rank": 67, "score": 13.310670739495219 }, { "content": "/// Specific formatting options for seconds. This may be extended in the\n\n/// future, so exhaustive matching in external code is not recommended.\n\n///\n\n/// See the `TimeZone::to_rfc3339_opts` function for usage.\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum SecondsFormat {\n\n /// Format whole seconds only, with no decimal point nor subseconds.\n\n Secs,\n\n\n\n /// Use fixed 3 subsecond digits. This corresponds to\n\n /// [Fixed::Nanosecond3](format/enum.Fixed.html#variant.Nanosecond3).\n\n Millis,\n\n\n\n /// Use fixed 6 subsecond digits. This corresponds to\n\n /// [Fixed::Nanosecond6](format/enum.Fixed.html#variant.Nanosecond6).\n\n Micros,\n\n\n\n /// Use fixed 9 subsecond digits. This corresponds to\n\n /// [Fixed::Nanosecond9](format/enum.Fixed.html#variant.Nanosecond9).\n\n Nanos,\n", "file_path": "src/datetime.rs", "rank": 68, "score": 12.730439670853912 }, { "content": "use std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse div::{div_floor, mod_floor};\n\nuse naive::{NaiveDate, NaiveTime};\n\nuse offset::{FixedOffset, Offset};\n\nuse {Datelike, ParseWeekdayError, Timelike, Weekday};\n\n\n\npub use self::parse::parse;\n\npub use self::parsed::Parsed;\n\npub use self::strftime::StrftimeItems;\n\n\n\n/// An unhabitated type used for `InternalNumeric` and `InternalFixed` below.\n\n#[derive(Clone, PartialEq, Eq)]\n", "file_path": "src/format/mod.rs", "rank": 69, "score": 12.702400929980794 }, { "content": "\n\n /// Automatically select one of `Secs`, `Millis`, `Micros`, or `Nanos` to\n\n /// display all available non-zero sub-second digits. 
This corresponds to\n\n /// [Fixed::Nanosecond](format/enum.Fixed.html#variant.Nanosecond).\n\n AutoSi,\n\n\n\n // Do not match against this.\n\n #[doc(hidden)]\n\n __NonExhaustive,\n\n}\n\n\n\n/// ISO 8601 combined date and time with time zone.\n\n///\n\n/// There are some constructors implemented here (the `from_*` methods), but\n\n/// the general-purpose constructors are all via the methods on the\n\n/// [`TimeZone`](./offset/trait.TimeZone.html) implementations.\n\n#[derive(Clone)]\n\npub struct DateTime<Tz: TimeZone> {\n\n datetime: NaiveDateTime,\n\n offset: Tz::Offset,\n", "file_path": "src/datetime.rs", "rank": 70, "score": 12.680957977842843 }, { "content": " pub use {Offset, TimeZone};\n\n}\n\n\n\n// useful throughout the codebase\n\nmacro_rules! try_opt {\n\n ($e:expr) => {\n\n match $e {\n\n Some(v) => v,\n\n None => return None,\n\n }\n\n };\n\n}\n\n\n\nconst EPOCH_NUM_DAYS_FROM_CE: i32 = 719_163;\n\n\n\nmod div;\n\npub mod offset;\n\n#[cfg(not(feature = \"clock\"))]\n\nmod oldtime;\n\npub mod naive {\n", "file_path": "src/lib.rs", "rank": 71, "score": 12.542046662565202 }, { "content": "// This is a part of Chrono.\n\n// See README.md and LICENSE.txt for details.\n\n\n\n//! ISO 8601 date and time without timezone.\n\n\n\nuse num_traits::ToPrimitive;\n\nuse oldtime::Duration as OldDuration;\n\nuse std::ops::{Add, AddAssign, Sub, SubAssign};\n\nuse std::{fmt, hash, str};\n\n\n\nuse div::div_mod_floor;\n\nuse format::{parse, DelayedFormat, ParseError, ParseResult, Parsed, StrftimeItems};\n\nuse format::{Fixed, Item, Numeric, Pad};\n\nuse naive::{IsoWeek, NaiveDate, NaiveTime};\n\nuse {Datelike, Timelike, Weekday};\n\n\n\n/// The tight upper bound guarantees that a duration with `|Duration| >= 2^MAX_SECS_BITS`\n\n/// will always overflow the addition with any date and time type.\n\n///\n\n/// So why is this needed? 
`Duration::seconds(rhs)` may overflow, and we don't have\n", "file_path": "src/naive/datetime.rs", "rank": 72, "score": 12.490763265685056 }, { "content": " /// Makes a new `NaiveTime` from hour, minute, second and microsecond.\n\n ///\n\n /// The microsecond part can exceed 1,000,000\n\n /// in order to represent the [leap second](#leap-second-handling).\n\n ///\n\n /// Panics on invalid hour, minute, second and/or microsecond.\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveTime, Timelike};\n\n ///\n\n /// let t = NaiveTime::from_hms_micro(23, 56, 4, 12_345);\n\n /// assert_eq!(t.hour(), 23);\n\n /// assert_eq!(t.minute(), 56);\n\n /// assert_eq!(t.second(), 4);\n\n /// assert_eq!(t.nanosecond(), 12_345_000);\n\n /// ~~~~\n\n #[inline]\n\n pub fn from_hms_micro(hour: u32, min: u32, sec: u32, micro: u32) -> NaiveTime {\n", "file_path": "src/naive/time.rs", "rank": 73, "score": 12.38669121133377 }, { "content": " //! Date and time types unconcerned with timezones.\n\n //!\n\n //! They are primarily building blocks for other types\n\n //! (e.g. [`TimeZone`](../offset/trait.TimeZone.html)),\n\n //! 
but can be also used for the simpler date and time handling.\n\n\n\n mod date;\n\n mod datetime;\n\n mod internals;\n\n mod isoweek;\n\n mod time;\n\n\n\n pub use self::date::{NaiveDate, MAX_DATE, MIN_DATE};\n\n #[cfg(feature = \"rustc-serialize\")]\n\n #[allow(deprecated)]\n\n pub use self::datetime::rustc_serialize::TsSeconds;\n\n pub use self::datetime::NaiveDateTime;\n\n pub use self::isoweek::IsoWeek;\n\n pub use self::time::NaiveTime;\n\n\n", "file_path": "src/lib.rs", "rank": 74, "score": 12.233463176993462 }, { "content": " /// # extern crate chrono; extern crate time; fn main() {\n\n /// use chrono::NaiveDate;\n\n /// use chrono::naive::MIN_DATE;\n\n /// use time::Duration;\n\n ///\n\n /// let d = NaiveDate::from_ymd(2015, 9, 5);\n\n /// assert_eq!(d.checked_sub_signed(Duration::days(40)),\n\n /// Some(NaiveDate::from_ymd(2015, 7, 27)));\n\n /// assert_eq!(d.checked_sub_signed(Duration::days(-40)),\n\n /// Some(NaiveDate::from_ymd(2015, 10, 15)));\n\n /// assert_eq!(d.checked_sub_signed(Duration::days(1_000_000_000)), None);\n\n /// assert_eq!(d.checked_sub_signed(Duration::days(-1_000_000_000)), None);\n\n /// assert_eq!(MIN_DATE.checked_sub_signed(Duration::days(1)), None);\n\n /// # }\n\n /// ~~~~\n\n pub fn checked_sub_signed(self, rhs: OldDuration) -> Option<NaiveDate> {\n\n let year = self.year();\n\n let (mut year_div_400, year_mod_400) = div_mod_floor(year, 400);\n\n let cycle = internals::yo_to_cycle(year_mod_400 as u32, self.of().ordinal());\n\n let cycle = try_opt!((cycle as i32).checked_sub(try_opt!(rhs.num_days().to_i32())));\n", "file_path": "src/naive/date.rs", "rank": 75, "score": 12.08690121101262 }, { "content": " /// assert!(from_hms_opt(23, 60, 0).is_none());\n\n /// assert!(from_hms_opt(23, 59, 60).is_none());\n\n /// ~~~~\n\n #[inline]\n\n pub fn from_hms_opt(hour: u32, min: u32, sec: u32) -> Option<NaiveTime> {\n\n NaiveTime::from_hms_nano_opt(hour, min, sec, 0)\n\n }\n\n\n\n /// Makes a new `NaiveTime` from hour, minute, second and 
millisecond.\n\n ///\n\n /// The millisecond part can exceed 1,000\n\n /// in order to represent the [leap second](#leap-second-handling).\n\n ///\n\n /// Panics on invalid hour, minute, second and/or millisecond.\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveTime, Timelike};\n\n ///\n", "file_path": "src/naive/time.rs", "rank": 76, "score": 12.06281580638447 }, { "content": "// This is a part of Chrono.\n\n// See README.md and LICENSE.txt for details.\n\n\n\n//! The time zone which has a fixed offset from UTC.\n\n\n\nuse oldtime::Duration as OldDuration;\n\nuse std::fmt;\n\nuse std::ops::{Add, Sub};\n\n\n\nuse super::{LocalResult, Offset, TimeZone};\n\nuse div::div_mod_floor;\n\nuse naive::{NaiveDate, NaiveDateTime, NaiveTime};\n\nuse DateTime;\n\nuse Timelike;\n\n\n\n/// The time zone with fixed offset, from UTC-23:59:59 to UTC+23:59:59.\n\n///\n\n/// Using the [`TimeZone`](./trait.TimeZone.html) methods\n\n/// on a `FixedOffset` struct is the preferred way to construct\n\n/// `DateTime<FixedOffset>` instances. 
See the [`east`](#method.east) and\n", "file_path": "src/offset/fixed.rs", "rank": 77, "score": 12.03195848734197 }, { "content": " ///\n\n /// ~~~~\n\n /// use chrono::{NaiveTime, Timelike};\n\n ///\n\n /// let t = NaiveTime::from_hms_nano(23, 56, 4, 12_345_678);\n\n /// assert_eq!(t.hour(), 23);\n\n /// assert_eq!(t.minute(), 56);\n\n /// assert_eq!(t.second(), 4);\n\n /// assert_eq!(t.nanosecond(), 12_345_678);\n\n /// ~~~~\n\n #[inline]\n\n pub fn from_hms_nano(hour: u32, min: u32, sec: u32, nano: u32) -> NaiveTime {\n\n NaiveTime::from_hms_nano_opt(hour, min, sec, nano).expect(\"invalid time\")\n\n }\n\n\n\n /// Makes a new `NaiveTime` from hour, minute, second and nanosecond.\n\n ///\n\n /// The nanosecond part can exceed 1,000,000,000\n\n /// in order to represent the [leap second](#leap-second-handling).\n\n ///\n", "file_path": "src/naive/time.rs", "rank": 78, "score": 12.003116317379607 }, { "content": " ///\n\n /// ~~~~\n\n /// use chrono::{NaiveTime, Timelike};\n\n ///\n\n /// assert_eq!(NaiveTime::from_hms(0, 0, 0).minute(), 0);\n\n /// assert_eq!(NaiveTime::from_hms_nano(23, 56, 4, 12_345_678).minute(), 56);\n\n /// ~~~~\n\n #[inline]\n\n fn minute(&self) -> u32 {\n\n self.hms().1\n\n }\n\n\n\n /// Returns the second number from 0 to 59.\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveTime, Timelike};\n\n ///\n\n /// assert_eq!(NaiveTime::from_hms(0, 0, 0).second(), 0);\n", "file_path": "src/naive/time.rs", "rank": 79, "score": 11.706581557671091 }, { "content": " /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveDate, NaiveDateTime, Datelike, Timelike, Weekday};\n\n ///\n\n /// let d = NaiveDate::from_ymd(2015, 6, 3);\n\n ///\n\n /// let dt: NaiveDateTime = d.and_hms_milli(12, 34, 56, 789);\n\n /// assert_eq!(dt.year(), 2015);\n\n /// assert_eq!(dt.weekday(), Weekday::Wed);\n\n /// assert_eq!(dt.second(), 56);\n\n /// assert_eq!(dt.nanosecond(), 789_000_000);\n\n /// ~~~~\n\n #[inline]\n\n pub fn 
and_hms_milli(&self, hour: u32, min: u32, sec: u32, milli: u32) -> NaiveDateTime {\n\n self.and_hms_milli_opt(hour, min, sec, milli)\n\n .expect(\"invalid time\")\n\n }\n\n\n\n /// Makes a new `NaiveDateTime` from the current date, hour, minute, second and millisecond.\n", "file_path": "src/naive/date.rs", "rank": 80, "score": 11.440959408557795 }, { "content": "/// ~~~~\n\nimpl Sub<NaiveDate> for NaiveDate {\n\n type Output = OldDuration;\n\n\n\n #[inline]\n\n fn sub(self, rhs: NaiveDate) -> OldDuration {\n\n self.signed_duration_since(rhs)\n\n }\n\n}\n\n\n\n/// The `Debug` output of the naive date `d` is same to\n\n/// [`d.format(\"%Y-%m-%d\")`](../format/strftime/index.html).\n\n///\n\n/// The string printed can be readily parsed via the `parse` method on `str`.\n\n///\n\n/// # Example\n\n///\n\n/// ~~~~\n\n/// use chrono::NaiveDate;\n\n///\n", "file_path": "src/naive/date.rs", "rank": 81, "score": 11.276914181752495 }, { "content": " // it is not self-describing.\n\n use self::bincode::{deserialize, serialize, Infinite};\n\n\n\n let t = NaiveTime::from_hms_nano(3, 5, 7, 98765432);\n\n let encoded = serialize(&t, Infinite).unwrap();\n\n let decoded: NaiveTime = deserialize(&encoded).unwrap();\n\n assert_eq!(t, decoded);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::NaiveTime;\n\n use oldtime::Duration;\n\n use std::u32;\n\n use Timelike;\n\n\n\n #[test]\n\n fn test_time_from_hms_milli() {\n\n assert_eq!(\n", "file_path": "src/naive/time.rs", "rank": 82, "score": 10.9636416688552 }, { "content": "\n\nimpl Timelike for NaiveTime {\n\n /// Returns the hour number from 0 to 23.\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveTime, Timelike};\n\n ///\n\n /// assert_eq!(NaiveTime::from_hms(0, 0, 0).hour(), 0);\n\n /// assert_eq!(NaiveTime::from_hms_nano(23, 56, 4, 12_345_678).hour(), 23);\n\n /// ~~~~\n\n #[inline]\n\n fn hour(&self) -> u32 {\n\n self.hms().0\n\n }\n\n\n\n /// Returns the minute number from 0 to 59.\n\n ///\n\n 
/// # Example\n", "file_path": "src/naive/time.rs", "rank": 83, "score": 10.930771614915074 }, { "content": " ((self.ywf >> 4) & 0x3f) as u32\n\n }\n\n\n\n /// Returns the ISO week number starting from 0.\n\n ///\n\n /// The return value ranges from 0 to 52. (The last week of year differs by years.)\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveDate, Datelike, Weekday};\n\n ///\n\n /// let d = NaiveDate::from_isoywd(2015, 15, Weekday::Mon);\n\n /// assert_eq!(d.iso_week().week0(), 14);\n\n /// ~~~~\n\n #[inline]\n\n pub fn week0(&self) -> u32 {\n\n ((self.ywf >> 4) & 0x3f) as u32 - 1\n\n }\n\n}\n", "file_path": "src/naive/isoweek.rs", "rank": 84, "score": 10.909160231835806 }, { "content": " #[inline]\n\n pub fn and_time(self, time: NaiveTime) -> LocalResult<DateTime<Tz>> {\n\n match self {\n\n LocalResult::Single(d) => d\n\n .and_time(time)\n\n .map_or(LocalResult::None, LocalResult::Single),\n\n _ => LocalResult::None,\n\n }\n\n }\n\n\n\n /// Makes a new `DateTime` from the current date, hour, minute and second.\n\n /// The offset in the current date is preserved.\n\n ///\n\n /// Propagates any error. Ambiguous result would be discarded.\n\n #[inline]\n\n pub fn and_hms_opt(self, hour: u32, min: u32, sec: u32) -> LocalResult<DateTime<Tz>> {\n\n match self {\n\n LocalResult::Single(d) => d\n\n .and_hms_opt(hour, min, sec)\n\n .map_or(LocalResult::None, LocalResult::Single),\n", "file_path": "src/offset/mod.rs", "rank": 85, "score": 10.90591309012056 }, { "content": " pub ordinal: Option<u32>,\n\n\n\n /// Day of the month (1--28, 1--29, 1--30 or 1--31 depending on the month).\n\n pub day: Option<u32>,\n\n\n\n /// Hour number divided by 12 (0--1). 
0 indicates AM and 1 indicates PM.\n\n pub hour_div_12: Option<u32>,\n\n\n\n /// Hour number modulo 12 (0--11).\n\n pub hour_mod_12: Option<u32>,\n\n\n\n /// Minute number (0--59).\n\n pub minute: Option<u32>,\n\n\n\n /// Second number (0--60, accounting for leap seconds).\n\n pub second: Option<u32>,\n\n\n\n /// The number of nanoseconds since the whole second (0--999,999,999).\n\n pub nanosecond: Option<u32>,\n\n\n", "file_path": "src/format/parsed.rs", "rank": 86, "score": 10.897807351800209 }, { "content": "// This is a part of Chrono.\n\n// See README.md and LICENSE.txt for details.\n\n\n\n//! ISO 8601 time without timezone.\n\n\n\nuse oldtime::Duration as OldDuration;\n\nuse std::ops::{Add, AddAssign, Sub, SubAssign};\n\nuse std::{fmt, hash, str};\n\n\n\nuse div::div_mod_floor;\n\nuse format::{parse, DelayedFormat, ParseError, ParseResult, Parsed, StrftimeItems};\n\nuse format::{Fixed, Item, Numeric, Pad};\n\nuse Timelike;\n\n\n\n/// ISO 8601 time without timezone.\n\n/// Allows for the nanosecond precision and optional leap second representation.\n\n///\n\n/// # Leap Second Handling\n\n///\n\n/// Since 1960s, the manmade atomic clock has been so accurate that\n", "file_path": "src/naive/time.rs", "rank": 87, "score": 10.855926647508397 }, { "content": "// This is a part of Chrono.\n\n// See README.md and LICENSE.txt for details.\n\n\n\n//! 
ISO 8601 calendar date without timezone.\n\n\n\nuse num_traits::ToPrimitive;\n\nuse oldtime::Duration as OldDuration;\n\nuse std::ops::{Add, AddAssign, Sub, SubAssign};\n\nuse std::{fmt, str};\n\n\n\nuse div::div_mod_floor;\n\nuse format::{parse, DelayedFormat, ParseError, ParseResult, Parsed, StrftimeItems};\n\nuse format::{Item, Numeric, Pad};\n\nuse naive::{IsoWeek, NaiveDateTime, NaiveTime};\n\nuse {Datelike, Weekday};\n\n\n\nuse super::internals::{self, DateImpl, Mdf, Of, YearFlags};\n\nuse super::isoweek;\n\n\n\nconst MAX_YEAR: i32 = internals::MAX_YEAR;\n", "file_path": "src/naive/date.rs", "rank": 88, "score": 10.803548504648463 }, { "content": " /// use chrono::{NaiveDate, NaiveDateTime, Datelike, Timelike, Weekday};\n\n ///\n\n /// let d = NaiveDate::from_ymd(2015, 6, 3);\n\n ///\n\n /// let dt: NaiveDateTime = d.and_hms_micro(12, 34, 56, 789_012);\n\n /// assert_eq!(dt.year(), 2015);\n\n /// assert_eq!(dt.weekday(), Weekday::Wed);\n\n /// assert_eq!(dt.second(), 56);\n\n /// assert_eq!(dt.nanosecond(), 789_012_000);\n\n /// ~~~~\n\n #[inline]\n\n pub fn and_hms_micro(&self, hour: u32, min: u32, sec: u32, micro: u32) -> NaiveDateTime {\n\n self.and_hms_micro_opt(hour, min, sec, micro)\n\n .expect(\"invalid time\")\n\n }\n\n\n\n /// Makes a new `NaiveDateTime` from the current date, hour, minute, second and microsecond.\n\n ///\n\n /// The microsecond part can exceed 1,000,000\n\n /// in order to represent the [leap second](./struct.NaiveTime.html#leap-second-handling).\n", "file_path": "src/naive/date.rs", "rank": 89, "score": 10.640741879430866 }, { "content": " /// ~~~~\n\n fn ymd_opt(&self, year: i32, month: u32, day: u32) -> LocalResult<Date<Self>> {\n\n match NaiveDate::from_ymd_opt(year, month, day) {\n\n Some(d) => self.from_local_date(&d),\n\n None => LocalResult::None,\n\n }\n\n }\n\n\n\n /// Makes a new `Date` from year, day of year (DOY or \"ordinal\") and the current time zone.\n\n /// This assumes the proleptic Gregorian calendar, with the 
year 0 being 1 BCE.\n\n ///\n\n /// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),\n\n /// but it will propagate to the `DateTime` values constructed via this date.\n\n ///\n\n /// Panics on the out-of-range date and/or invalid DOY.\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{Utc, TimeZone};\n", "file_path": "src/offset/mod.rs", "rank": 90, "score": 10.640067337175138 }, { "content": " /// ~~~~\n\n /// # extern crate chrono; extern crate time; fn main() {\n\n /// use chrono::NaiveTime;\n\n /// use time::Duration;\n\n ///\n\n /// let from_hms = NaiveTime::from_hms;\n\n ///\n\n /// assert_eq!(from_hms(3, 4, 5).overflowing_sub_signed(Duration::hours(2)),\n\n /// (from_hms(1, 4, 5), 0));\n\n /// assert_eq!(from_hms(3, 4, 5).overflowing_sub_signed(Duration::hours(17)),\n\n /// (from_hms(10, 4, 5), 86_400));\n\n /// assert_eq!(from_hms(3, 4, 5).overflowing_sub_signed(Duration::hours(-22)),\n\n /// (from_hms(1, 4, 5), -86_400));\n\n /// # }\n\n /// ~~~~\n\n #[inline]\n\n pub fn overflowing_sub_signed(&self, rhs: OldDuration) -> (NaiveTime, i64) {\n\n let (time, rhs) = self.overflowing_add_signed(-rhs);\n\n (time, -rhs) // safe to negate, rhs is within +/- (2^63 / 1000)\n\n }\n", "file_path": "src/naive/time.rs", "rank": 91, "score": 10.548936734496138 }, { "content": " /// ~~~~\n\n #[inline]\n\n pub fn year(&self) -> i32 {\n\n self.ywf >> 10\n\n }\n\n\n\n /// Returns the ISO week number starting from 1.\n\n ///\n\n /// The return value ranges from 1 to 53. 
(The last week of year differs by years.)\n\n ///\n\n /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::{NaiveDate, Datelike, Weekday};\n\n ///\n\n /// let d = NaiveDate::from_isoywd(2015, 15, Weekday::Mon);\n\n /// assert_eq!(d.iso_week().week(), 15);\n\n /// ~~~~\n\n #[inline]\n\n pub fn week(&self) -> u32 {\n", "file_path": "src/naive/isoweek.rs", "rank": 92, "score": 10.418536205625523 }, { "content": " /// # Example\n\n ///\n\n /// ~~~~\n\n /// use chrono::NaiveDate;\n\n ///\n\n /// let d = NaiveDate::from_ymd(2015, 6, 3);\n\n /// assert!(d.and_hms_nano_opt(12, 34, 56, 789_012_345).is_some());\n\n /// assert!(d.and_hms_nano_opt(12, 34, 59, 1_789_012_345).is_some()); // leap second\n\n /// assert!(d.and_hms_nano_opt(12, 34, 59, 2_789_012_345).is_none());\n\n /// assert!(d.and_hms_nano_opt(12, 34, 60, 789_012_345).is_none());\n\n /// assert!(d.and_hms_nano_opt(12, 60, 56, 789_012_345).is_none());\n\n /// assert!(d.and_hms_nano_opt(24, 34, 56, 789_012_345).is_none());\n\n /// ~~~~\n\n #[inline]\n\n pub fn and_hms_nano_opt(\n\n &self,\n\n hour: u32,\n\n min: u32,\n\n sec: u32,\n\n nano: u32,\n", "file_path": "src/naive/date.rs", "rank": 93, "score": 10.374704177696529 }, { "content": " pub fn pred(&self) -> Weekday {\n\n match *self {\n\n Weekday::Mon => Weekday::Sun,\n\n Weekday::Tue => Weekday::Mon,\n\n Weekday::Wed => Weekday::Tue,\n\n Weekday::Thu => Weekday::Wed,\n\n Weekday::Fri => Weekday::Thu,\n\n Weekday::Sat => Weekday::Fri,\n\n Weekday::Sun => Weekday::Sat,\n\n }\n\n }\n\n\n\n /// Returns a day-of-week number starting from Monday = 1. 
(ISO 8601 weekday number)\n\n ///\n\n /// `w`: | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun`\n\n /// ------------------------- | ----- | ----- | ----- | ----- | ----- | ----- | -----\n\n /// `w.number_from_monday()`: | 1 | 2 | 3 | 4 | 5 | 6 | 7\n\n #[inline]\n\n pub fn number_from_monday(&self) -> u32 {\n\n match *self {\n", "file_path": "src/lib.rs", "rank": 94, "score": 10.34847697752422 }, { "content": "// This is a part of Chrono.\n\n// See README.md and LICENSE.txt for details.\n\n\n\n//! ISO 8601 week.\n\n\n\nuse std::fmt;\n\n\n\nuse super::internals::{DateImpl, Of, YearFlags};\n\n\n\n/// ISO 8601 week.\n\n///\n\n/// This type, combined with [`Weekday`](../enum.Weekday.html),\n\n/// constitues the ISO 8601 [week date](./struct.NaiveDate.html#week-date).\n\n/// One can retrieve this type from the existing [`Datelike`](../trait.Datelike.html) types\n\n/// via the [`Datelike::iso_week`](../trait.Datelike.html#tymethod.iso_week) method.\n\n#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]\n\npub struct IsoWeek {\n\n // note that this allows for larger year range than `NaiveDate`.\n\n // this is crucial because we have an edge case for the first and last week supported,\n\n // which year number might not match the calendar year number.\n\n ywf: DateImpl, // (year << 10) | (week << 4) | flag\n\n}\n\n\n\n/// Returns the corresponding `IsoWeek` from the year and the `Of` internal value.\n\n//\n\n// internal use only. 
we don't expose the public constructor for `IsoWeek` for now,\n\n// because the year range for the week date and the calendar date do not match and\n\n// it is confusing to have a date that is out of range in one and not in another.\n\n// currently we sidestep this issue by making `IsoWeek` fully dependent of `Datelike`.\n", "file_path": "src/naive/isoweek.rs", "rank": 95, "score": 10.279209557511395 }, { "content": "extern crate wasm_bindgen;\n\n\n\n#[cfg(test)]\n\ndoctest!(\"../README.md\");\n\n\n\n// this reexport is to aid the transition and should not be in the prelude!\n\npub use oldtime::Duration;\n\n\n\npub use date::{Date, MAX_DATE, MIN_DATE};\n\n#[cfg(feature = \"rustc-serialize\")]\n\npub use datetime::rustc_serialize::TsSeconds;\n\npub use datetime::{DateTime, SecondsFormat};\n\npub use format::{ParseError, ParseResult};\n\n#[doc(no_inline)]\n\npub use naive::{IsoWeek, NaiveDate, NaiveDateTime, NaiveTime};\n\n#[cfg(feature = \"clock\")]\n\n#[doc(no_inline)]\n\npub use offset::Local;\n\n#[doc(no_inline)]\n\npub use offset::{FixedOffset, LocalResult, Offset, TimeZone, Utc};\n", "file_path": "src/lib.rs", "rank": 96, "score": 10.258341026041919 }, { "content": "/// an alternative returning `Option` or `Result`. 
Thus we need some early bound to avoid\n\n/// touching that call when we are already sure that it WILL overflow...\n\nconst MAX_SECS_BITS: usize = 44;\n\n\n\n/// ISO 8601 combined date and time without timezone.\n\n///\n\n/// # Example\n\n///\n\n/// `NaiveDateTime` is commonly created from [`NaiveDate`](./struct.NaiveDate.html).\n\n///\n\n/// ~~~~\n\n/// use chrono::{NaiveDate, NaiveDateTime};\n\n///\n\n/// let dt: NaiveDateTime = NaiveDate::from_ymd(2016, 7, 8).and_hms(9, 10, 11);\n\n/// # let _ = dt;\n\n/// ~~~~\n\n///\n\n/// You can use typical [date-like](../trait.Datelike.html) and\n\n/// [time-like](../trait.Timelike.html) methods,\n\n/// provided that relevant traits are in the scope.\n", "file_path": "src/naive/datetime.rs", "rank": 97, "score": 10.088836964032378 }, { "content": " /// The UTC is continuous and thus this cannot fail (but can give the duplicate local time).\n\n fn from_utc_date(&self, utc: &NaiveDate) -> Date<Self> {\n\n Date::from_utc(*utc, self.offset_from_utc_date(utc))\n\n }\n\n\n\n /// Converts the UTC `NaiveDateTime` to the local time.\n\n /// The UTC is continuous and thus this cannot fail (but can give the duplicate local time).\n\n fn from_utc_datetime(&self, utc: &NaiveDateTime) -> DateTime<Self> {\n\n DateTime::from_utc(*utc, self.offset_from_utc_datetime(utc))\n\n }\n\n}\n\n\n\nmod fixed;\n\n#[cfg(feature = \"clock\")]\n\nmod local;\n\nmod utc;\n\n\n\npub use self::fixed::FixedOffset;\n\n#[cfg(feature = \"clock\")]\n\npub use self::local::Local;\n", "file_path": "src/offset/mod.rs", "rank": 98, "score": 10.074360482600738 }, { "content": " .map_or(LocalResult::None, LocalResult::Single),\n\n _ => LocalResult::None,\n\n }\n\n }\n\n\n\n /// Makes a new `DateTime` from the current date, hour, minute, second and microsecond.\n\n /// The microsecond part can exceed 1,000,000 in order to represent the leap second.\n\n /// The offset in the current date is preserved.\n\n ///\n\n /// Propagates any error. 
Ambiguous result would be discarded.\n\n #[inline]\n\n pub fn and_hms_micro_opt(\n\n self,\n\n hour: u32,\n\n min: u32,\n\n sec: u32,\n\n micro: u32,\n\n ) -> LocalResult<DateTime<Tz>> {\n\n match self {\n\n LocalResult::Single(d) => d\n", "file_path": "src/offset/mod.rs", "rank": 99, "score": 10.035069880804866 } ]
Rust
src/reader.rs
sayantangkhan/oxyscheme
742da72abbd719bf66898e46bd3913191f3d00fb
use crate::lexer::*; use crate::parser::{parse_datum, Datum}; use crate::*; use anyhow::Result; use std::{ fs::File, io::{BufRead, BufReader, Lines}, iter::{Enumerate, Peekable}, path::PathBuf, }; pub struct FileLexer { file: File, } impl FileLexer { pub fn new(filename: &str) -> Result<Self, CompilerError> { Ok(FileLexer { file: File::open(PathBuf::from(filename))?, }) } } impl IntoIterator for FileLexer { type Item = Result<TokenWithPosition, CompilerError>; type IntoIter = FileLexerIntoIter; fn into_iter(self) -> Self::IntoIter { let line_enumerator = BufReader::new(self.file).lines().enumerate(); let input_string = String::from(""); FileLexerIntoIter { line_enumerator, input_string, cursor_position: 0, line_number: 0, encountered_error: false, } } } pub struct FileLexerIntoIter { line_enumerator: Enumerate<Lines<BufReader<File>>>, input_string: String, cursor_position: usize, line_number: usize, encountered_error: bool, } impl Iterator for FileLexerIntoIter { type Item = Result<TokenWithPosition, CompilerError>; fn next(&mut self) -> Option<Self::Item> { if self.encountered_error { return None; } while self.input_string.len() <= self.cursor_position { if let Some((index, line_res)) = self.line_enumerator.next() { match line_res { Ok(line) => { self.input_string = line; self.cursor_position = 0; self.line_number = index + 1; } Err(e) => { self.encountered_error = true; return Some(Err(CompilerError::IOError(e))); } } } else { return None; } } match lex_input(&self.input_string[self.cursor_position..]) { Ok((leftover, parsed)) => { let token_with_position = TokenWithPosition { token: parsed, line: self.line_number, column: self.cursor_position, }; self.cursor_position = self.input_string.len() - leftover.len(); Some(Ok(token_with_position)) } Err(_) => { self.encountered_error = true; Some(Err(CompilerError::LexError( String::from(&self.input_string[self.cursor_position..]), self.line_number, self.cursor_position, ))) } } } } pub struct DatumIterator<I> where I: 
Iterator<Item = Result<TokenWithPosition, CompilerError>>, { token_stream: Peekable<I>, encountered_error: bool, } impl<I> DatumIterator<I> where I: Iterator<Item = Result<TokenWithPosition, CompilerError>>, { pub fn new(token_stream: I) -> Self { DatumIterator { token_stream: token_stream.peekable(), encountered_error: false, } } } impl<I> Iterator for DatumIterator<I> where I: Iterator<Item = Result<TokenWithPosition, CompilerError>>, { type Item = Result<Datum, CompilerError>; fn next(&mut self) -> Option<Self::Item> { if self.encountered_error { return None; } self.token_stream.peek()?; let datum_res = parse_datum(&mut self.token_stream); if datum_res.is_err() { self.encountered_error = true; } Some(datum_res) } }
use crate::lexer::*; use crate::parser::{parse_datum, Datum}; use crate::*; use anyhow::Result; use std::{ fs::File, io::{BufRead, BufReader, Lines}, iter::{Enumerate, Peekable}, path::PathBuf, }; pub struct FileLexer { file: File, } impl FileLexer { pub fn new(filename: &str) -> Result<Self, CompilerError> { Ok(FileLexer { file: File::open(PathBuf::from(filename))?, }) } } impl IntoIterator for FileLexer { type Item = Result<TokenWithPosition, CompilerError>; type IntoIter = FileLexerIntoIter; fn into_iter(self) -> Self::IntoIter { let line_enumerator = BufReader::new(self.file).lines().enumerate(); let input_string = String::from(""); FileLexerIntoIter { line_enumerator, input_string, cursor_position: 0, line_number: 0, encountered_error: false, } } } pub struct FileLexerIntoIter { line_enumerator: Enumerate<Lines<BufReader<File>>>, input_string: String, cursor_position: usize, line_number: usize, encountered_error: bool, } impl Iterator for FileLexerIntoIter { type Item = Result<TokenWithPosition, CompilerError>; fn next(&mut self) -> Option<Self::Item> { if self.encountered_error { return None; } while self.input_string.len() <= self.cursor_position { if let Some((index, line_res)) = self.line_enumerator.next() { match line_res { Ok(line) => { self.input_string = line; self.cursor_position = 0; self.line_number = index + 1; } Err(e) => { self.encountered_error = true; return Some(Err(CompilerError::IOError(e))); } } } else { return None; } } match lex_input(&self.input_string[self.cursor_position..]) { Ok((leftover, parsed)) => { let token_with_position = TokenWithPosition { token: parsed, line: self.line_number, column: self.cursor_position, }; self.cursor_position = self.input_string.len() - leftover.len(); Some(Ok(token_with_position)) } Err(_) => { self.encountered_error = true; Some(
) } } } } pub struct DatumIterator<I> where I: Iterator<Item = Result<TokenWithPosition, CompilerError>>, { token_stream: Peekable<I>, encountered_error: bool, } impl<I> DatumIterator<I> where I: Iterator<Item = Result<TokenWithPosition, CompilerError>>, { pub fn new(token_stream: I) -> Self { DatumIterator { token_stream: token_stream.peekable(), encountered_error: false, } } } impl<I> Iterator for DatumIterator<I> where I: Iterator<Item = Result<TokenWithPosition, CompilerError>>, { type Item = Result<Datum, CompilerError>; fn next(&mut self) -> Option<Self::Item> { if self.encountered_error { return None; } self.token_stream.peek()?; let datum_res = parse_datum(&mut self.token_stream); if datum_res.is_err() { self.encountered_error = true; } Some(datum_res) } }
Err(CompilerError::LexError( String::from(&self.input_string[self.cursor_position..]), self.line_number, self.cursor_position, ))
call_expression
[ { "content": "/// Parses a single `Datum` from the token stream\n\npub fn parse_datum<I>(token_stream: &mut Peekable<I>) -> Result<Datum, CompilerError>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithPosition, CompilerError>>,\n\n{\n\n match token_stream.peek() {\n\n Some(Ok(TokenWithPosition {\n\n token,\n\n line,\n\n column,\n\n })) => match token {\n\n Token::Boolean(_) => parse_simple_datum(token_stream),\n\n Token::String(_) => parse_simple_datum(token_stream),\n\n Token::Character(_) => parse_simple_datum(token_stream),\n\n Token::Number(_) => parse_simple_datum(token_stream),\n\n Token::Identifier(_) => parse_simple_datum(token_stream),\n\n Token::Whitespace => {\n\n token_stream.next();\n\n parse_datum(token_stream)\n\n }\n\n Token::Comment => {\n", "file_path": "src/parser.rs", "rank": 0, "score": 111849.21946650036 }, { "content": "/// Type alias for the common return type for the lexers\n\ntype LexResult<'a> = IResult<&'a str, Token, NomErrorStruct<&'a str>>;\n\n\n", "file_path": "src/lexer.rs", "rank": 1, "score": 102767.08647467468 }, { "content": "fn parse_simple_datum<I>(token_stream: &mut Peekable<I>) -> Result<Datum, CompilerError>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithPosition, CompilerError>>,\n\n{\n\n let TokenWithPosition { token, .. 
} = token_stream.next().unwrap()?;\n\n match token {\n\n Token::Boolean(b) => Ok(Datum::Boolean(b)),\n\n Token::String(s) => Ok(Datum::String(s)),\n\n Token::Character(c) => Ok(Datum::Character(c)),\n\n Token::Number(l) => Ok(Datum::Number(l)),\n\n Token::Identifier(i) => Ok(Datum::Identifier(i)),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 2, "score": 89457.69353225053 }, { "content": "fn parse_cdr<I>(token_stream: &mut Peekable<I>, car: Vec<Datum>) -> Result<Datum, CompilerError>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithPosition, CompilerError>>,\n\n{\n\n token_stream.next();\n\n let cdr = parse_datum(token_stream)?;\n\n match token_stream.next() {\n\n Some(Ok(TokenWithPosition {\n\n token: Token::Punctuator(p),\n\n ..\n\n })) if p == \")\" => Ok(Datum::DottedPair(car, Box::new(cdr))),\n\n _ => {\n\n // Figure out a way to include the line and column number of the error\n\n Err(CompilerError::MissingCloseParen)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/parser.rs", "rank": 3, "score": 86333.69156676781 }, { "content": "fn parse_vector<I>(token_stream: &mut Peekable<I>) -> Result<Datum, CompilerError>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithPosition, CompilerError>>,\n\n{\n\n let mut vector = Vec::new();\n\n\n\n // Consuming the \"#(\"\n\n token_stream.next();\n\n\n\n loop {\n\n match token_stream.peek() {\n\n Some(Ok(token_with_position)) => {\n\n let token = &token_with_position.token;\n\n match token {\n\n Token::Punctuator(p) if p == \")\" => {\n\n token_stream.next();\n\n break;\n\n }\n\n _ => {\n\n let datum = parse_datum(token_stream)?;\n", "file_path": "src/parser.rs", "rank": 4, "score": 84768.26075019274 }, { "content": "fn parse_list<I>(token_stream: &mut Peekable<I>) -> Result<Datum, CompilerError>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithPosition, CompilerError>>,\n\n{\n\n let mut car: Vec<Datum> = Vec::new();\n\n\n\n // Consuming the \"(\"\n\n 
token_stream.next();\n\n\n\n loop {\n\n match token_stream.peek() {\n\n Some(Ok(token_with_position)) => {\n\n let token = &token_with_position.token;\n\n match token {\n\n Token::Punctuator(p) if p == \")\" => {\n\n token_stream.next();\n\n return Ok(Datum::List(car));\n\n }\n\n Token::Punctuator(p) if p == \".\" => {\n\n return parse_cdr(token_stream, car);\n", "file_path": "src/parser.rs", "rank": 5, "score": 84768.26075019274 }, { "content": "fn parse_abbrev<I>(token_stream: &mut Peekable<I>) -> Result<Datum, CompilerError>\n\nwhere\n\n I: Iterator<Item = Result<TokenWithPosition, CompilerError>>,\n\n{\n\n let TokenWithPosition { token, .. } = token_stream.next().unwrap()?;\n\n let datum = parse_datum(token_stream)?;\n\n if let Token::Punctuator(s) = token {\n\n match s.as_str() {\n\n \"'\" => Ok(Datum::Quote(Box::new(datum))),\n\n \"`\" => Ok(Datum::Backquote(Box::new(datum))),\n\n \",\" => Ok(Datum::Unquote(Box::new(datum))),\n\n \",@\" => Ok(Datum::UnquoteSplice(Box::new(datum))),\n\n _ => unreachable!(),\n\n }\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 6, "score": 84768.26075019274 }, { "content": "/// The general lexer that lexes any valid input string\n\npub fn lex_input(input: &str) -> LexResult<'_> {\n\n let mut parser = alt((\n\n lex_string,\n\n lex_boolean,\n\n lex_character,\n\n lex_identifier,\n\n lex_number,\n\n lex_punctuator,\n\n lex_whitespace,\n\n lex_comment,\n\n ));\n\n parser(input)\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 7, "score": 82093.21683602329 }, { "content": "fn lex_string(input: &str) -> LexResult<'_> {\n\n let (input, _) = tag(\"\\\"\")(input)?;\n\n let (leftover, parsed) = escaped_transform(\n\n is_not(\"\\\\\\\"\"),\n\n '\\\\',\n\n alt((\n\n value(\"\\\\\", tag(\"\\\\\")),\n\n value(\"\\\"\", tag(\"\\\"\")),\n\n value(\"\\n\", tag(\"n\")),\n\n )),\n\n )(input)?;\n\n let (input, _) = tag(\"\\\"\")(leftover)?;\n\n Ok((input, Token::String(parsed)))\n\n}\n\n\n", "file_path": 
"src/lexer.rs", "rank": 8, "score": 72956.21711829172 }, { "content": "fn non_peculiar(input: &str) -> IResult<&str, &str> {\n\n let special_initial = one_of(\"!$%&*/:<=>?^_~\");\n\n let letter = satisfy(|c| c.is_alphabetic());\n\n let initial = alt((letter, special_initial));\n\n let digit = satisfy(|c| c.is_numeric());\n\n let special_subsequent = one_of(\"+-.@\");\n\n let subsequent = alt((initial, digit, special_subsequent));\n\n\n\n // The repeated code is to get around the compiler's move semantics.\n\n let special_initial = one_of(\"!$%&*/:<=>?^_~\");\n\n let letter = satisfy(|c| c.is_alphabetic());\n\n let initial = alt((letter, special_initial));\n\n\n\n recognize(tuple((initial, many0(subsequent))))(input)\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 9, "score": 71783.7219330495 }, { "content": "fn peek_delimiter(input: &str) -> IResult<&str, ()> {\n\n let whitespace = one_of(\" \\n\\t\");\n\n let delimiter = alt((whitespace, one_of(\"()\\\";\")));\n\n map(peek(delimiter), |_: char| ())(input)\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 10, "score": 68073.69703149404 }, { "content": "fn lex_punctuator(input: &str) -> LexResult<'_> {\n\n alt((\n\n tag(\"(\"),\n\n tag(\")\"),\n\n tag(\"#(\"),\n\n tag(\"'\"),\n\n tag(\"`\"),\n\n tag(\",@\"),\n\n tag(\",\"),\n\n tag(\".\"),\n\n ))(input)\n\n .map(|(l, p)| (l, Token::Punctuator(String::from(p))))\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 11, "score": 55741.726621286194 }, { "content": "fn lex_character(input: &str) -> LexResult<'_> {\n\n let (input, _) = tag(\"#\\\\\")(input)?;\n\n let space_parser = map(tag(\"space\"), |_| ' ');\n\n let newline_parser = map(tag(\"newline\"), |_| '\\n');\n\n let (leftover, parsed) = alt((space_parser, newline_parser, anychar))(input)?;\n\n peek_delimiter(leftover)?;\n\n Ok((leftover, Token::Character(parsed)))\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 12, "score": 55741.726621286194 }, { "content": "fn lex_whitespace(input: &str) -> LexResult<'_> 
{\n\n many1(alt((tag(\" \"), tag(\"\\n\"))))(input).map(|(l, _)| (l, Token::Whitespace))\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 13, "score": 55741.726621286194 }, { "content": "fn lex_comment(input: &str) -> LexResult<'_> {\n\n let ends_with_newline = recognize(tuple((tag(\";\"), many0(none_of(\"\\n\")), tag(\"\\n\"))));\n\n let ends_without_newline = recognize(tuple((tag(\";\"), many0(none_of(\"\\n\")))));\n\n alt((ends_with_newline, ends_without_newline))(input).map(|(l, _)| (l, Token::Comment))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn lex_string_test() {\n\n assert_eq!(\n\n lex_string(r#\"\"string\"\"#),\n\n Ok((\"\", Token::String(String::from(\"string\"))))\n\n );\n\n assert_eq!(\n\n lex_string(r#\"\"st\\\"ring\"\"#),\n\n Ok((\"\", Token::String(String::from(\"st\\\"ring\"))))\n", "file_path": "src/lexer.rs", "rank": 14, "score": 55741.726621286194 }, { "content": "fn lex_number(input: &str) -> LexResult<'_> {\n\n let integer_parser = tuple((opt(one_of(\"+-\")), digit1));\n\n let float_parser =\n\n tuple::<_, _, (_, ErrorKind), _>((opt(one_of(\"+-\")), digit0, tag(\".\"), digit1));\n\n // Note that one needs to annotate the tuple function in this case because the compilier\n\n // is unable to infer the return type.\n\n if let Ok((l, p)) = recognize(float_parser)(input) {\n\n if let Ok(num) = p.parse() {\n\n Ok((l, Token::Number(LispNum::Float(num))))\n\n } else {\n\n Err(NomErrorEnum(NomErrorStruct::new(l, ErrorKind::TooLarge)))\n\n }\n\n } else {\n\n let (l, p) = recognize(integer_parser)(input)?;\n\n if let Ok(num) = p.parse() {\n\n Ok((l, Token::Number(LispNum::Integer(num))))\n\n } else {\n\n Err(NomErrorEnum(NomErrorStruct::new(l, ErrorKind::TooLarge)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 15, "score": 55741.726621286194 }, { "content": "fn lex_boolean(input: &str) -> LexResult<'_> {\n\n let (input, _) = tag(\"#\")(input)?;\n\n let (leftover, parsed) = 
one_of(\"tf\")(input)?;\n\n match parsed {\n\n 't' => Ok((leftover, Token::Boolean(true))),\n\n 'f' => Ok((leftover, Token::Boolean(false))),\n\n _ => Err(NomErrorEnum(NomErrorStruct::new(input, ErrorKind::OneOf))),\n\n }\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 16, "score": 55741.726621286194 }, { "content": "fn lex_identifier(input: &str) -> LexResult<'_> {\n\n let peculiar_identifier = alt((tag(\"+\"), tag(\"-\"), tag(\"...\")));\n\n let (leftover, parsed) = alt((non_peculiar, peculiar_identifier))(input)?;\n\n\n\n if !leftover.is_empty() {\n\n peek_delimiter(leftover)?;\n\n };\n\n\n\n Ok((leftover, Token::Identifier(String::from(parsed))))\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 17, "score": 55741.726621286194 }, { "content": "fn main() -> Result<()> {\n\n let filename = env::args().nth(1).unwrap();\n\n\n\n let file_lexer = FileLexer::new(&filename)?;\n\n let token_stream = file_lexer.into_iter();\n\n let datum_stream = DatumIterator::new(token_stream);\n\n for datum_res in datum_stream {\n\n let datum = datum_res?;\n\n println!(\"{:#?}\", datum);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 18, "score": 33447.674643765116 }, { "content": "#[test]\n\nfn lexer_rejects_invalid_input() {\n\n let bad_directory = Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"inputs/bad-lexer-inputs/\");\n\n\n\n for file_res in fs::read_dir(&bad_directory).unwrap() {\n\n let file = file_res.unwrap().path();\n\n let file_lexer = reader::FileLexer::new(file.to_str().unwrap()).unwrap();\n\n let vec_of_tokens_res: Result<Vec<lexer::TokenWithPosition>, CompilerError> =\n\n file_lexer.into_iter().collect();\n\n assert!(vec_of_tokens_res.is_err());\n\n }\n\n}\n\n\n", "file_path": "tests/parser_integration_test.rs", "rank": 19, "score": 31707.14288940392 }, { "content": "#[test]\n\nfn lexer_accepts_valid_input() {\n\n let good_directory = Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"inputs/good-inputs/\");\n\n\n\n for file_res in 
fs::read_dir(&good_directory).unwrap() {\n\n let file = file_res.unwrap().path();\n\n let file_lexer = reader::FileLexer::new(file.to_str().unwrap()).unwrap();\n\n let vec_of_tokens_res: Result<Vec<lexer::TokenWithPosition>, CompilerError> =\n\n file_lexer.into_iter().collect();\n\n assert!(vec_of_tokens_res.is_ok());\n\n }\n\n}\n\n\n", "file_path": "tests/parser_integration_test.rs", "rank": 20, "score": 31707.14288940392 }, { "content": "#[test]\n\nfn parser_rejects_invalid_input() {\n\n let bad_directory = Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"inputs/bad-parser-inputs/\");\n\n\n\n for file_res in fs::read_dir(&bad_directory).unwrap() {\n\n let file = file_res.unwrap().path();\n\n let file_lexer = reader::FileLexer::new(file.to_str().unwrap()).unwrap();\n\n let token_stream = file_lexer.into_iter();\n\n let datum_stream = reader::DatumIterator::new(token_stream);\n\n let vec_of_datums_res: Result<Vec<parser::Datum>, CompilerError> = datum_stream.collect();\n\n assert!(vec_of_datums_res.is_err());\n\n }\n\n}\n", "file_path": "tests/parser_integration_test.rs", "rank": 21, "score": 31707.14288940392 }, { "content": "#[test]\n\nfn parser_accepts_valid_input() {\n\n let good_directory = Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"inputs/good-inputs/\");\n\n\n\n for file_res in fs::read_dir(&good_directory).unwrap() {\n\n let file = file_res.unwrap().path();\n\n let file_lexer = reader::FileLexer::new(file.to_str().unwrap()).unwrap();\n\n let token_stream = file_lexer.into_iter();\n\n let datum_stream = reader::DatumIterator::new(token_stream);\n\n let vec_of_datums_res: Result<Vec<parser::Datum>, CompilerError> = datum_stream.collect();\n\n assert!(vec_of_datums_res.is_ok());\n\n }\n\n}\n\n\n", "file_path": "tests/parser_integration_test.rs", "rank": 22, "score": 31707.14288940392 }, { "content": "//! Module to succesively parse a stream of `Token`s into `Datum`s which is then further modified\n\n//! 
by the `ast-wrangler`.\n\n\n\nuse crate::lexer::Token;\n\nuse crate::lexer::TokenWithPosition;\n\nuse std::iter::Peekable;\n\n\n\nuse crate::{lexer::LispNum, CompilerError};\n\n\n\n/// An enum representing `Datum`, i.e. the nodes of the abstract syntax tree\n\n#[derive(Debug, PartialEq)]\n\npub enum Datum {\n\n /// Represents a boolean\n\n Boolean(bool),\n\n /// Represents a `LispNum`\n\n Number(LispNum),\n\n /// Represents a character\n\n Character(char),\n\n /// Represents a string\n\n String(String),\n", "file_path": "src/parser.rs", "rank": 28, "score": 16.03614993674472 }, { "content": " use super::{parse_datum, Datum};\n\n use crate::{\n\n lexer::{Token, TokenWithPosition},\n\n CompilerError,\n\n };\n\n\n\n #[test]\n\n fn parse_simple_datum_test() {\n\n let vec_of_res: Vec<Result<TokenWithPosition, CompilerError>> =\n\n vec![Ok(TokenWithPosition {\n\n token: Token::Boolean(true),\n\n line: 0,\n\n column: 0,\n\n })];\n\n let mut token_stream = vec_of_res.into_iter().peekable();\n\n assert_eq!(\n\n parse_datum(&mut token_stream).unwrap(),\n\n Datum::Boolean(true)\n\n );\n\n }\n", "file_path": "src/parser.rs", "rank": 29, "score": 14.461011499551976 }, { "content": " token: Token::Punctuator(String::from(\")\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\")\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n ];\n\n let mut token_stream = vec_of_res.into_iter().peekable();\n\n assert_eq!(\n\n parse_datum(&mut token_stream).unwrap(),\n\n Datum::Vector(vec![Datum::Vector(vec![Datum::Boolean(true)])])\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_list_test() {\n\n let vec_of_res: Vec<Result<TokenWithPosition, CompilerError>> = vec![\n", "file_path": "src/parser.rs", "rank": 31, "score": 13.584725204645395 }, { "content": " Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\")\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n ];\n\n let mut token_stream = vec_of_res.into_iter().peekable();\n\n 
assert_eq!(\n\n parse_datum(&mut token_stream).unwrap(),\n\n Datum::List(vec![Datum::Vector(vec![Datum::Boolean(true)])])\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_dotted_pair_test() {\n\n let vec_of_res: Vec<Result<TokenWithPosition, CompilerError>> = vec![\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\"(\")),\n\n line: 0,\n\n column: 0,\n", "file_path": "src/parser.rs", "rank": 32, "score": 13.239252550034461 }, { "content": " }),\n\n ];\n\n let mut token_stream = vec_of_res.into_iter().peekable();\n\n let car = vec![Datum::Identifier(String::from(\"a\"))];\n\n let cdr = Box::new(Datum::Identifier(String::from(\"a\")));\n\n let pair = Datum::DottedPair(car, cdr);\n\n assert_eq!(parse_datum(&mut token_stream).unwrap(), pair);\n\n }\n\n\n\n #[test]\n\n fn parse_abbrev_test() {\n\n let vec_of_res: Vec<Result<TokenWithPosition, CompilerError>> = vec![\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(\"'\".to_string()),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Boolean(true),\n\n line: 0,\n", "file_path": "src/parser.rs", "rank": 34, "score": 12.360982781357876 }, { "content": " column: 1,\n\n }),\n\n ];\n\n let mut token_stream = vec_of_res.into_iter().peekable();\n\n assert_eq!(\n\n parse_datum(&mut token_stream).unwrap(),\n\n Datum::Quote(Box::new(Datum::Boolean(true)))\n\n );\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 36, "score": 11.698792009744034 }, { "content": "use anyhow::Result;\n\nuse oxyscheme::*;\n\nuse reader::{DatumIterator, FileLexer};\n\nuse std::env;\n\n\n", "file_path": "src/main.rs", "rank": 37, "score": 11.571412994514443 }, { "content": " }\n\n _ => {\n\n let next_datum = parse_datum(token_stream)?;\n\n car.push(next_datum);\n\n }\n\n }\n\n }\n\n Some(Err(_)) => {\n\n return Err(token_stream.next().unwrap().unwrap_err());\n\n }\n\n None => {\n\n // Figure out a way to include the line and column number of the error\n\n return 
Err(CompilerError::MissingCloseParen);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 38, "score": 11.556621047950115 }, { "content": " Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\"(\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\"#(\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Boolean(true),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\")\")),\n\n line: 0,\n\n column: 0,\n\n }),\n", "file_path": "src/parser.rs", "rank": 39, "score": 11.491474834915575 }, { "content": " vector.push(datum);\n\n }\n\n }\n\n }\n\n\n\n Some(Err(_)) => {\n\n return Err(token_stream.next().unwrap().unwrap_err());\n\n }\n\n\n\n None => {\n\n // Figure out a way to include the line and column number of the error\n\n return Err(CompilerError::MissingCloseParen);\n\n }\n\n }\n\n }\n\n\n\n Ok(Datum::Vector(vector))\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 40, "score": 11.093168275666425 }, { "content": "//! 
Module to lex the input stream and return a stream of tokens\n\nuse nom::{\n\n branch::alt,\n\n bytes::complete::{escaped_transform, is_not, tag},\n\n character::complete::{anychar, digit0, digit1, none_of, one_of, satisfy},\n\n combinator::{map, opt, peek, recognize, value},\n\n error::ErrorKind,\n\n multi::{many0, many1},\n\n sequence::tuple,\n\n IResult,\n\n};\n\n\n\nuse nom::error::Error as NomErrorStruct;\n\nuse nom::Err::Error as NomErrorEnum;\n\n\n\n/// Wrapper around `Token` that keeps track of line and column\n\n#[derive(Debug)]\n\npub struct TokenWithPosition {\n\n /// Contains the actual token\n\n pub token: Token,\n", "file_path": "src/lexer.rs", "rank": 42, "score": 10.765103856442739 }, { "content": " /// The line number of the token\n\n pub line: usize,\n\n /// The column number of the token\n\n pub column: usize,\n\n}\n\n\n\n/// Terminal token types for the lexer\n\n///\n\n/// The variants of `Token` wrap around the corresponding Rust types in the case of `String`,\n\n/// `Character`, and `Boolean`. `Number` wraps around `LispNum`, which can either be an `f32`\n\n/// or an `i32`. `Identifier` and `Punctuator` wrap around slices from the input, to avoid\n\n/// unnecessary heap copying and heap allocation. In particular, this means that the token cannot\n\n/// be dropped before the input string. 
`Whitespace` and `Comment` are representative of whitespaces\n\n/// and comments without wrapping around anything.\n\n#[derive(Debug, PartialEq)]\n\npub enum Token {\n\n /// Wraps a string\n\n String(String),\n\n /// Wraps a character\n\n Character(char),\n", "file_path": "src/lexer.rs", "rank": 43, "score": 10.68122760290127 }, { "content": " }),\n\n Ok(TokenWithPosition {\n\n token: Token::Identifier(String::from(\"a\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\".\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Identifier(String::from(\"a\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\")\")),\n\n line: 0,\n\n column: 0,\n", "file_path": "src/parser.rs", "rank": 44, "score": 10.203117793889898 }, { "content": "\n\n #[test]\n\n fn parse_vector_test() {\n\n let vec_of_res: Vec<Result<TokenWithPosition, CompilerError>> = vec![\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\"#(\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Punctuator(String::from(\"#(\")),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n\n token: Token::Boolean(true),\n\n line: 0,\n\n column: 0,\n\n }),\n\n Ok(TokenWithPosition {\n", "file_path": "src/parser.rs", "rank": 45, "score": 10.194798643685708 }, { "content": " /// the second `usize` is the column number, and the `String` is a copy of the leftover unlexed input from the line.\n\n #[error(\"Lex error at line {1}, column {2}, near \\\"{0}\\\" while lexing input\")]\n\n LexError(String, usize, usize),\n\n\n\n /// Error variant handling the token stream ending too early\n\n #[error(\"Token stream ended unexpectedly\")]\n\n TokenStreamEnded,\n\n\n\n /// Error variant handling unexpected tokens\n\n #[error(\"Unexpected token encountered at line {0}, column {1} while parsing input\")]\n\n 
UnexpectedToken(usize, usize),\n\n\n\n /// Error variant handling unclosed lists or vectors\n\n #[error(\"Missing close paren at unknown position\")]\n\n MissingCloseParen,\n\n\n\n /// Indicates an IO error\n\n ///\n\n /// Usually happens if the source files cannot be opened\n\n #[error(\"I/O error\")]\n\n IOError(#[from] std::io::Error),\n\n}\n", "file_path": "src/lib.rs", "rank": 46, "score": 9.973945089304962 }, { "content": " token_stream.next();\n\n parse_datum(token_stream)\n\n }\n\n Token::Punctuator(p) if p == \"(\" => parse_list(token_stream),\n\n Token::Punctuator(p) if p == \"#(\" => parse_vector(token_stream),\n\n Token::Punctuator(p) if p == \"'\" => parse_abbrev(token_stream),\n\n Token::Punctuator(p) if p == \"`\" => parse_abbrev(token_stream),\n\n Token::Punctuator(p) if p == \",\" => parse_abbrev(token_stream),\n\n Token::Punctuator(p) if p == \",@\" => parse_abbrev(token_stream),\n\n _ => Err(CompilerError::UnexpectedToken(*line, *column)),\n\n },\n\n\n\n Some(Err(_)) => Err(token_stream.next().unwrap().unwrap_err()),\n\n\n\n None => Err(CompilerError::TokenStreamEnded),\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 47, "score": 9.211045081180998 }, { "content": "//! Library for lexing and parsing a reasonable subset of\n\n//! [R5RS](https://schemers.org/Documents/Standards/R5RS/r5rs.pdf) Scheme.\n\n//!\n\n//! ## Usage\n\n//! TO ADD\n\n\n\n#![warn(missing_docs, unused_variables, rust_2018_idioms)]\n\n\n\npub mod lexer;\n\npub mod parser;\n\npub mod reader;\n\n\n\nuse thiserror::Error;\n\n\n\n/// The toplevel error type for the crate\n\n#[derive(Error, Debug)]\n\npub enum CompilerError {\n\n /// Indicates a lexing error\n\n ///\n\n /// `LexError` wraps around a `String` and a `usize`. 
The first `usize` is the line number in the input,\n", "file_path": "src/lib.rs", "rank": 48, "score": 8.528507764719176 }, { "content": " Ok((\" \", Token::Identifier(\"he++o\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"hel.o \"),\n\n Ok((\" \", Token::Identifier(\"hel.o\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"...\"),\n\n Ok((\"\", Token::Identifier(\"...\".to_string())))\n\n );\n\n // assert_eq!(\n\n // lex_identifier(\"...\"),\n\n // Err(NomErrorEnum(NomErrorStruct::new(\"\", ErrorKind::OneOf)))\n\n // );\n\n assert_eq!(\n\n lex_identifier(\"asdf,\"),\n\n Err(NomErrorEnum(NomErrorStruct::new(\",\", ErrorKind::OneOf)))\n\n );\n\n }\n\n\n", "file_path": "src/lexer.rs", "rank": 49, "score": 7.4072554127930585 }, { "content": " );\n\n assert_eq!(\n\n lex_string(r#\"\"fail\"#),\n\n Err(NomErrorEnum(NomErrorStruct::new(\"\", ErrorKind::Tag)))\n\n );\n\n assert_eq!(\n\n lex_string(r#\"\"new\\nline\"\"#),\n\n Ok((\"\", Token::String(String::from(\"new\\nline\"))))\n\n );\n\n assert_eq!(\n\n lex_string(r#\"blah\"string\"\"#),\n\n Err(NomErrorEnum(NomErrorStruct::new(\n\n \"blah\\\"string\\\"\",\n\n ErrorKind::Tag\n\n )))\n\n );\n\n }\n\n\n\n #[test]\n\n fn lex_boolean_test() {\n", "file_path": "src/lexer.rs", "rank": 50, "score": 7.323670838300702 }, { "content": "use oxyscheme::*;\n\nuse std::fs;\n\nuse std::path::Path;\n\n\n\n#[test]\n", "file_path": "tests/parser_integration_test.rs", "rank": 51, "score": 6.002611889411757 }, { "content": " assert_eq!(lex_boolean(\"#t\"), Ok((\"\", Token::Boolean(true))));\n\n assert_eq!(lex_boolean(\"#f\"), Ok((\"\", Token::Boolean(false))));\n\n assert_eq!(\n\n lex_boolean(\"#m\"),\n\n Err(NomErrorEnum(NomErrorStruct::new(\"m\", ErrorKind::OneOf)))\n\n );\n\n }\n\n\n\n #[test]\n\n fn lex_character_test() {\n\n assert_eq!(lex_character(\"#\\\\n\\n\"), Ok((\"\\n\", Token::Character('n'))));\n\n assert_eq!(\n\n lex_character(\"#\\\\space\\n\"),\n\n Ok((\"\\n\", Token::Character(' ')))\n\n );\n\n 
assert_eq!(\n\n lex_character(\"#\\\\newline\\n\"),\n\n Ok((\"\\n\", Token::Character('\\n')))\n\n );\n\n }\n", "file_path": "src/lexer.rs", "rank": 52, "score": 5.509799670174283 }, { "content": " /// Wraps a boolean\n\n Boolean(bool),\n\n /// Wraps a number\n\n Number(LispNum),\n\n /// Wraps an identifier in the form of a string slice\n\n Identifier(String),\n\n /// Wraps a punctuator in the form of a string slice\n\n Punctuator(String),\n\n /// Represents whitespace\n\n Whitespace,\n\n /// Represents comments\n\n Comment,\n\n}\n\n\n\n/// Internal representation of numeric types in Scheme\n\n///\n\n/// `LispNum` is an enum wrapping around Rust's `i32` and `f32` types; the only two numeric types\n\n/// we are currently implementing for the Scheme compiler target. More variants will be added in\n\n/// the future.\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum LispNum {\n\n /// Wraps an `i32`\n\n Integer(i32),\n\n /// Wraps an `f32`\n\n Float(f32),\n\n}\n\n\n\n/// Type alias for the common return type for the lexers\n", "file_path": "src/lexer.rs", "rank": 53, "score": 5.2051749275172785 }, { "content": " /// Represents an identifier\n\n Identifier(String),\n\n /// Represents a list without a dot\n\n List(Vec<Datum>),\n\n /// Represents a `cons` block, with a `car` and `cdr`. 
The `car` is represented by a list of\n\n /// `Datum`, and the `cdr` is just a single `Datum`.\n\n DottedPair(Vec<Datum>, Box<Datum>),\n\n /// Represents a quoted `Datum`\n\n Quote(Box<Datum>),\n\n /// Represents a backquoted `Datum`\n\n Backquote(Box<Datum>),\n\n /// Represents a unquoted `Datum`\n\n Unquote(Box<Datum>),\n\n /// Represents a spliced unquoted `Datum`\n\n UnquoteSplice(Box<Datum>),\n\n /// Represents a vector\n\n Vector(Vec<Datum>),\n\n}\n\n\n\n/// Parses a single `Datum` from the token stream\n", "file_path": "src/parser.rs", "rank": 54, "score": 4.897947706000153 }, { "content": " Ok((\"\\n\", Token::Identifier(\"var\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"var \"),\n\n Ok((\" \", Token::Identifier(\"var\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"var)\"),\n\n Ok((\")\", Token::Identifier(\"var\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"var;\"),\n\n Ok((\";\", Token::Identifier(\"var\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"var\\\"\"),\n\n Ok((\"\\\"\", Token::Identifier(\"var\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"he++o \"),\n", "file_path": "src/lexer.rs", "rank": 55, "score": 4.737780141825263 }, { "content": "\n\n #[test]\n\n fn non_peculiar_identifier_test() {\n\n assert_eq!(non_peculiar(\"a\"), Ok((\"\", \"a\")));\n\n assert_eq!(non_peculiar(\"a+\"), Ok((\"\", \"a+\")));\n\n assert_eq!(non_peculiar(\"&a+\"), Ok((\"\", \"&a+\")));\n\n assert_eq!(\n\n non_peculiar(\"+&a+\"),\n\n Err(NomErrorEnum(NomErrorStruct::new(\"+&a+\", ErrorKind::OneOf)))\n\n );\n\n }\n\n\n\n #[test]\n\n fn lex_identifier_test() {\n\n assert_eq!(\n\n lex_identifier(\"...\\n\"),\n\n Ok((\"\\n\", Token::Identifier(\"...\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_identifier(\"var\\n\"),\n", "file_path": "src/lexer.rs", "rank": 56, "score": 4.59736886432907 }, { "content": " Ok((\";\", Token::Number(LispNum::Integer(1))))\n\n );\n\n assert_eq!(\n\n lex_number(\"-1;\"),\n\n 
Ok((\";\", Token::Number(LispNum::Integer(-1))))\n\n );\n\n assert_eq!(\n\n lex_number(\"-1;\"),\n\n Ok((\";\", Token::Number(LispNum::Integer(-1))))\n\n );\n\n assert_eq!(\n\n lex_number(\"4294967296;\"),\n\n Err(NomErrorEnum(NomErrorStruct::new(\";\", ErrorKind::TooLarge)))\n\n );\n\n }\n\n\n\n #[test]\n\n fn lex_punctuator_test() {\n\n assert_eq!(\n\n lex_punctuator(\",3\"),\n", "file_path": "src/lexer.rs", "rank": 57, "score": 4.348845721269844 }, { "content": " Ok((\"3\", Token::Punctuator(\",\".to_string())))\n\n );\n\n assert_eq!(\n\n lex_punctuator(\",@\"),\n\n Ok((\"\", Token::Punctuator(\",@\".to_string())))\n\n );\n\n }\n\n\n\n #[test]\n\n fn lex_whitespace_test() {\n\n assert_eq!(lex_whitespace(\" 3\"), Ok((\"3\", Token::Whitespace)));\n\n assert_eq!(lex_whitespace(\" \\n3\"), Ok((\"3\", Token::Whitespace)));\n\n }\n\n\n\n #[test]\n\n fn lex_comment_test() {\n\n assert_eq!(lex_comment(\"; Blah\"), Ok((\"\", Token::Comment)));\n\n assert_eq!(lex_comment(\"; Blah\\n3\"), Ok((\"3\", Token::Comment)));\n\n }\n\n}\n", "file_path": "src/lexer.rs", "rank": 58, "score": 3.8593043561528293 }, { "content": " #[test]\n\n fn lex_number_test() {\n\n assert_eq!(\n\n lex_number(\"+3.14;\"),\n\n Ok((\";\", Token::Number(LispNum::Float(3.14))))\n\n );\n\n assert_eq!(\n\n lex_number(\"-3.14;\"),\n\n Ok((\";\", Token::Number(LispNum::Float(-3.14))))\n\n );\n\n assert_eq!(\n\n lex_number(\"3.14;\"),\n\n Ok((\";\", Token::Number(LispNum::Float(3.14))))\n\n );\n\n assert_eq!(\n\n lex_number(\".14;\"),\n\n Ok((\";\", Token::Number(LispNum::Float(0.14))))\n\n );\n\n assert_eq!(\n\n lex_number(\"1;\"),\n", "file_path": "src/lexer.rs", "rank": 59, "score": 1.9998948558895324 } ]
Rust
src/storage/assembler.rs
ProgVal/smoltcp
1bde6e7ec45684019f191714c158e2f496eb49d0
use core::fmt; #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct Contig { hole_size: usize, data_size: usize } impl fmt::Display for Contig { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.has_hole() { write!(f, "({})", self.hole_size)?; } if self.has_hole() && self.has_data() { write!(f, " ")?; } if self.has_data() { write!(f, "{}", self.data_size)?; } Ok(()) } } impl Contig { fn empty() -> Contig { Contig { hole_size: 0, data_size: 0 } } fn hole(size: usize) -> Contig { Contig { hole_size: size, data_size: 0 } } fn hole_and_data(hole_size: usize, data_size: usize) -> Contig { Contig { hole_size, data_size } } fn has_hole(&self) -> bool { self.hole_size != 0 } fn has_data(&self) -> bool { self.data_size != 0 } fn total_size(&self) -> usize { self.hole_size + self.data_size } fn is_empty(&self) -> bool { self.total_size() == 0 } fn expand_data_by(&mut self, size: usize) { self.data_size += size; } fn shrink_hole_by(&mut self, size: usize) { self.hole_size -= size; } fn shrink_hole_to(&mut self, size: usize) { debug_assert!(self.hole_size >= size); let total_size = self.total_size(); self.hole_size = size; self.data_size = total_size - size; } } const CONTIG_COUNT: usize = 4; #[derive(Debug)] #[cfg_attr(test, derive(PartialEq, Eq))] pub struct Assembler { contigs: [Contig; CONTIG_COUNT] } impl fmt::Display for Assembler { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[ ")?; for contig in self.contigs.iter() { if contig.is_empty() { break } write!(f, "{} ", contig)?; } write!(f, "]")?; Ok(()) } } impl Assembler { pub fn new(size: usize) -> Assembler { let mut contigs = [Contig::empty(); CONTIG_COUNT]; contigs[0] = Contig::hole(size); Assembler { contigs } } #[allow(dead_code)] pub(crate) fn total_size(&self) -> usize { self.contigs .iter() .map(|contig| contig.total_size()) .sum() } fn front(&self) -> Contig { self.contigs[0] } fn back(&self) -> Contig { self.contigs[self.contigs.len() - 1] } pub fn is_empty(&self) -> bool { 
!self.front().has_data() } fn remove_contig_at(&mut self, at: usize) -> &mut Contig { debug_assert!(!self.contigs[at].is_empty()); for i in at..self.contigs.len() - 1 { self.contigs[i] = self.contigs[i + 1]; if !self.contigs[i].has_data() { self.contigs[i + 1] = Contig::empty(); return &mut self.contigs[i] } } self.contigs[at] = Contig::empty(); &mut self.contigs[at] } fn add_contig_at(&mut self, at: usize) -> Result<&mut Contig, ()> { debug_assert!(!self.contigs[at].is_empty()); if !self.back().is_empty() { return Err(()) } for i in (at + 1..self.contigs.len()).rev() { self.contigs[i] = self.contigs[i - 1]; } self.contigs[at] = Contig::empty(); Ok(&mut self.contigs[at]) } pub fn add(&mut self, mut offset: usize, mut size: usize) -> Result<(), ()> { let mut index = 0; while index != self.contigs.len() && size != 0 { let contig = self.contigs[index]; if offset >= contig.total_size() { index += 1; } else if offset == 0 && size >= contig.hole_size && index > 0 { self.contigs[index - 1].expand_data_by(contig.total_size()); self.remove_contig_at(index); index += 0; } else if offset == 0 && size < contig.hole_size && index > 0 { self.contigs[index - 1].expand_data_by(size); self.contigs[index].shrink_hole_by(size); index += 1; } else if offset <= contig.hole_size && offset + size >= contig.hole_size { self.contigs[index].shrink_hole_to(offset); index += 1; } else if offset + size >= contig.hole_size { index += 1; } else if offset + size < contig.hole_size { self.contigs[index].shrink_hole_by(offset + size); let inserted = self.add_contig_at(index)?; *inserted = Contig::hole_and_data(offset, size); index += 2; } else { unreachable!() } if offset >= contig.total_size() { offset = offset.saturating_sub(contig.total_size()); } else { size = (offset + size).saturating_sub(contig.total_size()); offset = 0; } } debug_assert!(size == 0); Ok(()) } pub fn remove_front(&mut self) -> Option<usize> { let front = self.front(); if front.has_hole() { None } else { let last_hole = 
self.remove_contig_at(0); last_hole.hole_size += front.data_size; debug_assert!(front.data_size > 0); Some(front.data_size) } } } #[cfg(test)] mod test { use std::vec::Vec; use super::*; impl From<Vec<(usize, usize)>> for Assembler { fn from(vec: Vec<(usize, usize)>) -> Assembler { let mut contigs = [Contig::empty(); CONTIG_COUNT]; for (i, &(hole_size, data_size)) in vec.iter().enumerate() { contigs[i] = Contig { hole_size, data_size }; } Assembler { contigs } } } macro_rules! contigs { [$( $x:expr ),*] => ({ Assembler::from(vec![$( $x ),*]) }) } #[test] fn test_new() { let assr = Assembler::new(16); assert_eq!(assr.total_size(), 16); assert_eq!(assr, contigs![(16, 0)]); } #[test] fn test_empty_add_full() { let mut assr = Assembler::new(16); assert_eq!(assr.add(0, 16), Ok(())); assert_eq!(assr, contigs![(0, 16)]); } #[test] fn test_empty_add_front() { let mut assr = Assembler::new(16); assert_eq!(assr.add(0, 4), Ok(())); assert_eq!(assr, contigs![(0, 4), (12, 0)]); } #[test] fn test_empty_add_back() { let mut assr = Assembler::new(16); assert_eq!(assr.add(12, 4), Ok(())); assert_eq!(assr, contigs![(12, 4)]); } #[test] fn test_empty_add_mid() { let mut assr = Assembler::new(16); assert_eq!(assr.add(4, 8), Ok(())); assert_eq!(assr, contigs![(4, 8), (4, 0)]); } #[test] fn test_partial_add_front() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(0, 4), Ok(())); assert_eq!(assr, contigs![(0, 12), (4, 0)]); } #[test] fn test_partial_add_back() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(12, 4), Ok(())); assert_eq!(assr, contigs![(4, 12)]); } #[test] fn test_partial_add_front_overlap() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(0, 8), Ok(())); assert_eq!(assr, contigs![(0, 12), (4, 0)]); } #[test] fn test_partial_add_front_overlap_split() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(2, 6), Ok(())); assert_eq!(assr, contigs![(2, 10), (4, 0)]); } #[test] fn test_partial_add_back_overlap() { let mut assr 
= contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(8, 8), Ok(())); assert_eq!(assr, contigs![(4, 12)]); } #[test] fn test_partial_add_back_overlap_split() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(10, 4), Ok(())); assert_eq!(assr, contigs![(4, 10), (2, 0)]); } #[test] fn test_partial_add_both_overlap() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(0, 16), Ok(())); assert_eq!(assr, contigs![(0, 16)]); } #[test] fn test_partial_add_both_overlap_split() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(2, 12), Ok(())); assert_eq!(assr, contigs![(2, 12), (2, 0)]); } #[test] fn test_empty_remove_front() { let mut assr = contigs![(12, 0)]; assert_eq!(assr.remove_front(), None); } #[test] fn test_trailing_hole_remove_front() { let mut assr = contigs![(0, 4), (8, 0)]; assert_eq!(assr.remove_front(), Some(4)); assert_eq!(assr, contigs![(12, 0)]); } #[test] fn test_trailing_data_remove_front() { let mut assr = contigs![(0, 4), (4, 4)]; assert_eq!(assr.remove_front(), Some(4)); assert_eq!(assr, contigs![(4, 4), (4, 0)]); } }
use core::fmt; #[derive(Debug, Clone, Copy, PartialEq, Eq)] struct Contig { hole_size: usize, data_size: usize } impl fmt::Display for Contig { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.has_hole() { write!(f, "({})", self.hole_size)?; } if self.has_hole() && self.has_data() { write!(f, " ")?; } if self.has_data() { write!(f, "{}", self.data_size)?; } Ok(()) } } impl Contig { fn empty() -> Contig { Contig { hole_size: 0, data_size: 0 } } fn hole(size: usize) -> Contig { Contig { hole_size: size, data_size: 0 } } fn hole_and_data(hole_size: usize, data_size: usize) -> Contig { Contig { hole_size, data_size } } fn has_hole(&self) -> bool { self.hole_size != 0 } fn has_data(&self) -> bool { self.data_size != 0 } fn total_size(&self) -> usize { self.hole_size + self.data_size } fn is_empty(&self) -> bool { self.total_size() == 0 } fn expand_data_by(&mut self, size: usize) { self.data_size += size; } fn shrink_hole_by(&mut self, size: usize) { self.hole_size -= size; } fn shrink_hole_to(&mut self, size: usize) { debug_assert!(self.hole_size >= size); let total_size = self.total_size(); self.hole_size = size; self.data_size = total_size - size; } } const CONTIG_COUNT: usize = 4; #[derive(Debug)] #[cfg_attr(test, derive(PartialEq, Eq))] pub struct Assembler { contigs: [Contig; CONTIG_COUNT] } impl fmt::Display for Assembler { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[ ")?; for contig in self.contigs.iter() { if contig.is_empty() { break } write!(f, "{} ", contig)?; } write!(f, "]")?; Ok(()) } } impl Assembler { pub fn new(size: usize) -> Assembler { let mut contigs = [Contig::empty(); CONTIG_COUNT]; contigs[0] = Contig::hole(size); Assembler { contigs } } #[allow(dead_code)] pub(crate) fn total_size(&self) -> usize { self.contigs .iter() .map(|contig| contig.total_size()) .sum() } fn front(&self) -> Contig { self.contigs[0] } fn back(&self) -> Contig { self.contigs[self.contigs.len() - 1] } pub fn is_empty(&self) -> bool { 
!self.front().has_data() } fn remove_contig_at(&mut self, at: usize) -> &mut Contig { debug_assert!(!self.contigs[at].is_empty()); for i in at..self.contigs.len() - 1 { self.contigs[i] = self.contigs[i + 1]; if !self.contigs[i].has_data() { self.contigs[i + 1] = Contig::empty(); return &mut self.contigs[i] } } self.contigs[at] = Contig::empty(); &mut self.contigs[at] } fn add_contig_at(&mut self, at: usize) -> Result<&mut Contig, ()> { debug_assert!(!self.contigs[at].is_empty()); if !self.back().is_empty() { return Err(()) } for i in (at + 1..self.contigs.len()).rev() { self.contigs[i] = self.contigs[i - 1]; } self.contigs[at] = Contig::empty(); Ok(&mut self.contigs[at]) } pub fn add(&mut self, mut offset: usize, mut size: usize) -> Result<(), ()> { let mut index = 0; while index != self.contigs.len() && size != 0 { let contig = self.contigs[index]; if offset >= contig.total_size() { index += 1; } else if offset == 0 && size >= contig.hole_size && index > 0 { self.contigs[index - 1].expand_data_by(contig.total_size()); self.remove_contig_at(index); index += 0; } else if offset == 0 && size < contig.hole_size && index > 0 { self.contigs[index - 1].expand_data_by(size); self.contigs[index].shrink_hole_by(size); index += 1; } else
if offset >= contig.total_size() { offset = offset.saturating_sub(contig.total_size()); } else { size = (offset + size).saturating_sub(contig.total_size()); offset = 0; } } debug_assert!(size == 0); Ok(()) } pub fn remove_front(&mut self) -> Option<usize> { let front = self.front(); if front.has_hole() { None } else { let last_hole = self.remove_contig_at(0); last_hole.hole_size += front.data_size; debug_assert!(front.data_size > 0); Some(front.data_size) } } } #[cfg(test)] mod test { use std::vec::Vec; use super::*; impl From<Vec<(usize, usize)>> for Assembler { fn from(vec: Vec<(usize, usize)>) -> Assembler { let mut contigs = [Contig::empty(); CONTIG_COUNT]; for (i, &(hole_size, data_size)) in vec.iter().enumerate() { contigs[i] = Contig { hole_size, data_size }; } Assembler { contigs } } } macro_rules! contigs { [$( $x:expr ),*] => ({ Assembler::from(vec![$( $x ),*]) }) } #[test] fn test_new() { let assr = Assembler::new(16); assert_eq!(assr.total_size(), 16); assert_eq!(assr, contigs![(16, 0)]); } #[test] fn test_empty_add_full() { let mut assr = Assembler::new(16); assert_eq!(assr.add(0, 16), Ok(())); assert_eq!(assr, contigs![(0, 16)]); } #[test] fn test_empty_add_front() { let mut assr = Assembler::new(16); assert_eq!(assr.add(0, 4), Ok(())); assert_eq!(assr, contigs![(0, 4), (12, 0)]); } #[test] fn test_empty_add_back() { let mut assr = Assembler::new(16); assert_eq!(assr.add(12, 4), Ok(())); assert_eq!(assr, contigs![(12, 4)]); } #[test] fn test_empty_add_mid() { let mut assr = Assembler::new(16); assert_eq!(assr.add(4, 8), Ok(())); assert_eq!(assr, contigs![(4, 8), (4, 0)]); } #[test] fn test_partial_add_front() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(0, 4), Ok(())); assert_eq!(assr, contigs![(0, 12), (4, 0)]); } #[test] fn test_partial_add_back() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(12, 4), Ok(())); assert_eq!(assr, contigs![(4, 12)]); } #[test] fn test_partial_add_front_overlap() { let mut assr = 
contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(0, 8), Ok(())); assert_eq!(assr, contigs![(0, 12), (4, 0)]); } #[test] fn test_partial_add_front_overlap_split() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(2, 6), Ok(())); assert_eq!(assr, contigs![(2, 10), (4, 0)]); } #[test] fn test_partial_add_back_overlap() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(8, 8), Ok(())); assert_eq!(assr, contigs![(4, 12)]); } #[test] fn test_partial_add_back_overlap_split() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(10, 4), Ok(())); assert_eq!(assr, contigs![(4, 10), (2, 0)]); } #[test] fn test_partial_add_both_overlap() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(0, 16), Ok(())); assert_eq!(assr, contigs![(0, 16)]); } #[test] fn test_partial_add_both_overlap_split() { let mut assr = contigs![(4, 8), (4, 0)]; assert_eq!(assr.add(2, 12), Ok(())); assert_eq!(assr, contigs![(2, 12), (2, 0)]); } #[test] fn test_empty_remove_front() { let mut assr = contigs![(12, 0)]; assert_eq!(assr.remove_front(), None); } #[test] fn test_trailing_hole_remove_front() { let mut assr = contigs![(0, 4), (8, 0)]; assert_eq!(assr.remove_front(), Some(4)); assert_eq!(assr, contigs![(12, 0)]); } #[test] fn test_trailing_data_remove_front() { let mut assr = contigs![(0, 4), (4, 4)]; assert_eq!(assr.remove_front(), Some(4)); assert_eq!(assr, contigs![(4, 4), (4, 0)]); } }
if offset <= contig.hole_size && offset + size >= contig.hole_size { self.contigs[index].shrink_hole_to(offset); index += 1; } else if offset + size >= contig.hole_size { index += 1; } else if offset + size < contig.hole_size { self.contigs[index].shrink_hole_by(offset + size); let inserted = self.add_contig_at(index)?; *inserted = Contig::hole_and_data(offset, size); index += 2; } else { unreachable!() }
if_condition
[ { "content": "pub fn parse_middleware_options<D>(matches: &mut Matches, device: D, loopback: bool)\n\n -> FaultInjector<EthernetTracer<PcapWriter<D, Rc<PcapSink>>>>\n\n where D: for<'a> Device<'a>\n\n{\n\n let drop_chance = matches.opt_str(\"drop-chance\").map(|s| u8::from_str(&s).unwrap())\n\n .unwrap_or(0);\n\n let corrupt_chance = matches.opt_str(\"corrupt-chance\").map(|s| u8::from_str(&s).unwrap())\n\n .unwrap_or(0);\n\n let size_limit = matches.opt_str(\"size-limit\").map(|s| usize::from_str(&s).unwrap())\n\n .unwrap_or(0);\n\n let tx_rate_limit = matches.opt_str(\"tx-rate-limit\").map(|s| u64::from_str(&s).unwrap())\n\n .unwrap_or(0);\n\n let rx_rate_limit = matches.opt_str(\"rx-rate-limit\").map(|s| u64::from_str(&s).unwrap())\n\n .unwrap_or(0);\n\n let shaping_interval = matches.opt_str(\"shaping-interval\").map(|s| u64::from_str(&s).unwrap())\n\n .unwrap_or(0);\n\n\n\n let pcap_writer: Box<io::Write>;\n\n if let Some(pcap_filename) = matches.opt_str(\"pcap\") {\n\n pcap_writer = Box::new(File::create(pcap_filename).expect(\"cannot open file\"))\n", "file_path": "examples/utils.rs", "rank": 1, "score": 163150.08353271414 }, { "content": "#[cfg(feature = \"log\")]\n\npub fn setup_logging_with_clock<F>(filter: &str, since_startup: F)\n\n where F: Fn() -> Instant + Send + Sync + 'static {\n\n LogBuilder::new()\n\n .format(move |record: &LogRecord| {\n\n let elapsed = since_startup();\n\n let timestamp = format!(\"[{}]\", elapsed);\n\n if record.target().starts_with(\"smoltcp::\") {\n\n format!(\"\\x1b[0m{} ({}): {}\\x1b[0m\", timestamp,\n\n record.target().replace(\"smoltcp::\", \"\"), record.args())\n\n } else if record.level() == LogLevel::Trace {\n\n let message = format!(\"{}\", record.args());\n\n format!(\"\\x1b[37m{} {}\\x1b[0m\", timestamp,\n\n message.replace(\"\\n\", \"\\n \"))\n\n } else {\n\n format!(\"\\x1b[32m{} ({}): {}\\x1b[0m\", timestamp,\n\n record.target(), record.args())\n\n }\n\n })\n\n .filter(None, LogLevelFilter::Trace)\n\n 
.parse(filter)\n\n .parse(&env::var(\"RUST_LOG\").unwrap_or(\"\".to_owned()))\n\n .init()\n\n .unwrap();\n\n}\n\n\n", "file_path": "examples/utils.rs", "rank": 2, "score": 153094.94145522526 }, { "content": "pub fn add_middleware_options(opts: &mut Options, _free: &mut Vec<&str>) {\n\n opts.optopt(\"\", \"pcap\", \"Write a packet capture file\", \"FILE\");\n\n opts.optopt(\"\", \"drop-chance\", \"Chance of dropping a packet (%)\", \"CHANCE\");\n\n opts.optopt(\"\", \"corrupt-chance\", \"Chance of corrupting a packet (%)\", \"CHANCE\");\n\n opts.optopt(\"\", \"size-limit\", \"Drop packets larger than given size (octets)\", \"SIZE\");\n\n opts.optopt(\"\", \"tx-rate-limit\", \"Drop packets after transmit rate exceeds given limit \\\n\n (packets per interval)\", \"RATE\");\n\n opts.optopt(\"\", \"rx-rate-limit\", \"Drop packets after transmit rate exceeds given limit \\\n\n (packets per interval)\", \"RATE\");\n\n opts.optopt(\"\", \"shaping-interval\", \"Sets the interval for rate limiting (ms)\", \"RATE\");\n\n}\n\n\n", "file_path": "examples/utils.rs", "rank": 3, "score": 144803.70201491303 }, { "content": "pub fn add_tap_options(_opts: &mut Options, free: &mut Vec<&str>) {\n\n free.push(\"INTERFACE\");\n\n}\n\n\n", "file_path": "examples/utils.rs", "rank": 4, "score": 144803.70201491303 }, { "content": "pub fn pretty_print_ip_payload<T: Into<Repr>>(f: &mut fmt::Formatter, indent: &mut PrettyIndent,\n\n ip_repr: T, payload: &[u8]) -> fmt::Result {\n\n #[cfg(feature = \"proto-ipv4\")]\n\n use wire::Icmpv4Packet;\n\n #[cfg(feature = \"proto-ipv4\")]\n\n use super::pretty_print::PrettyPrint;\n\n use wire::{TcpPacket, TcpRepr, UdpPacket, UdpRepr};\n\n use wire::ip::checksum::format_checksum;\n\n\n\n let checksum_caps = ChecksumCapabilities::ignored();\n\n let repr = ip_repr.into();\n\n match repr.protocol() {\n\n #[cfg(feature = \"proto-ipv4\")]\n\n Protocol::Icmp => {\n\n indent.increase(f)?;\n\n Icmpv4Packet::<&[u8]>::pretty_print(&payload.as_ref(), f, indent)\n\n 
}\n\n Protocol::Udp => {\n\n indent.increase(f)?;\n\n match UdpPacket::<&[u8]>::new_checked(payload.as_ref()) {\n", "file_path": "src/wire/ip.rs", "rank": 5, "score": 139940.02165141655 }, { "content": "#[cfg(feature = \"phy-tap_interface\")]\n\npub fn parse_tap_options(matches: &mut Matches) -> TapInterface {\n\n let interface = matches.free.remove(0);\n\n TapInterface::new(&interface).unwrap()\n\n}\n\n\n", "file_path": "examples/utils.rs", "rank": 6, "score": 139468.6859565696 }, { "content": "/// Wait until given file descriptor becomes readable, but no longer than given timeout.\n\npub fn wait(fd: RawFd, duration: Option<Duration>) -> io::Result<()> {\n\n unsafe {\n\n let mut readfds = mem::uninitialized::<libc::fd_set>();\n\n libc::FD_ZERO(&mut readfds);\n\n libc::FD_SET(fd, &mut readfds);\n\n\n\n let mut writefds = mem::uninitialized::<libc::fd_set>();\n\n libc::FD_ZERO(&mut writefds);\n\n\n\n let mut exceptfds = mem::uninitialized::<libc::fd_set>();\n\n libc::FD_ZERO(&mut exceptfds);\n\n\n\n let mut timeout = libc::timeval { tv_sec: 0, tv_usec: 0 };\n\n let timeout_ptr =\n\n if let Some(duration) = duration {\n\n timeout.tv_usec = (duration.total_millis() * 1_000) as libc::suseconds_t;\n\n &mut timeout as *mut _\n\n } else {\n\n ptr::null_mut()\n\n };\n\n\n\n let res = libc::select(fd + 1, &mut readfds, &mut writefds, &mut exceptfds, timeout_ptr);\n\n if res == -1 { return Err(io::Error::last_os_error()) }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/phy/sys/mod.rs", "rank": 7, "score": 123880.14040938209 }, { "content": "#[cfg(feature = \"log\")]\n\npub fn setup_logging(filter: &str) {\n\n setup_logging_with_clock(filter, move || {\n\n Instant::now()\n\n })\n\n}\n\n\n", "file_path": "examples/utils.rs", "rank": 8, "score": 106102.46415601576 }, { "content": "struct StmPhyTxToken<'a>(&'a mut [u8]);\n\n\n\nimpl<'a> phy::TxToken for StmPhyTxToken<'a> {\n\n fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R>\n\n where F: FnOnce(&mut 
[u8]) -> Result<R>\n\n {\n\n let result = f(&mut self.0[..len]);\n\n println!(\"tx called {}\", len);\n\n // TODO: send packet out\n\n result\n\n }\n\n}\n\n```\n\n*/\n\n\n\nuse Result;\n\nuse time::Instant;\n\n\n\n#[cfg(all(any(feature = \"phy-raw_socket\", feature = \"phy-tap_interface\"), unix))]\n\nmod sys;\n", "file_path": "src/phy/mod.rs", "rank": 9, "score": 101277.57251008305 }, { "content": "// We use our own RNG to stay compatible with #![no_std].\n\n// The use of the RNG below has a slight bias, but it doesn't matter.\n\nfn xorshift32(state: &mut u32) -> u32 {\n\n let mut x = *state;\n\n x ^= x << 13;\n\n x ^= x >> 17;\n\n x ^= x << 5;\n\n *state = x;\n\n x\n\n}\n\n\n\n// This could be fixed once associated consts are stable.\n\nconst MTU: usize = 1536;\n\n\n", "file_path": "src/phy/fault_injector.rs", "rank": 10, "score": 99507.53103010438 }, { "content": "pub fn create_options() -> (Options, Vec<&'static str>) {\n\n let mut opts = Options::new();\n\n opts.optflag(\"h\", \"help\", \"print this help menu\");\n\n (opts, Vec::new())\n\n}\n\n\n", "file_path": "examples/utils.rs", "rank": 11, "score": 96462.93501254587 }, { "content": "#[cfg(any(feature = \"proto-ipv4\", feature = \"proto-ipv6\"))]\n\nfn icmp_reply_payload_len(len: usize, mtu: usize, header_len: usize) -> usize {\n\n // Send back as much of the original payload as will fit within\n\n // the minimum MTU required by IPv4. 
See RFC 1812 § 4.3.2.3 for\n\n // more details.\n\n //\n\n // Since the entire network layer packet must fit within the minumum\n\n // MTU supported, the payload must not exceed the following:\n\n //\n\n // <min mtu> - IP Header Size * 2 - ICMPv4 DstUnreachable hdr size\n\n cmp::min(len, mtu - header_len * 2 - 8)\n\n}\n\n\n\nimpl<'b, 'c, DeviceT> Interface<'b, 'c, DeviceT>\n\n where DeviceT: for<'d> Device<'d> {\n\n /// Get the Ethernet address of the interface.\n\n pub fn ethernet_addr(&self) -> EthernetAddress {\n\n self.inner.ethernet_addr\n\n }\n\n\n\n /// Set the Ethernet address of the interface.\n", "file_path": "src/iface/ethernet.rs", "rank": 12, "score": 92582.23885410142 }, { "content": "#[cfg(all(target_os = \"linux\", any(feature = \"phy-tap_interface\", feature = \"phy-raw_socket\")))]\n\nfn ifreq_ioctl(lower: libc::c_int, ifreq: &mut ifreq,\n\n cmd: libc::c_ulong) -> io::Result<libc::c_int> {\n\n unsafe {\n\n let res = libc::ioctl(lower, cmd, ifreq as *mut ifreq);\n\n if res == -1 { return Err(io::Error::last_os_error()) }\n\n }\n\n\n\n Ok(ifreq.ifr_data)\n\n}\n", "file_path": "src/phy/sys/mod.rs", "rank": 13, "score": 89064.01060098238 }, { "content": "pub fn parse_options(options: &Options, free: Vec<&str>) -> Matches {\n\n match options.parse(env::args().skip(1)) {\n\n Err(err) => {\n\n println!(\"{}\", err);\n\n process::exit(1)\n\n }\n\n Ok(matches) => {\n\n if matches.opt_present(\"h\") || matches.free.len() != free.len() {\n\n let brief = format!(\"Usage: {} [OPTION]... 
{}\",\n\n env::args().nth(0).unwrap(), free.join(\" \"));\n\n print!(\"{}\", options.usage(&brief));\n\n process::exit(if matches.free.len() != free.len() { 1 } else { 0 })\n\n }\n\n matches\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/utils.rs", "rank": 14, "score": 88649.68199750312 }, { "content": "/// A conversion trait for network sockets.\n\npub trait AnySocket<'a, 'b>: SocketSession + Sized {\n\n fn downcast<'c>(socket_ref: SocketRef<'c, Socket<'a, 'b>>) ->\n\n Option<SocketRef<'c, Self>>;\n\n}\n\n\n\nmacro_rules! from_socket {\n\n ($socket:ty, $variant:ident) => {\n\n impl<'a, 'b> AnySocket<'a, 'b> for $socket {\n\n fn downcast<'c>(ref_: SocketRef<'c, Socket<'a, 'b>>) ->\n\n Option<SocketRef<'c, Self>> {\n\n match SocketRef::into_inner(ref_) {\n\n &mut Socket::$variant(ref mut socket) => Some(SocketRef::new(socket)),\n\n _ => None,\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"socket-raw\")]\n\nfrom_socket!(RawSocket<'a, 'b>, Raw);\n\n#[cfg(all(feature = \"socket-icmp\", any(feature = \"proto-ipv4\", feature = \"proto-ipv6\")))]\n\nfrom_socket!(IcmpSocket<'a, 'b>, Icmp);\n\n#[cfg(feature = \"socket-udp\")]\n\nfrom_socket!(UdpSocket<'a, 'b>, Udp);\n\n#[cfg(feature = \"socket-tcp\")]\n\nfrom_socket!(TcpSocket<'a>, Tcp);\n", "file_path": "src/socket/mod.rs", "rank": 15, "score": 85870.28152269188 }, { "content": "type Result<T> = result::Result<T, ()>;\n\n\n", "file_path": "src/parsers.rs", "rank": 16, "score": 81898.6004738562 }, { "content": "struct Parser<'a> {\n\n data: &'a [u8],\n\n pos: usize\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n\n fn new(data: &'a str) -> Parser<'a> {\n\n Parser {\n\n data: data.as_bytes(),\n\n pos: 0\n\n }\n\n }\n\n\n\n fn lookahead_char(&self, ch: u8) -> bool {\n\n if self.pos < self.data.len() {\n\n self.data[self.pos] == ch\n\n } else {\n\n false\n\n }\n\n }\n", "file_path": "src/parsers.rs", "rank": 17, "score": 64251.26910808977 }, { "content": "fn main() {\n\n let ifname = env::args().nth(1).unwrap();\n\n let mut socket 
= RawSocket::new(ifname.as_ref()).unwrap();\n\n loop {\n\n phy_wait(socket.as_raw_fd(), None).unwrap();\n\n let (rx_token, _) = socket.receive().unwrap();\n\n rx_token.consume(Instant::now(), |buffer| {\n\n println!(\"{}\", PrettyPrinter::<EthernetFrame<&[u8]>>::new(\"\", &buffer));\n\n Ok(())\n\n }).unwrap();\n\n }\n\n}\n", "file_path": "examples/tcpdump.rs", "rank": 18, "score": 64101.05960384787 }, { "content": "fn main() {\n\n utils::setup_logging(\"warn\");\n\n\n\n let (mut opts, mut free) = utils::create_options();\n\n utils::add_tap_options(&mut opts, &mut free);\n\n utils::add_middleware_options(&mut opts, &mut free);\n\n opts.optopt(\"c\", \"count\", \"Amount of echo request packets to send (default: 4)\", \"COUNT\");\n\n opts.optopt(\"i\", \"interval\",\n\n \"Interval between successive packets sent (seconds) (default: 1)\", \"INTERVAL\");\n\n opts.optopt(\"\", \"timeout\",\n\n \"Maximum wait duration for an echo response packet (seconds) (default: 5)\",\n\n \"TIMEOUT\");\n\n free.push(\"ADDRESS\");\n\n\n\n let mut matches = utils::parse_options(&opts, free);\n\n let device = utils::parse_tap_options(&mut matches);\n\n let fd = device.as_raw_fd();\n\n let device = utils::parse_middleware_options(&mut matches, device, /*loopback=*/false);\n\n let device_caps = device.capabilities();\n\n let address = IpAddress::from_str(&matches.free[0]).expect(\"invalid address format\");\n", "file_path": "examples/ping.rs", "rank": 19, "score": 64101.05960384787 }, { "content": "fn main() {\n\n #[cfg(feature = \"log\")]\n\n utils::setup_logging(\"info\");\n\n\n\n let (mut opts, mut free) = utils::create_options();\n\n utils::add_tap_options(&mut opts, &mut free);\n\n utils::add_middleware_options(&mut opts, &mut free);\n\n free.push(\"MODE\");\n\n\n\n let mut matches = utils::parse_options(&opts, free);\n\n let device = utils::parse_tap_options(&mut matches);\n\n let fd = device.as_raw_fd();\n\n let device = utils::parse_middleware_options(&mut matches, device, 
/*loopback=*/false);\n\n let mode = match matches.free[0].as_ref() {\n\n \"reader\" => Client::Reader,\n\n \"writer\" => Client::Writer,\n\n _ => panic!(\"invalid mode\")\n\n };\n\n\n\n thread::spawn(move || client(mode));\n", "file_path": "examples/benchmark.rs", "rank": 20, "score": 64101.05960384787 }, { "content": "fn main() {\n\n let clock = mock::Clock::new();\n\n let device = Loopback::new();\n\n\n\n #[cfg(feature = \"std\")]\n\n let device = {\n\n let clock = clock.clone();\n\n utils::setup_logging_with_clock(\"\", move || clock.elapsed());\n\n\n\n let (mut opts, mut free) = utils::create_options();\n\n utils::add_middleware_options(&mut opts, &mut free);\n\n\n\n let mut matches = utils::parse_options(&opts, free);\n\n let device = utils::parse_middleware_options(&mut matches, device, /*loopback=*/true);\n\n\n\n device\n\n };\n\n\n\n let mut neighbor_cache_entries = [None; 8];\n\n let mut neighbor_cache = NeighborCache::new(&mut neighbor_cache_entries[..]);\n", "file_path": "examples/loopback.rs", "rank": 21, "score": 64101.05960384787 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let program = args[0].clone();\n\n\n\n let mut opts = Options::new();\n\n opts.optflag(\"h\", \"help\", \"print this help menu\");\n\n opts.optopt(\"t\", \"link-type\", \"set link type (one of: ethernet ip)\", \"TYPE\");\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(e) => {\n\n eprintln!(\"{}\", e);\n\n return\n\n }\n\n };\n\n\n\n let link_type =\n\n match matches.opt_str(\"t\").as_ref().map(|s| &s[..]) {\n\n Some(\"ethernet\") => Some(PcapLinkType::Ethernet),\n\n Some(\"ip\") => Some(PcapLinkType::Ip),\n", "file_path": "utils/packet2pcap.rs", "rank": 22, "score": 64101.05960384787 }, { "content": "fn main() {\n\n utils::setup_logging(\"\");\n\n\n\n let (mut opts, mut free) = utils::create_options();\n\n utils::add_tap_options(&mut opts, &mut free);\n\n utils::add_middleware_options(&mut opts, &mut free);\n\n\n\n 
let mut matches = utils::parse_options(&opts, free);\n\n let device = utils::parse_tap_options(&mut matches);\n\n let fd = device.as_raw_fd();\n\n let device = utils::parse_middleware_options(&mut matches, device, /*loopback=*/false);\n\n\n\n let neighbor_cache = NeighborCache::new(BTreeMap::new());\n\n\n\n let udp_rx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 64]);\n\n let udp_tx_buffer = UdpSocketBuffer::new(vec![UdpPacketMetadata::EMPTY], vec![0; 128]);\n\n let udp_socket = UdpSocket::new(udp_rx_buffer, udp_tx_buffer);\n\n\n\n let tcp1_rx_buffer = TcpSocketBuffer::new(vec![0; 64]);\n\n let tcp1_tx_buffer = TcpSocketBuffer::new(vec![0; 128]);\n", "file_path": "examples/server.rs", "rank": 23, "score": 64101.05960384787 }, { "content": "fn main() {\n\n utils::setup_logging(\"\");\n\n\n\n let (mut opts, mut free) = utils::create_options();\n\n utils::add_tap_options(&mut opts, &mut free);\n\n utils::add_middleware_options(&mut opts, &mut free);\n\n free.push(\"ADDRESS\");\n\n free.push(\"PORT\");\n\n\n\n let mut matches = utils::parse_options(&opts, free);\n\n let device = utils::parse_tap_options(&mut matches);\n\n let fd = device.as_raw_fd();\n\n let device = utils::parse_middleware_options(&mut matches, device, /*loopback=*/false);\n\n let address = IpAddress::from_str(&matches.free[0]).expect(\"invalid address format\");\n\n let port = u16::from_str(&matches.free[1]).expect(\"invalid port format\");\n\n\n\n let neighbor_cache = NeighborCache::new(BTreeMap::new());\n\n\n\n let tcp_rx_buffer = TcpSocketBuffer::new(vec![0; 64]);\n\n let tcp_tx_buffer = TcpSocketBuffer::new(vec![0; 128]);\n", "file_path": "examples/client.rs", "rank": 24, "score": 64101.05960384787 }, { "content": "fn main() {\n\n utils::setup_logging(\"\");\n\n\n\n let (mut opts, mut free) = utils::create_options();\n\n utils::add_tap_options(&mut opts, &mut free);\n\n utils::add_middleware_options(&mut opts, &mut free);\n\n free.push(\"ADDRESS\");\n\n 
free.push(\"URL\");\n\n\n\n let mut matches = utils::parse_options(&opts, free);\n\n let device = utils::parse_tap_options(&mut matches);\n\n let fd = device.as_raw_fd();\n\n let device = utils::parse_middleware_options(&mut matches, device, /*loopback=*/false);\n\n let address = IpAddress::from_str(&matches.free[0]).expect(\"invalid address format\");\n\n let url = Url::parse(&matches.free[1]).expect(\"invalid url format\");\n\n\n\n\n\n let neighbor_cache = NeighborCache::new(BTreeMap::new());\n\n\n\n let tcp_rx_buffer = TcpSocketBuffer::new(vec![0; 1024]);\n", "file_path": "examples/httpclient.rs", "rank": 25, "score": 64101.05960384787 }, { "content": "#[derive(Debug, Default, Clone, Copy)]\n\nstruct Config {\n\n corrupt_pct: u8,\n\n drop_pct: u8,\n\n reorder_pct: u8,\n\n max_size: usize,\n\n max_tx_rate: u64,\n\n max_rx_rate: u64,\n\n interval: Duration,\n\n}\n\n\n", "file_path": "src/phy/fault_injector.rs", "rank": 26, "score": 63373.60937278336 }, { "content": "#[derive(Debug, Clone)]\n\nstruct State {\n\n rng_seed: u32,\n\n refilled_at: Instant,\n\n tx_bucket: u64,\n\n rx_bucket: u64,\n\n}\n\n\n\nimpl State {\n\n fn maybe(&mut self, pct: u8) -> bool {\n\n xorshift32(&mut self.rng_seed) % 100 < pct as u32\n\n }\n\n\n\n fn corrupt<T: AsMut<[u8]>>(&mut self, mut buffer: T) {\n\n let buffer = buffer.as_mut();\n\n // We introduce a single bitflip, as the most likely, and the hardest to detect, error.\n\n let index = (xorshift32(&mut self.rng_seed) as usize) % buffer.len();\n\n let bit = 1 << (xorshift32(&mut self.rng_seed) % 8) as u8;\n\n buffer[index] ^= bit;\n\n }\n\n\n", "file_path": "src/phy/fault_injector.rs", "rank": 27, "score": 63368.52172264085 }, { "content": "struct StmPhy {\n\n rx_buffer: [u8; 1536],\n\n tx_buffer: [u8; 1536],\n\n}\n\n\n\nimpl<'a> StmPhy {\n\n fn new() -> StmPhy {\n\n StmPhy {\n\n rx_buffer: [0; 1536],\n\n tx_buffer: [0; 1536],\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> phy::Device<'a> for StmPhy {\n\n type RxToken = StmPhyRxToken<'a>;\n\n type 
TxToken = StmPhyTxToken<'a>;\n\n\n\n fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> {\n\n Some((StmPhyRxToken(&mut self.rx_buffer[..]),\n", "file_path": "src/phy/mod.rs", "rank": 28, "score": 63363.237830011356 }, { "content": "#[cfg(all(target_os = \"linux\", any(feature = \"phy-tap_interface\", feature = \"phy-raw_socket\")))]\n\n#[repr(C)]\n\n#[derive(Debug)]\n\nstruct ifreq {\n\n ifr_name: [libc::c_char; libc::IF_NAMESIZE],\n\n ifr_data: libc::c_int /* ifr_ifindex or ifr_mtu */\n\n}\n\n\n", "file_path": "src/phy/sys/mod.rs", "rank": 29, "score": 63363.237830011356 }, { "content": "#[doc(hidden)]\n\npub trait Session {\n\n fn finish(&mut self) {}\n\n}\n\n\n\n#[cfg(feature = \"socket-raw\")]\n\nimpl<'a, 'b> Session for RawSocket<'a, 'b> {}\n\n#[cfg(all(feature = \"socket-icmp\", any(feature = \"proto-ipv4\", feature = \"proto-ipv6\")))]\n\nimpl<'a, 'b> Session for IcmpSocket<'a, 'b> {}\n\n#[cfg(feature = \"socket-udp\")]\n\nimpl<'a, 'b> Session for UdpSocket<'a, 'b> {}\n\n#[cfg(feature = \"socket-tcp\")]\n\nimpl<'a> Session for TcpSocket<'a> {}\n\n\n\n/// A smart pointer to a socket.\n\n///\n\n/// Allows the network stack to efficiently determine if the socket state was changed in any way.\n\npub struct Ref<'a, T: Session + 'a> {\n\n socket: &'a mut T,\n\n consumed: bool,\n\n}\n", "file_path": "src/socket/ref_.rs", "rank": 30, "score": 59875.66333746201 }, { "content": "/// A trait for setting a value to a known state.\n\n///\n\n/// In-place analog of Default.\n\npub trait Resettable {\n\n fn reset(&mut self);\n\n}\n", "file_path": "src/storage/mod.rs", "rank": 31, "score": 59875.66333746201 }, { "content": "/// The device independent part of an Ethernet network interface.\n\n///\n\n/// Separating the device from the data required for prorcessing and dispatching makes\n\n/// it possible to borrow them independently. 
For example, the tx and rx tokens borrow\n\n/// the `device` mutably until they're used, which makes it impossible to call other\n\n/// methods on the `Interface` in this time (since its `device` field is borrowed\n\n/// exclusively). However, it is still possible to call methods on its `inner` field.\n\nstruct InterfaceInner<'b, 'c> {\n\n neighbor_cache: NeighborCache<'b>,\n\n ethernet_addr: EthernetAddress,\n\n ip_addrs: ManagedSlice<'c, IpCidr>,\n\n #[cfg(feature = \"proto-ipv4\")]\n\n ipv4_gateway: Option<Ipv4Address>,\n\n #[cfg(feature = \"proto-ipv6\")]\n\n ipv6_gateway: Option<Ipv6Address>,\n\n device_capabilities: DeviceCapabilities,\n\n}\n\n\n\n/// A builder structure used for creating a Ethernet network\n\n/// interface.\n\npub struct InterfaceBuilder <'b, 'c, DeviceT: for<'d> Device<'d>> {\n\n device: DeviceT,\n\n ethernet_addr: Option<EthernetAddress>,\n\n neighbor_cache: Option<NeighborCache<'b>>,\n\n ip_addrs: ManagedSlice<'c, IpCidr>,\n\n #[cfg(feature = \"proto-ipv4\")]\n\n ipv4_gateway: Option<Ipv4Address>,\n", "file_path": "src/iface/ethernet.rs", "rank": 32, "score": 58945.13480223421 }, { "content": "/// A token to transmit a single network packet.\n\npub trait TxToken {\n\n /// Consumes the token to send a single network packet.\n\n ///\n\n /// This method constructs a transmit buffer of size `len` and calls the passed\n\n /// closure `f` with a mutable reference to that buffer. The closure should construct\n\n /// a valid network packet (e.g. an ethernet packet) in the buffer. 
When the closure\n\n /// returns, the transmit buffer is sent out.\n\n ///\n\n /// The timestamp must be a number of milliseconds, monotonically increasing since an\n\n /// arbitrary moment in time, such as system startup.\n\n fn consume<R, F>(self, timestamp: Instant, len: usize, f: F) -> Result<R>\n\n where F: FnOnce(&mut [u8]) -> Result<R>;\n\n}\n", "file_path": "src/phy/mod.rs", "rank": 33, "score": 58396.15835668991 }, { "content": "/// A token to receive a single network packet.\n\npub trait RxToken {\n\n /// Consumes the token to receive a single network packet.\n\n ///\n\n /// This method receives a packet and then calls the given closure `f` with the raw\n\n /// packet bytes as argument.\n\n ///\n\n /// The timestamp must be a number of milliseconds, monotonically increasing since an\n\n /// arbitrary moment in time, such as system startup.\n\n fn consume<R, F>(self, timestamp: Instant, f: F) -> Result<R>\n\n where F: FnOnce(&[u8]) -> Result<R>;\n\n}\n\n\n", "file_path": "src/phy/mod.rs", "rank": 34, "score": 58396.15835668991 }, { "content": "/// An interface for sending and receiving raw network frames.\n\n///\n\n/// The interface is based on _tokens_, which are types that allow to receive/transmit a\n\n/// single packet. The `receive` and `transmit` functions only construct such tokens, the\n\n/// real sending/receiving operation are performed when the tokens are consumed.\n\npub trait Device<'a> {\n\n type RxToken: RxToken + 'a;\n\n type TxToken: TxToken + 'a;\n\n\n\n /// Construct a token pair consisting of one receive token and one transmit token.\n\n ///\n\n /// The additional transmit token makes it possible to generate a reply packet based\n\n /// on the contents of the received packet. 
For example, this makes it possible to\n\n /// handle arbitrarily large ICMP echo (\"ping\") requests, where the all received bytes\n\n /// need to be sent back, without heap allocation.\n\n fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)>;\n\n\n\n /// Construct a transmit token.\n\n fn transmit(&'a mut self) -> Option<Self::TxToken>;\n\n\n\n /// Get a description of device capabilities.\n\n fn capabilities(&self) -> DeviceCapabilities;\n\n}\n\n\n", "file_path": "src/phy/mod.rs", "rank": 35, "score": 57992.153653801455 }, { "content": "/// Interface for printing listings.\n\npub trait PrettyPrint {\n\n /// Write a concise, formatted representation of a packet contained in the provided\n\n /// buffer, and any nested packets it may contain.\n\n ///\n\n /// `pretty_print` accepts a buffer and not a packet wrapper because the packet might\n\n /// be truncated, and so it might not be possible to create the packet wrapper.\n\n fn pretty_print(buffer: &AsRef<[u8]>, fmt: &mut fmt::Formatter,\n\n indent: &mut PrettyIndent) -> fmt::Result;\n\n}\n\n\n\n/// Wrapper for using a `PrettyPrint` where a `Display` is expected.\n\npub struct PrettyPrinter<'a, T: PrettyPrint> {\n\n prefix: &'static str,\n\n buffer: &'a AsRef<[u8]>,\n\n phantom: PhantomData<T>\n\n}\n\n\n\nimpl<'a, T: PrettyPrint> PrettyPrinter<'a, T> {\n\n /// Format the listing with the recorded parameters when Display::fmt is called.\n\n pub fn new(prefix: &'static str, buffer: &'a AsRef<[u8]>) -> PrettyPrinter<'a, T> {\n", "file_path": "src/wire/pretty_print.rs", "rank": 36, "score": 57022.06046989224 }, { "content": "/// A packet capture sink.\n\npub trait PcapSink {\n\n /// Write data into the sink.\n\n fn write(&self, data: &[u8]);\n\n\n\n /// Write an `u16` into the sink, in native byte order.\n\n fn write_u16(&self, value: u16) {\n\n let mut bytes = [0u8; 2];\n\n NativeEndian::write_u16(&mut bytes, value);\n\n self.write(&bytes[..])\n\n }\n\n\n\n /// Write an `u32` into the sink, in native 
byte order.\n\n fn write_u32(&self, value: u32) {\n\n let mut bytes = [0u8; 4];\n\n NativeEndian::write_u32(&mut bytes, value);\n\n self.write(&bytes[..])\n\n }\n\n\n\n /// Write the libpcap global header into the sink.\n\n ///\n", "file_path": "src/phy/pcap_writer.rs", "rank": 37, "score": 57022.06046989224 }, { "content": "fn client(kind: Client) {\n\n let port = match kind { Client::Reader => 1234, Client::Writer => 1235 };\n\n let mut stream = TcpStream::connect((\"192.168.69.1\", port)).unwrap();\n\n let mut buffer = vec![0; 1_000_000];\n\n\n\n let start = Instant::now();\n\n\n\n let mut processed = 0;\n\n while processed < AMOUNT {\n\n let length = cmp::min(buffer.len(), AMOUNT - processed);\n\n let result = match kind {\n\n Client::Reader => stream.read(&mut buffer[..length]),\n\n Client::Writer => stream.write(&buffer[..length]),\n\n };\n\n match result {\n\n Ok(0) => break,\n\n Ok(result) => {\n\n // print!(\"(P:{})\", result);\n\n processed += result\n\n }\n", "file_path": "examples/benchmark.rs", "rank": 38, "score": 56830.618651001336 }, { "content": "struct StmPhyRxToken<'a>(&'a [u8]);\n\n\n\nimpl<'a> phy::RxToken for StmPhyRxToken<'a> {\n\n fn consume<R, F>(self, _timestamp: Instant, f: F) -> Result<R>\n\n where F: FnOnce(&[u8]) -> Result<R>\n\n {\n\n // TODO: receive packet into buffer\n\n let result = f(self.0);\n\n println!(\"rx called\");\n\n result\n\n }\n\n}\n\n\n", "file_path": "src/phy/mod.rs", "rank": 39, "score": 53168.40390318632 }, { "content": "#[cfg(all(target_os = \"linux\", any(feature = \"phy-tap_interface\", feature = \"phy-raw_socket\")))]\n\nfn ifreq_for(name: &str) -> ifreq {\n\n let mut ifreq = ifreq {\n\n ifr_name: [0; libc::IF_NAMESIZE],\n\n ifr_data: 0\n\n };\n\n for (i, byte) in name.as_bytes().iter().enumerate() {\n\n ifreq.ifr_name[i] = *byte as libc::c_char\n\n }\n\n ifreq\n\n}\n\n\n", "file_path": "src/phy/sys/mod.rs", "rank": 40, "score": 51242.28971294775 }, { "content": "fn print_usage(program: &str, opts: Options) 
{\n\n let brief = format!(\"Usage: {} [options] INPUT OUTPUT\", program);\n\n print!(\"{}\", opts.usage(&brief));\n\n}\n\n\n", "file_path": "utils/packet2pcap.rs", "rank": 41, "score": 50015.934137054996 }, { "content": "fn max_resp_code_to_duration(value: u8) -> Duration {\n\n let value: u64 = value.into();\n\n let centisecs = if value < 128 {\n\n value\n\n } else {\n\n let mant = value & 0xF;\n\n let exp = (value >> 4) & 0x7;\n\n (mant | 0x10) << (exp + 3)\n\n };\n\n Duration::from_millis(centisecs * 100)\n\n}\n\n\n", "file_path": "src/wire/igmp.rs", "rank": 42, "score": 48932.61667004823 }, { "content": "fn duration_to_max_resp_code(duration: Duration) -> u8 {\n\n let centisecs = duration.total_millis() / 100;\n\n if centisecs < 128 {\n\n centisecs as u8\n\n } else if centisecs < 31744 {\n\n let mut mant = centisecs >> 3;\n\n let mut exp = 0u8;\n\n while mant > 0x1F && exp < 0x8 {\n\n mant >>= 1;\n\n exp += 1;\n\n }\n\n 0x80 | (exp << 4) | (mant as u8 & 0xF)\n\n } else {\n\n 0xFF\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> fmt::Display for Packet<&'a T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match Repr::parse(self) {\n", "file_path": "src/wire/igmp.rs", "rank": 43, "score": 48932.61667004823 }, { "content": "fn convert(packet_filename: &Path, pcap_filename: &Path, link_type: PcapLinkType)\n\n -> io::Result<()> {\n\n let mut packet_file = File::open(packet_filename)?;\n\n let mut packet = Vec::new();\n\n packet_file.read_to_end(&mut packet)?;\n\n\n\n let pcap = RefCell::new(Vec::new());\n\n PcapSink::global_header(&pcap, link_type);\n\n PcapSink::packet(&pcap, Instant::from_millis(0), &packet[..]);\n\n\n\n let mut pcap_file = File::create(pcap_filename)?;\n\n pcap_file.write_all(&pcap.borrow()[..])?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "utils/packet2pcap.rs", "rank": 61, "score": 41931.07772188862 }, { "content": "\n\n /// Shorthand for `(self.read + idx) % self.capacity()` with no\n\n /// additional checks to ensure the 
capacity is not zero.\n\n fn get_idx_unchecked(&self, idx: usize) -> usize {\n\n (self.read_at + idx) % self.capacity()\n\n }\n\n}\n\n\n\n/// This is the \"discrete\" ring buffer interface: it operates with single elements,\n\n/// and boundary conditions (empty/full) are errors.\n\nimpl<'a, T: 'a> RingBuffer<'a, T> {\n\n /// Call `f` with a single buffer element, and enqueue the element if `f`\n\n /// returns successfully, or return `Err(Error::Exhausted)` if the buffer is full.\n\n pub fn enqueue_one_with<'b, R, F>(&'b mut self, f: F) -> Result<R>\n\n where F: FnOnce(&'b mut T) -> Result<R> {\n\n if self.is_full() { return Err(Error::Exhausted) }\n\n\n\n let index = self.get_idx_unchecked(self.length);\n\n match f(&mut self.storage[index]) {\n\n Ok(result) => {\n", "file_path": "src/storage/ring_buffer.rs", "rank": 62, "score": 44.27347402869221 }, { "content": " #[inline]\n\n pub fn can_recv(&self) -> bool {\n\n if !self.may_recv() { return false }\n\n\n\n !self.rx_buffer.is_empty()\n\n }\n\n\n\n fn send_impl<'b, F, R>(&'b mut self, f: F) -> Result<R>\n\n where F: FnOnce(&'b mut SocketBuffer<'a>) -> (usize, R) {\n\n if !self.may_send() { return Err(Error::Illegal) }\n\n\n\n // The connection might have been idle for a long time, and so remote_last_ts\n\n // would be far in the past. 
Unless we clear it here, we'll abort the connection\n\n // down over in dispatch() by erroneously detecting it as timed out.\n\n if self.tx_buffer.is_empty() { self.remote_last_ts = None }\n\n\n\n let _old_length = self.tx_buffer.len();\n\n let (size, result) = f(&mut self.tx_buffer);\n\n if size > 0 {\n\n #[cfg(any(test, feature = \"verbose\"))]\n", "file_path": "src/socket/tcp.rs", "rank": 63, "score": 43.07898707522079 }, { "content": "\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> fmt::Display for NdiscOption<&'a T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match Repr::parse(self) {\n\n Ok(repr) => write!(f, \"{}\", repr),\n\n Err(err) => {\n\n write!(f, \"NDISC Option ({})\", err)?;\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub struct PrefixInformation {\n\n pub prefix_len: u8,\n\n pub flags: PrefixInfoFlags,\n\n pub valid_lifetime: Duration,\n\n pub preferred_lifetime: Duration,\n\n pub prefix: Ipv6Address\n", "file_path": "src/wire/ndiscoption.rs", "rank": 64, "score": 40.94137043131279 }, { "content": " }\n\n\n\n fn dequeue_padding(&mut self) {\n\n let Self { ref mut metadata_ring, ref mut payload_ring } = *self;\n\n\n\n let _ = metadata_ring.dequeue_one_with(|metadata| {\n\n if metadata.is_padding() {\n\n payload_ring.dequeue_many(metadata.size);\n\n Ok(()) // dequeue metadata\n\n } else {\n\n Err(Error::Exhausted) // don't dequeue metadata\n\n }\n\n });\n\n }\n\n\n\n /// Call `f` with a single packet from the buffer, and dequeue the packet if `f`\n\n /// returns successfully, or return `Err(Error::Exhausted)` if the buffer is empty.\n\n pub fn dequeue_with<'c, R, F>(&'c mut self, f: F) -> Result<R>\n\n where F: FnOnce(&mut H, &'c mut [u8]) -> Result<R> {\n\n self.dequeue_padding();\n", "file_path": "src/storage/packet_buffer.rs", "rank": 65, "score": 40.88740337204159 }, { "content": " pub fn set_ident(&mut self, value: u32) {\n\n let data = self.buffer.as_mut();\n\n 
NetworkEndian::write_u32(&mut data[field::IDENT], value);\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> fmt::Display for Header<&'a T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match Repr::parse(self) {\n\n Ok(repr) => write!(f, \"{}\", repr),\n\n Err(err) => {\n\n write!(f, \"IPv6 Fragment ({})\", err)?;\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A high-level representation of an IPv6 Fragment header.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n", "file_path": "src/wire/ipv6fragment.rs", "rank": 66, "score": 40.70526731822248 }, { "content": "}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> fmt::Display for Ipv6Option<&'a T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match Repr::parse(self) {\n\n Ok(repr) => write!(f, \"{}\", repr),\n\n Err(err) => {\n\n write!(f, \"IPv6 Extension Option ({})\", err)?;\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A high-level representation of an IPv6 Extension Header Option.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum Repr<'a> {\n\n Pad1,\n\n PadN(u8),\n\n Unknown {\n", "file_path": "src/wire/ipv6option.rs", "rank": 67, "score": 40.3537540207827 }, { "content": " &mut data[field::OPTIONS(len)]\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> fmt::Display for Header<&'a T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match Repr::parse(self) {\n\n Ok(repr) => write!(f, \"{}\", repr),\n\n Err(err) => {\n\n write!(f, \"IPv6 Hop-by-Hop Options ({})\", err)?;\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A high-level representation of an IPv6 Hop-by-Hop Options header.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub struct Repr {\n\n /// The type of header immediately following the Hop-by-Hop Options header.\n", "file_path": "src/wire/ipv6hopbyhop.rs", "rank": 68, "score": 39.86803751227064 }, { "content": "use managed::ManagedSlice;\n\n\n\nuse {Error, Result};\n\nuse super::RingBuffer;\n\n\n\n/// Size and header of a 
packet.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct PacketMetadata<H> {\n\n size: usize,\n\n header: Option<H>\n\n}\n\n\n\nimpl<H> PacketMetadata<H> {\n\n /// Empty packet description.\n\n pub const EMPTY: PacketMetadata<H> = PacketMetadata { size: 0, header: None };\n\n\n\n fn padding(size: usize) -> PacketMetadata<H> {\n\n PacketMetadata {\n\n size: size,\n\n header: None\n", "file_path": "src/storage/packet_buffer.rs", "rank": 69, "score": 39.867981295539686 }, { "content": " let (size, result) = f(&mut self.rx_buffer);\n\n self.remote_seq_no += size;\n\n if size > 0 {\n\n #[cfg(any(test, feature = \"verbose\"))]\n\n net_trace!(\"{}:{}:{}: rx buffer: dequeueing {} octets (now {})\",\n\n self.meta.handle, self.local_endpoint, self.remote_endpoint,\n\n size, _old_length - size);\n\n }\n\n Ok(result)\n\n }\n\n\n\n /// Call `f` with the largest contiguous slice of octets in the receive buffer,\n\n /// and dequeue the amount of elements returned by `f`.\n\n ///\n\n /// This function returns `Err(Error::Illegal) if the receive half of\n\n /// the connection is not open; see [may_recv](#method.may_recv).\n\n pub fn recv<'b, F, R>(&'b mut self, f: F) -> Result<R>\n\n where F: FnOnce(&'b mut [u8]) -> (usize, R) {\n\n self.recv_impl(|rx_buffer| {\n\n rx_buffer.dequeue_many_with(f)\n", "file_path": "src/socket/tcp.rs", "rank": 70, "score": 39.33493378047494 }, { "content": " self.length += 1;\n\n Ok(result)\n\n }\n\n Err(error) => Err(error)\n\n }\n\n }\n\n\n\n /// Enqueue a single element into the buffer, and return a reference to it,\n\n /// or return `Err(Error::Exhausted)` if the buffer is full.\n\n ///\n\n /// This function is a shortcut for `ring_buf.enqueue_one_with(Ok)`.\n\n pub fn enqueue_one<'b>(&'b mut self) -> Result<&'b mut T> {\n\n self.enqueue_one_with(Ok)\n\n }\n\n\n\n /// Call `f` with a single buffer element, and dequeue the element if `f`\n\n /// returns successfully, or return `Err(Error::Exhausted)` if the buffer is empty.\n\n pub fn 
dequeue_one_with<'b, R, F>(&'b mut self, f: F) -> Result<R>\n\n where F: FnOnce(&'b mut T) -> Result<R> {\n\n if self.is_empty() { return Err(Error::Exhausted) }\n", "file_path": "src/storage/ring_buffer.rs", "rank": 71, "score": 39.26225077699359 }, { "content": "\n\n/// This is the \"continuous\" ring buffer interface: it operates with element slices,\n\n/// and boundary conditions (empty/full) simply result in empty slices.\n\nimpl<'a, T: 'a> RingBuffer<'a, T> {\n\n /// Call `f` with the largest contiguous slice of unallocated buffer elements,\n\n /// and enqueue the amount of elements returned by `f`.\n\n ///\n\n /// # Panics\n\n /// This function panics if the amount of elements returned by `f` is larger\n\n /// than the size of the slice passed into it.\n\n pub fn enqueue_many_with<'b, R, F>(&'b mut self, f: F) -> (usize, R)\n\n where F: FnOnce(&'b mut [T]) -> (usize, R) {\n\n let write_at = self.get_idx(self.length);\n\n let max_size = self.contiguous_window();\n\n let (size, result) = f(&mut self.storage[write_at..write_at + max_size]);\n\n assert!(size <= max_size);\n\n self.length += size;\n\n (size, result)\n\n }\n\n\n", "file_path": "src/storage/ring_buffer.rs", "rank": 72, "score": 39.10213764784791 }, { "content": " net_trace!(\"{}:{}:{}: tx buffer: enqueueing {} octets (now {})\",\n\n self.meta.handle, self.local_endpoint, self.remote_endpoint,\n\n size, _old_length + size);\n\n }\n\n Ok(result)\n\n }\n\n\n\n /// Call `f` with the largest contiguous slice of octets in the transmit buffer,\n\n /// and enqueue the amount of elements returned by `f`.\n\n ///\n\n /// This function returns `Err(Error::Illegal) if the transmit half of\n\n /// the connection is not open; see [may_send](#method.may_send).\n\n pub fn send<'b, F, R>(&'b mut self, f: F) -> Result<R>\n\n where F: FnOnce(&'b mut [u8]) -> (usize, R) {\n\n self.send_impl(|tx_buffer| {\n\n tx_buffer.enqueue_many_with(f)\n\n })\n\n }\n\n\n\n /// Enqueue a sequence of octets to be sent, and fill it 
from a slice.\n", "file_path": "src/socket/tcp.rs", "rank": 73, "score": 38.02033224426948 }, { "content": " ///\n\n /// This function returns the amount of bytes actually enqueued, which is limited\n\n /// by the amount of free space in the transmit buffer; down to zero.\n\n ///\n\n /// See also [send](#method.send).\n\n pub fn send_slice(&mut self, data: &[u8]) -> Result<usize> {\n\n self.send_impl(|tx_buffer| {\n\n let size = tx_buffer.enqueue_slice(data);\n\n (size, size)\n\n })\n\n }\n\n\n\n fn recv_impl<'b, F, R>(&'b mut self, f: F) -> Result<R>\n\n where F: FnOnce(&'b mut SocketBuffer<'a>) -> (usize, R) {\n\n // We may have received some data inside the initial SYN, but until the connection\n\n // is fully open we must not dequeue any data, as it may be overwritten by e.g.\n\n // another (stale) SYN. (We do not support TCP Fast Open.)\n\n if !self.may_recv() { return Err(Error::Illegal) }\n\n\n\n let _old_length = self.rx_buffer.len();\n", "file_path": "src/socket/tcp.rs", "rank": 74, "score": 36.666528958299764 }, { "content": " let data = self.buffer.as_mut();\n\n data[field::DST_ADDR].copy_from_slice(value.as_bytes());\n\n }\n\n\n\n /// Return a mutable pointer to the payload.\n\n #[inline]\n\n pub fn payload_mut(&mut self) -> &mut [u8] {\n\n let range = self.header_len()..self.total_len();\n\n let data = self.buffer.as_mut();\n\n &mut data[range]\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> fmt::Display for Packet<&'a T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match Repr::parse(self) {\n\n Ok(repr) => write!(f, \"{}\", repr),\n\n Err(err) => {\n\n write!(f, \"IPv6 ({})\", err)?;\n\n Ok(())\n", "file_path": "src/wire/ipv6.rs", "rank": 75, "score": 36.37721677982405 }, { "content": "\n\n let Self { ref mut metadata_ring, ref mut payload_ring } = *self;\n\n\n\n metadata_ring.dequeue_one_with(move |metadata| {\n\n let PacketMetadata { ref mut header, size } = *metadata;\n\n\n\n payload_ring.dequeue_many_with(|payload_buf| 
{\n\n debug_assert!(payload_buf.len() >= size);\n\n\n\n match f(header.as_mut().unwrap(), &mut payload_buf[..size]) {\n\n Ok(val) => (size, Ok(val)),\n\n Err(err) => (0, Err(err)),\n\n }\n\n }).1\n\n })\n\n }\n\n\n\n /// Dequeue a single packet from the buffer, and return a reference to its payload\n\n /// as well as its header, or return `Err(Error::Exhausted)` if the buffer is empty.\n\n pub fn dequeue(&mut self) -> Result<(H, &mut [u8])> {\n", "file_path": "src/storage/packet_buffer.rs", "rank": 76, "score": 34.38068892205992 }, { "content": " }\n\n }\n\n\n\n /// Return the length of the segment, in terms of sequence space.\n\n pub fn segment_len(&self) -> usize {\n\n self.payload.len() + self.control.len()\n\n }\n\n\n\n /// Return whether the segment has no flags set (except PSH) and no data.\n\n pub fn is_empty(&self) -> bool {\n\n match self.control {\n\n _ if self.payload.len() != 0 => false,\n\n Control::Syn | Control::Fin | Control::Rst => false,\n\n Control::None | Control::Psh => true\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> fmt::Display for Packet<&'a T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/wire/tcp.rs", "rank": 77, "score": 34.303459708084205 }, { "content": " pub fn set_relay_agent_ip(&mut self, value: Ipv4Address) {\n\n let field = &mut self.buffer.as_mut()[field::GIADDR];\n\n field.copy_from_slice(value.as_bytes());\n\n }\n\n\n\n /// Sets the broadcast flag to the specified value.\n\n pub fn set_broadcast_flag(&mut self, value: bool) {\n\n let field = &mut self.buffer.as_mut()[field::FLAGS];\n\n NetworkEndian::write_u16(field, if value { 1 } else { 0 });\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + AsMut<[u8]> + ?Sized> Packet<&'a mut T> {\n\n /// Return a pointer to the options.\n\n #[inline]\n\n pub fn options_mut(&mut self) -> Result<&mut [u8]> {\n\n let data = self.buffer.as_mut();\n\n data.get_mut(field::OPTIONS).ok_or(Error::Truncated)\n\n }\n\n}\n", "file_path": 
"src/wire/dhcpv4.rs", "rank": 78, "score": 33.41060790038693 }, { "content": "}\n\n\n\n/// A high-level representation of an Internet Protocol version 4 packet header.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub struct Repr {\n\n pub src_addr: Address,\n\n pub dst_addr: Address,\n\n pub protocol: Protocol,\n\n pub payload_len: usize,\n\n pub hop_limit: u8\n\n}\n\n\n\nimpl Repr {\n\n /// Parse an Internet Protocol version 4 packet and return a high-level representation.\n\n pub fn parse<T: AsRef<[u8]> + ?Sized>(packet: &Packet<&T>,\n\n checksum_caps: &ChecksumCapabilities) -> Result<Repr> {\n\n // Version 4 is expected.\n\n if packet.version() != 4 { return Err(Error::Malformed) }\n\n // Valid checksum is expected.\n\n if checksum_caps.ipv4.rx() && !packet.verify_checksum() { return Err(Error::Checksum) }\n", "file_path": "src/wire/ipv4.rs", "rank": 79, "score": 33.28522638260495 }, { "content": " pub fn send(&mut self, size: usize, endpoint: IpEndpoint) -> Result<&mut [u8]> {\n\n if self.endpoint.port == 0 { return Err(Error::Unaddressable) }\n\n if !endpoint.is_specified() { return Err(Error::Unaddressable) }\n\n\n\n let payload_buf = self.tx_buffer.enqueue(size, endpoint)?;\n\n\n\n net_trace!(\"{}:{}:{}: buffer to send {} octets\",\n\n self.meta.handle, self.endpoint, endpoint, size);\n\n Ok(payload_buf)\n\n }\n\n\n\n /// Enqueue a packet to be sent to a given remote endpoint, and fill it from a slice.\n\n ///\n\n /// See also [send](#method.send).\n\n pub fn send_slice(&mut self, data: &[u8], endpoint: IpEndpoint) -> Result<()> {\n\n self.send(data.len(), endpoint)?.copy_from_slice(data);\n\n Ok(())\n\n }\n\n\n\n /// Dequeue a packet received from a remote endpoint, and return the endpoint as well\n", "file_path": "src/socket/udp.rs", "rank": 80, "score": 33.21694018454642 }, { "content": "impl phy::RxToken for RxToken {\n\n fn consume<R, F: FnOnce(&[u8]) -> Result<R>>(self, _timestamp: Instant, f: F) -> Result<R> {\n\n f(&self.buffer)\n\n 
}\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct TxToken<'a> {\n\n queue: &'a mut VecDeque<Vec<u8>>,\n\n}\n\n\n\nimpl<'a> phy::TxToken for TxToken<'a> {\n\n fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R>\n\n where F: FnOnce(&mut [u8]) -> Result<R>\n\n {\n\n let mut buffer = Vec::new();\n\n buffer.resize(len, 0);\n\n let result = f(&mut buffer);\n\n self.queue.push_back(buffer);\n\n result\n\n }\n\n}\n", "file_path": "src/phy/loopback.rs", "rank": 81, "score": 32.79222701796981 }, { "content": " size_1 + size_2\n\n }\n\n}\n\n\n\n/// This is the \"random access\" ring buffer interface: it operates with element slices,\n\n/// and allows to access elements of the buffer that are not adjacent to its head or tail.\n\nimpl<'a, T: 'a> RingBuffer<'a, T> {\n\n /// Return the largest contiguous slice of unallocated buffer elements starting\n\n /// at the given offset past the last allocated element, and up to the given size.\n\n // #[must_use]\n\n pub fn get_unallocated(&mut self, offset: usize, mut size: usize) -> &mut [T] {\n\n let start_at = self.get_idx(self.length + offset);\n\n // We can't access past the end of unallocated data.\n\n if offset > self.window() { return &mut [] }\n\n // We can't enqueue more than there is free space.\n\n let clamped_window = self.window() - offset;\n\n if size > clamped_window { size = clamped_window }\n\n // We can't contiguously enqueue past the end of the storage.\n\n let until_end = self.capacity() - start_at;\n\n if size > until_end { size = until_end }\n", "file_path": "src/storage/ring_buffer.rs", "rank": 82, "score": 32.77735151598372 }, { "content": "impl phy::RxToken for RxToken {\n\n fn consume<R, F>(self, _timestamp: Instant, f: F) -> Result<R>\n\n where F: FnOnce(&[u8]) -> Result<R>\n\n {\n\n f(&self.buffer[..])\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct TxToken {\n\n lower: Rc<RefCell<sys::TapInterfaceDesc>>,\n\n}\n\n\n\nimpl phy::TxToken for TxToken {\n\n fn consume<R, F>(self, _timestamp: Instant, 
len: usize, f: F) -> Result<R>\n\n where F: FnOnce(&mut [u8]) -> Result<R>\n\n {\n\n let mut lower = self.lower.borrow_mut();\n\n let mut buffer = vec![0; len];\n\n let result = f(&mut buffer);\n\n lower.send(&mut buffer[..]).unwrap();\n\n result\n\n }\n\n}\n", "file_path": "src/phy/tap_interface.rs", "rank": 83, "score": 32.766136015164626 }, { "content": " &mut data[field::OPTIONS(header_len)]\n\n }\n\n\n\n /// Return a mutable pointer to the payload data.\n\n #[inline]\n\n pub fn payload_mut(&mut self) -> &mut [u8] {\n\n let header_len = self.header_len() as usize;\n\n let data = self.buffer.as_mut();\n\n &mut data[header_len..]\n\n }\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> AsRef<[u8]> for Packet<T> {\n\n fn as_ref(&self) -> &[u8] {\n\n self.buffer.as_ref()\n\n }\n\n}\n\n\n\n/// A representation of a single TCP option.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n", "file_path": "src/wire/tcp.rs", "rank": 84, "score": 32.34399569805298 }, { "content": " Ok(())\n\n } else {\n\n Err(())\n\n }\n\n }\n\n\n\n fn accept_str(&mut self, string: &[u8]) -> Result<()> {\n\n for byte in string.iter() {\n\n self.accept_char(*byte)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn accept_digit(&mut self, hex: bool) -> Result<u8> {\n\n let digit = self.advance()?;\n\n if digit >= b'0' && digit <= b'9' {\n\n Ok(digit - b'0')\n\n } else if hex && digit >= b'a' && digit <= b'f' {\n\n Ok(digit - b'a' + 10)\n\n } else if hex && digit >= b'A' && digit <= b'F' {\n", "file_path": "src/parsers.rs", "rank": 85, "score": 32.0597006924611 }, { "content": " fn consume<R, F: FnOnce(&[u8]) -> Result<R>>(self, _timestamp: Instant, f: F) -> Result<R> {\n\n f(&self.buffer[..])\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct TxToken {\n\n lower: Rc<RefCell<sys::RawSocketDesc>>,\n\n}\n\n\n\nimpl phy::TxToken for TxToken {\n\n fn consume<R, F: FnOnce(&mut [u8]) -> Result<R>>(self, _timestamp: Instant, len: usize, f: F)\n\n -> Result<R>\n\n {\n\n let mut lower = self.lower.borrow_mut();\n\n let mut buffer = vec![0; 
len];\n\n let result = f(&mut buffer);\n\n lower.send(&mut buffer[..]).unwrap();\n\n result\n\n }\n\n}\n", "file_path": "src/phy/raw_socket.rs", "rank": 86, "score": 31.975828305658357 }, { "content": "#![deny(missing_docs)]\n\n\n\nuse core::fmt;\n\nuse byteorder::{ByteOrder, NetworkEndian};\n\n\n\nuse {Error, Result};\n\npub use super::IpProtocol as Protocol;\n\nuse super::ip::pretty_print_ip_payload;\n\n\n\n/// Minimum MTU required of all links supporting IPv6. See [RFC 8200 § 5].\n\n///\n\n/// [RFC 8200 § 5]: https://tools.ietf.org/html/rfc8200#section-5\n\npub const MIN_MTU: usize = 1280;\n\n\n\n/// A sixteen-octet IPv6 address.\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default)]\n\npub struct Address(pub [u8; 16]);\n\n\n\nimpl Address {\n\n /// The [unspecified address].\n", "file_path": "src/wire/ipv6.rs", "rank": 87, "score": 31.895071171883068 }, { "content": " buf[..size].copy_from_slice(&data[..size]);\n\n (size, &data[size..])\n\n });\n\n let (size_2, ()) = self.enqueue_many_with(|buf| {\n\n let size = cmp::min(buf.len(), data.len());\n\n buf[..size].copy_from_slice(&data[..size]);\n\n (size, ())\n\n });\n\n size_1 + size_2\n\n }\n\n\n\n /// Call `f` with the largest contiguous slice of allocated buffer elements,\n\n /// and dequeue the amount of elements returned by `f`.\n\n ///\n\n /// # Panics\n\n /// This function panics if the amount of elements returned by `f` is larger\n\n /// than the size of the slice passed into it.\n\n pub fn dequeue_many_with<'b, R, F>(&'b mut self, f: F) -> (usize, R)\n\n where F: FnOnce(&'b mut [T]) -> (usize, R) {\n\n let capacity = self.capacity();\n", "file_path": "src/storage/ring_buffer.rs", "rank": 88, "score": 31.82936332011058 }, { "content": "use core::{i32, ops, cmp, fmt};\n\nuse byteorder::{ByteOrder, NetworkEndian};\n\n\n\nuse {Error, Result};\n\nuse phy::ChecksumCapabilities;\n\nuse super::{IpProtocol, IpAddress};\n\nuse super::ip::checksum;\n\n\n\n/// A TCP sequence 
number.\n\n///\n\n/// A sequence number is a monotonically advancing integer modulo 2<sup>32</sup>.\n\n/// Sequence numbers do not have a discontiguity when compared pairwise across a signed overflow.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy, Default)]\n\npub struct SeqNumber(pub i32);\n\n\n\nimpl fmt::Display for SeqNumber {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0 as u32)\n\n }\n\n}\n", "file_path": "src/wire/tcp.rs", "rank": 89, "score": 31.80370974144951 }, { "content": " }\n\n\n\n fn transmit(&'a mut self) -> Option<Self::TxToken> {\n\n let &mut Self { ref mut lower, ref sink, mode } = self;\n\n lower.transmit().map(|token| {\n\n TxToken { token, sink: sink.clone(), mode: mode }\n\n })\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct RxToken<Rx: phy::RxToken, S: PcapSink> {\n\n token: Rx,\n\n sink: S,\n\n mode: PcapMode,\n\n}\n\n\n\nimpl<Rx: phy::RxToken, S: PcapSink> phy::RxToken for RxToken<Rx, S> {\n\n fn consume<R, F: FnOnce(&[u8]) -> Result<R>>(self, timestamp: Instant, f: F) -> Result<R> {\n\n let Self { token, sink, mode } = self;\n", "file_path": "src/phy/pcap_writer.rs", "rank": 90, "score": 31.735719171137376 }, { "content": " fn consume<R, F>(self, timestamp: Instant, f: F) -> Result<R>\n\n where F: FnOnce(&[u8]) -> Result<R>\n\n {\n\n let Self { token, writer } = self;\n\n token.consume(timestamp, |buffer| {\n\n writer(timestamp, PrettyPrinter::<P>::new(\"<- \", &buffer));\n\n f(buffer)\n\n })\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct TxToken<Tx: phy::TxToken, P: PrettyPrint> {\n\n token: Tx,\n\n writer: fn(Instant, PrettyPrinter<P>)\n\n}\n\n\n\nimpl<Tx: phy::TxToken, P: PrettyPrint> phy::TxToken for TxToken<Tx, P> {\n\n fn consume<R, F>(self, timestamp: Instant, len: usize, f: F) -> Result<R>\n\n where F: FnOnce(&mut [u8]) -> Result<R>\n", "file_path": "src/phy/tracer.rs", "rank": 91, "score": 31.609507685052186 }, { "content": " if offset > self.length { return &mut [] }\n\n // We can't read 
more than we have allocated.\n\n let clamped_length = self.length - offset;\n\n if size > clamped_length { size = clamped_length }\n\n // We can't contiguously dequeue past the end of the storage.\n\n let until_end = self.capacity() - start_at;\n\n if size > until_end { size = until_end }\n\n\n\n &self.storage[start_at..start_at + size]\n\n }\n\n\n\n /// Read as many elements from allocated buffer elements into the given slice\n\n /// starting at the given offset past the first allocated element, and return\n\n /// the amount read.\n\n // #[must_use]\n\n pub fn read_allocated(&mut self, offset: usize, data: &mut [T]) -> usize\n\n where T: Copy {\n\n let (size_1, offset, data) = {\n\n let slice = self.get_allocated(offset, data.len());\n\n data[..slice.len()].copy_from_slice(slice);\n", "file_path": "src/storage/ring_buffer.rs", "rank": 92, "score": 31.51763213541564 }, { "content": " slice_len\n\n };\n\n size_1 + size_2\n\n }\n\n\n\n /// Enqueue the given number of unallocated buffer elements.\n\n ///\n\n /// # Panics\n\n /// Panics if the number of elements given exceeds the number of unallocated elements.\n\n pub fn enqueue_unallocated(&mut self, count: usize) {\n\n assert!(count <= self.window());\n\n self.length += count;\n\n }\n\n\n\n /// Return the largest contiguous slice of allocated buffer elements starting\n\n /// at the given offset past the first allocated element, and up to the given size.\n\n // #[must_use]\n\n pub fn get_allocated(&self, offset: usize, mut size: usize) -> &[T] {\n\n let start_at = self.get_idx(offset);\n\n // We can't read past the end of the allocated data.\n", "file_path": "src/storage/ring_buffer.rs", "rank": 93, "score": 31.466770028975848 }, { "content": "\n\n &mut self.storage[start_at..start_at + size]\n\n }\n\n\n\n /// Write as many elements from the given slice into unallocated buffer elements\n\n /// starting at the given offset past the last allocated element, and return\n\n /// the amount written.\n\n // #[must_use]\n\n 
pub fn write_unallocated(&mut self, offset: usize, data: &[T]) -> usize\n\n where T: Copy {\n\n let (size_1, offset, data) = {\n\n let slice = self.get_unallocated(offset, data.len());\n\n let slice_len = slice.len();\n\n slice.copy_from_slice(&data[..slice_len]);\n\n (slice_len, offset + slice_len, &data[slice_len..])\n\n };\n\n let size_2 = {\n\n let slice = self.get_unallocated(offset, data.len());\n\n let slice_len = slice.len();\n\n slice.copy_from_slice(&data[..slice_len]);\n", "file_path": "src/storage/ring_buffer.rs", "rank": 94, "score": 30.659693906529924 }, { "content": " }\n\n\n\n /// Returns true if the broadcast flag is set.\n\n pub fn broadcast_flag(&self) -> bool {\n\n let field = &self.buffer.as_ref()[field::FLAGS];\n\n NetworkEndian::read_u16(field) & 0b1 == 0b1\n\n }\n\n}\n\n\n\nimpl<'a, T: AsRef<[u8]> + ?Sized> Packet<&'a T> {\n\n /// Return a pointer to the options.\n\n #[inline]\n\n pub fn options(&self) -> Result<&'a [u8]> {\n\n let data = self.buffer.as_ref();\n\n data.get(field::OPTIONS).ok_or(Error::Malformed)\n\n }\n\n}\n\n\n\nimpl<T: AsRef<[u8]> + AsMut<[u8]>> Packet<T> {\n\n /// Sets the optional `sname` (“server name”) and `file` (“boot file name”) fields to zero.\n", "file_path": "src/wire/dhcpv4.rs", "rank": 95, "score": 30.49618536935213 }, { "content": " }\n\n }\n\n\n\n fn accept_eof(&mut self) -> Result<()> {\n\n if self.data.len() == self.pos {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n }\n\n\n\n fn until_eof<F, T>(&mut self, f: F) -> Result<T>\n\n where F: FnOnce(&mut Parser<'a>) -> Result<T> {\n\n let res = f(self)?;\n\n self.accept_eof()?;\n\n Ok(res)\n\n }\n\n\n\n fn accept_char(&mut self, chr: u8) -> Result<()> {\n\n if self.advance()? 
== chr {\n", "file_path": "src/parsers.rs", "rank": 96, "score": 30.47729613636303 }, { "content": " /// as a pointer to the payload.\n\n ///\n\n /// This function returns `Err(Error::Exhausted)` if the receive buffer is empty.\n\n pub fn recv(&mut self) -> Result<(&[u8], IpEndpoint)> {\n\n let (endpoint, payload_buf) = self.rx_buffer.dequeue()?;\n\n\n\n net_trace!(\"{}:{}:{}: receive {} buffered octets\",\n\n self.meta.handle, self.endpoint,\n\n endpoint, payload_buf.len());\n\n Ok((payload_buf, endpoint))\n\n }\n\n\n\n /// Dequeue a packet received from a remote endpoint, copy the payload into the given slice,\n\n /// and return the amount of octets copied as well as the endpoint.\n\n ///\n\n /// See also [recv](#method.recv).\n\n pub fn recv_slice(&mut self, data: &mut [u8]) -> Result<(usize, IpEndpoint)> {\n\n let (buffer, endpoint) = self.recv()?;\n\n let length = min(data.len(), buffer.len());\n\n data[..length].copy_from_slice(&buffer[..length]);\n", "file_path": "src/socket/udp.rs", "rank": 97, "score": 30.463984675959026 }, { "content": "\n\n fn advance(&mut self) -> Result<u8> {\n\n match self.data.get(self.pos) {\n\n Some(&chr) => {\n\n self.pos += 1;\n\n Ok(chr)\n\n }\n\n None => Err(())\n\n }\n\n }\n\n\n\n fn try<F, T>(&mut self, f: F) -> Option<T>\n\n where F: FnOnce(&mut Parser<'a>) -> Result<T> {\n\n let pos = self.pos;\n\n match f(self) {\n\n Ok(res) => Some(res),\n\n Err(()) => {\n\n self.pos = pos;\n\n None\n\n }\n", "file_path": "src/parsers.rs", "rank": 98, "score": 30.393050095150052 }, { "content": " }\n\n}\n\n\n\n/// A high-level representation of an Internet Protocol version 4 packet header.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub struct Repr {\n\n pub src_addr: Address,\n\n pub dst_addr: Address,\n\n pub ethertype: EtherType,\n\n}\n\n\n\nimpl Repr {\n\n /// Parse an Ethernet II frame and return a high-level representation.\n\n pub fn parse<T: AsRef<[u8]> + ?Sized>(frame: &Frame<&T>) -> Result<Repr> {\n\n 
frame.check_len()?;\n\n Ok(Repr {\n\n src_addr: frame.src_addr(),\n\n dst_addr: frame.dst_addr(),\n\n ethertype: frame.ethertype(),\n\n })\n", "file_path": "src/wire/ethernet.rs", "rank": 99, "score": 30.36478205469155 } ]
Rust
src/serde/mod.rs
soenkehahn/time
1860b1482e96e3973fbad634739eb21c5bc0190d
pub mod timestamp; use serde::de::Error as _; #[cfg(feature = "serde-human-readable")] use serde::ser::Error as _; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use crate::error::ComponentRange; #[cfg(feature = "serde-human-readable")] use crate::{ error, format_description::{modifier, Component, FormatItem}, }; use crate::{Date, Duration, OffsetDateTime, PrimitiveDateTime, Time, UtcOffset, Weekday}; #[cfg(feature = "serde-human-readable")] const DATE_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Component(Component::Year(modifier::Year { repr: modifier::YearRepr::Full, iso_week_based: false, sign_is_mandatory: false, padding: modifier::Padding::Zero, })), FormatItem::Literal(b"-"), FormatItem::Component(Component::Month(modifier::Month { repr: modifier::MonthRepr::Numerical, padding: modifier::Padding::Zero, })), FormatItem::Literal(b"-"), FormatItem::Component(Component::Day(modifier::Day { padding: modifier::Padding::Zero, })), ]; impl Serialize for Date { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&DATE_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `Date`")), }); } (self.year(), self.ordinal()).serialize(serializer) } } impl<'a> Deserialize<'a> for Date { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &DATE_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (year, ordinal) = Deserialize::deserialize(deserializer)?; Self::from_ordinal_date(year, ordinal).map_err(ComponentRange::to_invalid_serde_value::<D>) } } impl Serialize for Duration { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if 
serializer.is_human_readable() { return serializer.collect_str(&format_args!( "{}.{:>09}", self.whole_seconds(), self.subsec_nanoseconds().abs() )); } (self.whole_seconds(), self.subsec_nanoseconds()).serialize(serializer) } } impl<'a> Deserialize<'a> for Duration { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { let s = <&str>::deserialize(deserializer)?; let dot = s.find('.').ok_or_else(|| { serde::de::Error::invalid_value(serde::de::Unexpected::Str(s), &"a decimal point") })?; let (seconds, nanoseconds) = s.split_at(dot); let nanoseconds = &nanoseconds[1..]; let seconds = seconds.parse().map_err(|_| { serde::de::Error::invalid_value(serde::de::Unexpected::Str(seconds), &"a number") })?; let mut nanoseconds = nanoseconds.parse().map_err(|_| { serde::de::Error::invalid_value( serde::de::Unexpected::Str(nanoseconds), &"a number", ) })?; if seconds < 0 { nanoseconds *= -1; } return Ok(Self::new(seconds, nanoseconds)); } let (seconds, nanoseconds) = Deserialize::deserialize(deserializer)?; Ok(Self::new(seconds, nanoseconds)) } } #[cfg(feature = "serde-human-readable")] const OFFSET_DATE_TIME_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Compound(DATE_FORMAT), FormatItem::Literal(b" "), FormatItem::Compound(TIME_FORMAT), FormatItem::Literal(b" "), FormatItem::Compound(UTC_OFFSET_FORMAT), ]; impl Serialize for OffsetDateTime { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&OFFSET_DATE_TIME_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `OffsetDateTime`")), }); } ( self.year(), self.ordinal(), self.hour(), self.minute(), self.second(), self.nanosecond(), self.offset.whole_hours(), self.offset.minutes_past_hour(), self.offset.seconds_past_minute(), ) .serialize(serializer) } 
} impl<'a> Deserialize<'a> for OffsetDateTime { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &OFFSET_DATE_TIME_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let ( year, ordinal, hour, minute, second, nanosecond, offset_hours, offset_minutes, offset_seconds, ) = Deserialize::deserialize(deserializer)?; Ok(Date::from_ordinal_date(year, ordinal) .map_err(ComponentRange::to_invalid_serde_value::<D>)? .with_hms_nano(hour, minute, second, nanosecond) .map_err(ComponentRange::to_invalid_serde_value::<D>)? .assume_offset( UtcOffset::from_hms(offset_hours, offset_minutes, offset_seconds) .map_err(ComponentRange::to_invalid_serde_value::<D>)?, )) } } #[cfg(feature = "serde-human-readable")] const PRIMITIVE_DATE_TIME_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Compound(DATE_FORMAT), FormatItem::Literal(b" "), FormatItem::Compound(TIME_FORMAT), ]; impl Serialize for PrimitiveDateTime { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&PRIMITIVE_DATE_TIME_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `PrimitiveDateTime`")), }); } ( self.year(), self.ordinal(), self.hour(), self.minute(), self.second(), self.nanosecond(), ) .serialize(serializer) } } impl<'a> Deserialize<'a> for PrimitiveDateTime { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse( <&str>::deserialize(deserializer)?, &PRIMITIVE_DATE_TIME_FORMAT, ) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (year, ordinal, hour, minute, second, nanosecond) = Deserialize::deserialize(deserializer)?; 
Date::from_ordinal_date(year, ordinal) .map_err(ComponentRange::to_invalid_serde_value::<D>)? .with_hms_nano(hour, minute, second, nanosecond) .map_err(ComponentRange::to_invalid_serde_value::<D>) } } #[cfg(feature = "serde-human-readable")] const TIME_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Component(Component::Hour(modifier::Hour { padding: modifier::Padding::Zero, is_12_hour_clock: false, })), FormatItem::Literal(b":"), FormatItem::Component(Component::Minute(modifier::Minute { padding: modifier::Padding::Zero, })), FormatItem::Literal(b":"), FormatItem::Component(Component::Second(modifier::Second { padding: modifier::Padding::Zero, })), FormatItem::Literal(b"."), FormatItem::Component(Component::Subsecond(modifier::Subsecond { digits: modifier::SubsecondDigits::OneOrMore, })), ]; impl Serialize for Time { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&TIME_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `Time`")), }); } (self.hour(), self.minute(), self.second(), self.nanosecond()).serialize(serializer) } } impl<'a> Deserialize<'a> for Time { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &TIME_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (hour, minute, second, nanosecond) = Deserialize::deserialize(deserializer)?; Self::from_hms_nano(hour, minute, second, nanosecond) .map_err(ComponentRange::to_invalid_serde_value::<D>) } } #[cfg(feature = "serde-human-readable")] const UTC_OFFSET_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Component(Component::OffsetHour(modifier::OffsetHour { sign_is_mandatory: true, padding: modifier::Padding::Zero, })), FormatItem::Literal(b":"), 
FormatItem::Component(Component::OffsetMinute(modifier::OffsetMinute { padding: modifier::Padding::Zero, })), FormatItem::Literal(b":"), FormatItem::Component(Component::OffsetSecond(modifier::OffsetSecond { padding: modifier::Padding::Zero, })), ]; impl Serialize for UtcOffset { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&UTC_OFFSET_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `UtcOffset`")), }); } ( self.whole_hours(), self.minutes_past_hour(), self.seconds_past_minute(), ) .serialize(serializer) } } impl<'a> Deserialize<'a> for UtcOffset { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &UTC_OFFSET_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (hours, minutes, seconds) = Deserialize::deserialize(deserializer)?; Self::from_hms(hours, minutes, seconds).map_err(ComponentRange::to_invalid_serde_value::<D>) } } impl Serialize for Weekday { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { #[cfg(not(feature = "std"))] use alloc::string::ToString; return self.to_string().serialize(serializer); } self.number_from_monday().serialize(serializer) } } impl<'a> Deserialize<'a> for Weekday { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return match <&str>::deserialize(deserializer)? 
{ "Monday" => Ok(Self::Monday), "Tuesday" => Ok(Self::Tuesday), "Wednesday" => Ok(Self::Wednesday), "Thursday" => Ok(Self::Thursday), "Friday" => Ok(Self::Friday), "Saturday" => Ok(Self::Saturday), "Sunday" => Ok(Self::Sunday), val => Err(D::Error::invalid_value( serde::de::Unexpected::Str(val), &"a day of the week", )), }; } match u8::deserialize(deserializer)? { 1 => Ok(Self::Monday), 2 => Ok(Self::Tuesday), 3 => Ok(Self::Wednesday), 4 => Ok(Self::Thursday), 5 => Ok(Self::Friday), 6 => Ok(Self::Saturday), 7 => Ok(Self::Sunday), val => Err(D::Error::invalid_value( serde::de::Unexpected::Unsigned(val.into()), &"a value in the range 1..=7", )), } } }
pub mod timestamp; use serde::de::Error as _; #[cfg(feature = "serde-human-readable")] use serde::ser::Error as _; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use crate::error::ComponentRange; #[cfg(feature = "serde-human-readable")] use crate::{ error, format_description::{modifier, Component, FormatItem}, }; use crate::{Date, Duration, OffsetDateTime, PrimitiveDateTime, Time, UtcOffset, Weekday}; #[cfg(feature = "serde-human-readable")] const DATE_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Component(Component::Year(modifier::Year { repr: modifier::YearRepr::Full, iso_week_based: false, sign_is_mandatory: false, padding: modifier::Padding::Zero, })), FormatItem::Literal(b"-"), FormatItem::Component(Component::Month(modifier::Month { repr: modifier::MonthRepr::Numerical, padding: modifier::Padding::Zero, })), FormatItem::Literal(b"-"), FormatItem::Component(Component::Day(modifier::Day { padding: modifier::Padding::Zero, })), ]; impl Serialize for Date { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&DATE_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `Date`")), }); } (self.year(), self.ordinal()).serialize(serializer) } } impl<'a> Deserialize<'a> for Date { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &DATE_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (year, ordinal) = Deserialize::deserialize(deserializer)?; Self::from_ordinal_date(year, ordinal).map_err(ComponentRange::to_invalid_serde_value::<D>) } } impl Serialize for Duration { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if 
serializer.is_human_readable() { return serializer.collect_str(&format_args!( "{}.{:>09}", self.whole_seconds(), self.subsec_nanoseconds().abs() )); } (self.whole_seconds(), self.subsec_nanoseconds()).serialize(serializer) } } impl<'a> Deserialize<'a> for Duration { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { let s = <&str>::deserialize(deserializer)?; let dot = s.find('.').ok_or_else(|| { serde::de::Error::invalid_value(serde::de::Unexpected::Str(s), &"a decimal point") })?; let (seconds, nanoseconds) = s.split_at(dot); let nanoseconds = &nanoseconds[1..]; let seconds = seconds.parse().map_err(|_| { serde::de::Error::invalid_value(serde::de::Unexpected::Str(seconds), &"a number") })?; let mut nanoseconds = nanoseconds.parse().map_err(|_| { serde::de::Error::invalid_value( serde::de::Unexpected::Str(nanoseconds), &"a number", ) })?; if seconds < 0 { nanoseconds *= -1; } return Ok(Self::new(seconds, nanoseconds)); } let (seconds, nanoseconds) = Deserialize::deserialize(deserializer)?; Ok(Self::new(seconds, nanoseconds)) } } #[cfg(feature = "serde-human-readable")] const OFFSET_DATE_TIME_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Compound(DATE_FORMAT), FormatItem::Literal(b" "), FormatItem::Compound(TIME_FORMAT), FormatItem::Literal(b" "), FormatItem::Compound(UTC_OFFSET_FORMAT), ]; impl Serialize for OffsetDateTime { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&OFFSET_DATE_TIME_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `OffsetDateTime`")), }); } ( self.year(), self.ordinal(), self.hour(), self.minute(), self.second(), self.nanosecond(), self.offset.whole_hours(), self.offset.minutes_past_hour(), self.offset.seconds_past_minute(), ) .serialize(serializer) } 
} impl<'a> Deserialize<'a> for OffsetDateTime { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &OFFSET_DATE_TIME_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let ( year, ordinal, hour, minute, second, nanosecond, offset_hours, offset_minutes, offset_seconds, ) = Deserialize::deserialize(deserializer)?; Ok(Date::from_ordinal_date(year, ordinal) .map_err(ComponentRange::to_invalid_serde_value::<D>)? .with_hms_nano(hour, minute, second, nanosecond) .map_err(ComponentRange::to_invalid_serde_value::<D>)? .assume_offset( UtcOffset::from_hms(offset_hours, offset_minutes, offset_seconds) .map_err(ComponentRange::to_invalid_serde_value::<D>)?, )) } } #[cfg(feature = "serde-human-readable")] const PRIMITIVE_DATE_TIME_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Compound(DATE_FORMAT), FormatItem::Literal(b" "), FormatItem::Compound(TIME_FORMAT), ]; impl Serialize for PrimitiveDateTime { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&PRIMITIVE_DATE_TIME_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `PrimitiveDateTime`")), }); } ( self.year(), self.ordinal(), self.hour(), self.minute(), self.second(), self.nanosecond(), ) .serialize(serializer) } } impl<'a> Deserialize<'a> for PrimitiveDateTime { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse( <&str>::deserialize(deserializer)?, &PRIMITIVE_DATE_TIME_FORMAT, ) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (year, ordinal, hour, minute, second, nanosecond) = Deserialize::deserialize(deserializer)?; 
Date::from_ordinal_date(year, ordinal) .map_err(ComponentRange::to_invalid_serde_value::<D>)? .with_hms_nano(hour, minute, second, nanosecond) .map_err(ComponentRange::to_invalid_serde_value::<D>) } } #[cfg(feature = "serde-human-readable")] const TIME_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Component(Component::Hour(modifier::Hour { padding: modifier::Padding::Zero, is_12_hour_clock: false, })), FormatItem::Literal(b":"), FormatItem::Component(Component::Minute(modifier::Minute { padding: modifier::Padding::Zero, })), FormatItem::Literal(b":"), FormatItem::Component(Component::Second(modifier::Second { padding: modifier::Padding::Zero, })), FormatItem::Literal(b"."), FormatItem::Component(Component::Subsecond(modifier::Subsecond { digits: modifier::SubsecondDigits::OneOrMore, })), ]; impl Serialize for Time { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { return serializer.serialize_str(&match self.format(&TIME_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `Time`")), }); } (self.hour(), self.minute(), self.second(), self.nanosecond()).serialize(serializer) } } impl<'a> Deserialize<'a> for Time { fn deserialize<D: D
eturn serializer.serialize_str(&match self.format(&UTC_OFFSET_FORMAT) { Ok(s) => s, Err(_) => return Err(S::Error::custom("failed formatting `UtcOffset`")), }); } ( self.whole_hours(), self.minutes_past_hour(), self.seconds_past_minute(), ) .serialize(serializer) } } impl<'a> Deserialize<'a> for UtcOffset { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &UTC_OFFSET_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (hours, minutes, seconds) = Deserialize::deserialize(deserializer)?; Self::from_hms(hours, minutes, seconds).map_err(ComponentRange::to_invalid_serde_value::<D>) } } impl Serialize for Weekday { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { #[cfg(not(feature = "std"))] use alloc::string::ToString; return self.to_string().serialize(serializer); } self.number_from_monday().serialize(serializer) } } impl<'a> Deserialize<'a> for Weekday { fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return match <&str>::deserialize(deserializer)? { "Monday" => Ok(Self::Monday), "Tuesday" => Ok(Self::Tuesday), "Wednesday" => Ok(Self::Wednesday), "Thursday" => Ok(Self::Thursday), "Friday" => Ok(Self::Friday), "Saturday" => Ok(Self::Saturday), "Sunday" => Ok(Self::Sunday), val => Err(D::Error::invalid_value( serde::de::Unexpected::Str(val), &"a day of the week", )), }; } match u8::deserialize(deserializer)? 
{ 1 => Ok(Self::Monday), 2 => Ok(Self::Tuesday), 3 => Ok(Self::Wednesday), 4 => Ok(Self::Thursday), 5 => Ok(Self::Friday), 6 => Ok(Self::Saturday), 7 => Ok(Self::Sunday), val => Err(D::Error::invalid_value( serde::de::Unexpected::Unsigned(val.into()), &"a value in the range 1..=7", )), } } }
eserializer<'a>>(deserializer: D) -> Result<Self, D::Error> { #[cfg(feature = "serde-human-readable")] if deserializer.is_human_readable() { return Self::parse(<&str>::deserialize(deserializer)?, &TIME_FORMAT) .map_err(error::Parse::to_invalid_serde_value::<D>); } let (hour, minute, second, nanosecond) = Deserialize::deserialize(deserializer)?; Self::from_hms_nano(hour, minute, second, nanosecond) .map_err(ComponentRange::to_invalid_serde_value::<D>) } } #[cfg(feature = "serde-human-readable")] const UTC_OFFSET_FORMAT: &[FormatItem<'_>] = &[ FormatItem::Component(Component::OffsetHour(modifier::OffsetHour { sign_is_mandatory: true, padding: modifier::Padding::Zero, })), FormatItem::Literal(b":"), FormatItem::Component(Component::OffsetMinute(modifier::OffsetMinute { padding: modifier::Padding::Zero, })), FormatItem::Literal(b":"), FormatItem::Component(Component::OffsetSecond(modifier::OffsetSecond { padding: modifier::Padding::Zero, })), ]; impl Serialize for UtcOffset { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { #[cfg(feature = "serde-human-readable")] if serializer.is_human_readable() { r
random
[ { "content": "/// Deserialize an `OffsetDateTime` from its Unix timestamp\n\npub fn deserialize<'a, D: Deserializer<'a>>(deserializer: D) -> Result<OffsetDateTime, D::Error> {\n\n i64::deserialize(deserializer).and_then(|timestamp| {\n\n OffsetDateTime::from_unix_timestamp(timestamp)\n\n .map_err(ComponentRange::to_invalid_serde_value::<D>)\n\n })\n\n}\n\n\n\n/// Treat an `Option<OffsetDateTime>` as a [Unix timestamp] for the purposes of\n\n/// serde.\n\n///\n\n/// Use this module in combination with serde's [`#[with]`][with] attribute.\n\n///\n\n/// When deserializing, the offset is assumed to be UTC.\n\n///\n\n/// [Unix timestamp]: https://en.wikipedia.org/wiki/Unix_time\n\n/// [with]: https://serde.rs/field-attrs.html#with\n\npub mod option {\n\n #[allow(clippy::wildcard_imports)]\n\n use super::*;\n\n\n", "file_path": "src/serde/timestamp.rs", "rank": 0, "score": 266746.741578922 }, { "content": "fn parse_component(mut s: &str, index: &mut usize) -> Result<Component, InvalidFormatDescription> {\n\n s = helper::consume_whitespace(s, index);\n\n\n\n let component_name;\n\n let component_index = *index;\n\n if let Some(whitespace_loc) = s.find(char::is_whitespace) {\n\n *index += whitespace_loc;\n\n component_name = &s[..whitespace_loc];\n\n s = &s[whitespace_loc..];\n\n s = helper::consume_whitespace(s, index);\n\n } else {\n\n *index += s.len();\n\n component_name = s;\n\n s = \"\";\n\n }\n\n\n\n Ok(NakedComponent::parse(component_name, component_index)?\n\n .attach_modifiers(modifier::Modifiers::parse(component_name, s, index)?))\n\n}\n\n\n", "file_path": "time-macros/src/format_description/parse.rs", "rank": 1, "score": 209875.07964301933 }, { "content": "#[test]\n\nfn nanosecond() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").nanosecond(), 0);\n\n assert_eq!(\n\n datetime!(\"2019-01-01 23:59:59.999_999_999\").nanosecond(),\n\n 999_999_999\n\n );\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 2, "score": 205673.54033401844 }, 
{ "content": "#[test]\n\nfn nanosecond() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").nanosecond(), 0);\n\n assert_eq!(\n\n datetime!(\"2019-01-01 23:59:59.999_999_999 UTC\").nanosecond(),\n\n 999_999_999,\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 3, "score": 205673.54033401844 }, { "content": "#[test]\n\nfn hour() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").hour(), 0);\n\n assert_eq!(\n\n datetime!(\"2019-01-01 23:59:59 UTC\")\n\n .to_offset(offset!(\"-2\"))\n\n .hour(),\n\n 21,\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 4, "score": 205658.63839497836 }, { "content": "#[test]\n\nfn hour() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").hour(), 0);\n\n assert_eq!(datetime!(\"2019-01-01 23:59:59\").hour(), 23);\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 5, "score": 205658.63839497836 }, { "content": "#[test]\n\nfn second() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").second(), 0);\n\n assert_eq!(\n\n datetime!(\"2019-01-01 23:59:59 UTC\")\n\n .to_offset(offset!(\"+0:00:30\"))\n\n .second(),\n\n 29,\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 6, "score": 205638.79121964725 }, { "content": "#[test]\n\nfn second() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").second(), 0);\n\n assert_eq!(datetime!(\"2019-01-01 23:59:59\").second(), 59);\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 7, "score": 205638.79121964725 }, { "content": "#[test]\n\nfn year() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").year(), 2019);\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:00 UTC\")\n\n .to_offset(offset!(\"+1\"))\n\n .year(),\n\n 2020,\n\n );\n\n assert_eq!(datetime!(\"2020-01-01 0:00 UTC\").year(), 2020);\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 8, "score": 205615.64110340184 }, { "content": "#[test]\n\nfn year() {\n\n 
assert_eq!(datetime!(\"2019-01-01 0:00\").year(), 2019);\n\n assert_eq!(datetime!(\"2019-12-31 0:00\").year(), 2019);\n\n assert_eq!(datetime!(\"2020-01-01 0:00\").year(), 2020);\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 9, "score": 205615.64110340184 }, { "content": "#[test]\n\nfn weekday() {\n\n use Weekday::*;\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").weekday(), Tuesday);\n\n assert_eq!(datetime!(\"2019-02-01 0:00 UTC\").weekday(), Friday);\n\n assert_eq!(datetime!(\"2019-03-01 0:00 UTC\").weekday(), Friday);\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 10, "score": 205567.37461027812 }, { "content": "#[test]\n\nfn weekday() {\n\n use Weekday::*;\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").weekday(), Tuesday);\n\n assert_eq!(datetime!(\"2019-02-01 0:00\").weekday(), Friday);\n\n assert_eq!(datetime!(\"2019-03-01 0:00\").weekday(), Friday);\n\n assert_eq!(datetime!(\"2019-04-01 0:00\").weekday(), Monday);\n\n assert_eq!(datetime!(\"2019-05-01 0:00\").weekday(), Wednesday);\n\n assert_eq!(datetime!(\"2019-06-01 0:00\").weekday(), Saturday);\n\n assert_eq!(datetime!(\"2019-07-01 0:00\").weekday(), Monday);\n\n assert_eq!(datetime!(\"2019-08-01 0:00\").weekday(), Thursday);\n\n assert_eq!(datetime!(\"2019-09-01 0:00\").weekday(), Sunday);\n\n assert_eq!(datetime!(\"2019-10-01 0:00\").weekday(), Tuesday);\n\n assert_eq!(datetime!(\"2019-11-01 0:00\").weekday(), Friday);\n\n assert_eq!(datetime!(\"2019-12-01 0:00\").weekday(), Sunday);\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 11, "score": 205567.37461027812 }, { "content": "/// Serialize an `OffsetDateTime` as its Unix timestamp\n\npub fn serialize<S: Serializer>(\n\n datetime: &OffsetDateTime,\n\n serializer: S,\n\n) -> Result<S::Ok, S::Error> {\n\n datetime.unix_timestamp().serialize(serializer)\n\n}\n\n\n", "file_path": "src/serde/timestamp.rs", "rank": 12, "score": 202560.13982323676 }, { "content": 
"#[test]\n\nfn format_date() -> time::Result<()> {\n\n let format_output = [\n\n (fd!(\"[day]\"), \"31\"),\n\n (fd!(\"[month]\"), \"12\"),\n\n (fd!(\"[month repr:short]\"), \"Dec\"),\n\n (fd!(\"[month repr:long]\"), \"December\"),\n\n (fd!(\"[ordinal]\"), \"365\"),\n\n (fd!(\"[weekday]\"), \"Tuesday\"),\n\n (fd!(\"[weekday repr:short]\"), \"Tue\"),\n\n (fd!(\"[weekday repr:sunday]\"), \"3\"),\n\n (fd!(\"[weekday repr:sunday one_indexed:false]\"), \"2\"),\n\n (fd!(\"[weekday repr:monday]\"), \"2\"),\n\n (fd!(\"[weekday repr:monday one_indexed:false]\"), \"1\"),\n\n (fd!(\"[week_number]\"), \"01\"),\n\n (fd!(\"[week_number padding:none]\"), \"1\"),\n\n (fd!(\"[week_number padding:space]\"), \" 1\"),\n\n (fd!(\"[week_number repr:sunday]\"), \"52\"),\n\n (fd!(\"[week_number repr:monday]\"), \"52\"),\n\n (fd!(\"[year]\"), \"2019\"),\n\n (fd!(\"[year base:iso_week]\"), \"2020\"),\n", "file_path": "tests/integration/formatting.rs", "rank": 13, "score": 200725.8743373632 }, { "content": "fn jan_weekday(year: i32, ordinal: i32) -> u8 {\n\n let adj_year = year - 1;\n\n ((ordinal + adj_year + adj_year / 4 - adj_year / 100 + adj_year / 400 + 6).rem_euclid(7)) as _\n\n}\n\n\n\npub(crate) fn days_in_year(year: i32) -> u16 {\n\n 365 + is_leap_year(year) as u16\n\n}\n\n\n\npub(crate) fn days_in_year_month(year: i32, month: u8) -> u8 {\n\n [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month as usize - 1]\n\n + (month == 2 && is_leap_year(year)) as u8\n\n}\n\n\n\npub(crate) fn weeks_in_year(year: i32) -> u8 {\n\n 52 + (jan_weekday(year, 1) + is_leap_year(year) as u8 == 3) as u8\n\n}\n\n\n\npub(crate) fn ywd_to_yo(year: i32, week: u8, iso_weekday_number: u8) -> (i32, u16) {\n\n let (ordinal, overflow) = (u16::from(week) * 7 + u16::from(iso_weekday_number))\n", "file_path": "time-macros/src/helpers.rs", "rank": 14, "score": 200426.70559772098 }, { "content": "/// Parse a component from the format description. 
Neither the leading nor trailing bracket should\n\n/// be present in the parameter.\n\nfn parse_component(mut s: &[u8], index: &mut usize) -> Result<Component, InvalidFormatDescription> {\n\n // Trim any whitespace between the opening bracket and the component name.\n\n s = helper::consume_whitespace(s, index);\n\n\n\n // Everything before the first whitespace is the component name.\n\n let component_index = *index;\n\n let whitespace_loc = s\n\n .iter()\n\n .position(u8::is_ascii_whitespace)\n\n .unwrap_or(s.len());\n\n *index += whitespace_loc;\n\n let component_name = &s[..whitespace_loc];\n\n s = &s[whitespace_loc..];\n\n s = helper::consume_whitespace(s, index);\n\n\n\n Ok(NakedComponent::parse(component_name, component_index)?\n\n .attach_modifiers(&modifier::Modifiers::parse(component_name, s, index)?))\n\n}\n\n\n", "file_path": "src/format_description/parse.rs", "rank": 15, "score": 188520.191803695 }, { "content": "/// Format the ordinal into the designated output.\n\nfn fmt_ordinal(\n\n output: &mut impl io::Write,\n\n date: Date,\n\n modifier::Ordinal { padding }: modifier::Ordinal,\n\n) -> Result<usize, io::Error> {\n\n format_number(output, date.ordinal(), padding, 3)\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 16, "score": 180023.91420094253 }, { "content": "// region: time formatters\n\n/// Format the hour into the designated output.\n\nfn fmt_hour(\n\n output: &mut impl io::Write,\n\n time: Time,\n\n modifier::Hour {\n\n padding,\n\n is_12_hour_clock,\n\n }: modifier::Hour,\n\n) -> Result<usize, io::Error> {\n\n let value = match (time.hour(), is_12_hour_clock) {\n\n (hour, false) => hour,\n\n (0, true) | (12, true) => 12,\n\n (hour, true) if hour < 12 => hour,\n\n (hour, true) => hour - 12,\n\n };\n\n format_number(output, value, padding, 2)\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 17, "score": 180017.81524067724 }, { "content": "/// Format the minute into the designated output.\n\nfn fmt_minute(\n\n output: &mut 
impl io::Write,\n\n time: Time,\n\n modifier::Minute { padding }: modifier::Minute,\n\n) -> Result<usize, io::Error> {\n\n format_number(output, time.minute(), padding, 2)\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 18, "score": 180008.44779013467 }, { "content": "/// Format the year into the designated output.\n\nfn fmt_year(\n\n output: &mut impl io::Write,\n\n date: Date,\n\n modifier::Year {\n\n padding,\n\n repr,\n\n iso_week_based,\n\n sign_is_mandatory,\n\n }: modifier::Year,\n\n) -> Result<usize, io::Error> {\n\n let full_year = if iso_week_based {\n\n date.iso_year_week().0\n\n } else {\n\n date.year()\n\n };\n\n let value = match repr {\n\n modifier::YearRepr::Full => full_year,\n\n modifier::YearRepr::LastTwo => (full_year % 100).abs(),\n\n };\n\n let width = match repr {\n", "file_path": "src/formatting/mod.rs", "rank": 19, "score": 180003.35372144077 }, { "content": "/// Format the second into the designated output.\n\nfn fmt_second(\n\n output: &mut impl io::Write,\n\n time: Time,\n\n modifier::Second { padding }: modifier::Second,\n\n) -> Result<usize, io::Error> {\n\n format_number(output, time.second(), padding, 2)\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 20, "score": 179992.97280208574 }, { "content": "/// Format the weekday into the designated output.\n\nfn fmt_weekday(\n\n output: &mut impl io::Write,\n\n date: Date,\n\n modifier::Weekday { repr, one_indexed }: modifier::Weekday,\n\n) -> Result<usize, io::Error> {\n\n match repr {\n\n modifier::WeekdayRepr::Short => {\n\n output.write(&WEEKDAY_NAMES[date.weekday().number_days_from_monday() as usize][..3])\n\n }\n\n modifier::WeekdayRepr::Long => {\n\n output.write(WEEKDAY_NAMES[date.weekday().number_days_from_monday() as usize])\n\n }\n\n modifier::WeekdayRepr::Sunday => format_number(\n\n output,\n\n date.weekday().number_days_from_sunday() + one_indexed as u8,\n\n modifier::Padding::None,\n\n 1,\n\n ),\n\n modifier::WeekdayRepr::Monday => format_number(\n\n 
output,\n\n date.weekday().number_days_from_monday() + one_indexed as u8,\n\n modifier::Padding::None,\n\n 1,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 21, "score": 179936.458872873 }, { "content": "#[test]\n\nfn to_ordinal_date() {\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00 UTC\").to_ordinal_date(),\n\n (2019, 1)\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 22, "score": 177075.38084749522 }, { "content": "#[test]\n\nfn to_ordinal_date() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").to_ordinal_date(), (2019, 1));\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 23, "score": 177075.38084749522 }, { "content": "#[test]\n\nfn year() {\n\n assert_eq!(date!(\"2019-002\").year(), 2019);\n\n assert_eq!(date!(\"2020-002\").year(), 2020);\n\n}\n\n\n", "file_path": "tests/integration/date.rs", "rank": 24, "score": 176252.4427944034 }, { "content": "#[test]\n\nfn weekday() {\n\n use Weekday::*;\n\n assert_eq!(date!(\"2019-01-01\").weekday(), Tuesday);\n\n assert_eq!(date!(\"2019-02-01\").weekday(), Friday);\n\n assert_eq!(date!(\"2019-03-01\").weekday(), Friday);\n\n assert_eq!(date!(\"2019-04-01\").weekday(), Monday);\n\n assert_eq!(date!(\"2019-05-01\").weekday(), Wednesday);\n\n assert_eq!(date!(\"2019-06-01\").weekday(), Saturday);\n\n assert_eq!(date!(\"2019-07-01\").weekday(), Monday);\n\n assert_eq!(date!(\"2019-08-01\").weekday(), Thursday);\n\n assert_eq!(date!(\"2019-09-01\").weekday(), Sunday);\n\n assert_eq!(date!(\"2019-10-01\").weekday(), Tuesday);\n\n assert_eq!(date!(\"2019-11-01\").weekday(), Friday);\n\n assert_eq!(date!(\"2019-12-01\").weekday(), Sunday);\n\n}\n\n\n", "file_path": "tests/integration/date.rs", "rank": 25, "score": 176199.63250423863 }, { "content": "#[test]\n\nfn date() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").date(), date!(\"2019-01-01\"));\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 26, "score": 
176144.84180100448 }, { "content": "#[test]\n\nfn date() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").date(), date!(\"2019-01-01\"));\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00 UTC\")\n\n .to_offset(offset!(\"-1\"))\n\n .date(),\n\n date!(\"2018-12-31\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 27, "score": 176144.84180100448 }, { "content": "#[test]\n\nfn time() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").time(), time!(\"0:00\"));\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00 UTC\")\n\n .to_offset(offset!(\"-1\"))\n\n .time(),\n\n time!(\"23:00\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 28, "score": 175837.83310868323 }, { "content": "#[test]\n\nfn time() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").time(), time!(\"0:00\"));\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 29, "score": 175837.83310868323 }, { "content": "/// Format the week number into the designated output.\n\nfn fmt_week_number(\n\n output: &mut impl io::Write,\n\n date: Date,\n\n modifier::WeekNumber { padding, repr }: modifier::WeekNumber,\n\n) -> Result<usize, io::Error> {\n\n format_number(\n\n output,\n\n match repr {\n\n modifier::WeekNumberRepr::Iso => date.iso_week(),\n\n modifier::WeekNumberRepr::Sunday => date.sunday_based_week(),\n\n modifier::WeekNumberRepr::Monday => date.monday_based_week(),\n\n },\n\n padding,\n\n 2,\n\n )\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 30, "score": 174713.19268551885 }, { "content": "// region: offset formatters\n\n/// Format the offset hour into the designated output.\n\nfn fmt_offset_hour(\n\n output: &mut impl io::Write,\n\n offset: UtcOffset,\n\n modifier::OffsetHour {\n\n padding,\n\n sign_is_mandatory,\n\n }: modifier::OffsetHour,\n\n) -> Result<usize, io::Error> {\n\n let mut bytes = 0;\n\n if offset.is_negative() {\n\n bytes += output.write(&[b'-'])?;\n\n } else if sign_is_mandatory {\n\n 
bytes += output.write(&[b'+'])?;\n\n }\n\n bytes += format_number(output, offset.whole_hours().abs() as u8, padding, 2)?;\n\n Ok(bytes)\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 31, "score": 174692.8730552223 }, { "content": "/// Format the offset minute into the designated output.\n\nfn fmt_offset_minute(\n\n output: &mut impl io::Write,\n\n offset: UtcOffset,\n\n modifier::OffsetMinute { padding }: modifier::OffsetMinute,\n\n) -> Result<usize, io::Error> {\n\n format_number(output, offset.minutes_past_hour().abs() as u8, padding, 2)\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 32, "score": 174688.2922730137 }, { "content": "/// Format the offset second into the designated output.\n\nfn fmt_offset_second(\n\n output: &mut impl io::Write,\n\n offset: UtcOffset,\n\n modifier::OffsetSecond { padding }: modifier::OffsetSecond,\n\n) -> Result<usize, io::Error> {\n\n format_number(output, offset.seconds_past_minute().abs() as u8, padding, 2)\n\n}\n\n// endregion offset formatters\n", "file_path": "src/formatting/mod.rs", "rank": 33, "score": 174673.32309815267 }, { "content": "#[test]\n\nfn format_time() -> time::Result<()> {\n\n let format_output = [\n\n (fd!(\"[hour]\"), \"13\"),\n\n (fd!(\"[hour repr:12]\"), \"01\"),\n\n (fd!(\"[hour repr:12 padding:none]\"), \"1\"),\n\n (fd!(\"[hour repr:12 padding:space]\"), \" 1\"),\n\n (fd!(\"[hour repr:24]\"), \"13\"),\n\n (fd!(\"[hour repr:24]\"), \"13\"),\n\n (fd!(\"[hour repr:24 padding:none]\"), \"13\"),\n\n (fd!(\"[hour repr:24 padding:space]\"), \"13\"),\n\n (fd!(\"[minute]\"), \"02\"),\n\n (fd!(\"[minute padding:none]\"), \"2\"),\n\n (fd!(\"[minute padding:space]\"), \" 2\"),\n\n (fd!(\"[minute padding:zero]\"), \"02\"),\n\n (fd!(\"[period]\"), \"PM\"),\n\n (fd!(\"[period case:upper]\"), \"PM\"),\n\n (fd!(\"[period case:lower]\"), \"pm\"),\n\n (fd!(\"[second]\"), \"03\"),\n\n (fd!(\"[second padding:none]\"), \"3\"),\n\n (fd!(\"[second padding:space]\"), \" 3\"),\n", "file_path": 
"tests/integration/formatting.rs", "rank": 34, "score": 174532.98288295398 }, { "content": "#[cfg_attr(__time_03_docs, doc(cfg(feature = \"alloc\")))]\n\npub fn parse(s: &str) -> Result<Vec<FormatItem<'_>>, InvalidFormatDescription> {\n\n let mut compound = Vec::new();\n\n let mut loc = 0;\n\n\n\n let mut s = s.as_bytes();\n\n\n\n while !s.is_empty() {\n\n let ParsedItem { item, remaining } = parse_item(s, &mut loc)?;\n\n s = remaining;\n\n compound.push(item);\n\n }\n\n\n\n Ok(compound)\n\n}\n", "file_path": "src/format_description/parse.rs", "rank": 35, "score": 172386.47572846958 }, { "content": "#[test]\n\nfn ordinal() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").ordinal(), 1);\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:00 UTC\")\n\n .to_offset(offset!(\"+1\"))\n\n .ordinal(),\n\n 1,\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 36, "score": 171260.61474626625 }, { "content": "#[test]\n\nfn ordinal() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").ordinal(), 1);\n\n assert_eq!(datetime!(\"2019-12-31 0:00\").ordinal(), 365);\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 37, "score": 171260.61474626625 }, { "content": "#[test]\n\nfn minute() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00 UTC\").minute(), 0);\n\n assert_eq!(\n\n datetime!(\"2019-01-01 23:59:59 UTC\")\n\n .to_offset(offset!(\"+0:30\"))\n\n .minute(),\n\n 29,\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 38, "score": 171245.72561782296 }, { "content": "#[test]\n\nfn minute() {\n\n assert_eq!(datetime!(\"2019-01-01 0:00\").minute(), 0);\n\n assert_eq!(datetime!(\"2019-01-01 23:59:59\").minute(), 59);\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 39, "score": 171245.72561782296 }, { "content": "#[test]\n\nfn unix_timestamp() {\n\n assert_eq!(OffsetDateTime::UNIX_EPOCH.unix_timestamp(), 0);\n\n assert_eq!(\n\n OffsetDateTime::UNIX_EPOCH\n\n 
.to_offset(offset!(\"+1\"))\n\n .unix_timestamp(),\n\n 0,\n\n );\n\n assert_eq!(datetime!(\"1970-01-01 0:00 -1\").unix_timestamp(), 3_600);\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 40, "score": 166382.1496865837 }, { "content": "#[test]\n\nfn from_unix_timestamp() {\n\n assert_eq!(\n\n OffsetDateTime::from_unix_timestamp(0),\n\n Ok(OffsetDateTime::UNIX_EPOCH),\n\n );\n\n assert_eq!(\n\n OffsetDateTime::from_unix_timestamp(1_546_300_800),\n\n Ok(datetime!(\"2019-01-01 0:00 UTC\")),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 41, "score": 166382.1496865837 }, { "content": "#[test]\n\nfn nanosecond() -> Result<()> {\n\n for nano in (0..1_000_000_000).step_by(1_000_000) {\n\n assert_eq!(Time::from_hms_nano(0, 0, 0, nano)?.nanosecond(), nano);\n\n assert_eq!(Time::from_hms_nano(23, 59, 59, nano)?.nanosecond(), nano);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/integration/time.rs", "rank": 42, "score": 166017.1320968581 }, { "content": "#[test]\n\nfn hour() -> Result<()> {\n\n for hour in 0..24 {\n\n assert_eq!(Time::from_hms(hour, 0, 0)?.hour(), hour);\n\n assert_eq!(Time::from_hms(hour, 59, 59)?.hour(), hour);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/integration/time.rs", "rank": 43, "score": 166001.72489887822 }, { "content": "#[test]\n\nfn second() -> Result<()> {\n\n for second in 0..60 {\n\n assert_eq!(Time::from_hms(0, 0, second)?.second(), second);\n\n assert_eq!(Time::from_hms(23, 59, second)?.second(), second);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/integration/time.rs", "rank": 44, "score": 165981.20479348503 }, { "content": "#[test]\n\nfn add_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00 UTC\") + 5.days(),\n\n datetime!(\"2019-01-06 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 0:00 UTC\") + 1.days(),\n\n datetime!(\"2020-01-01 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:59:59 UTC\") + 2.seconds(),\n\n 
datetime!(\"2020-01-01 0:00:01 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00:01 UTC\") + (-2).seconds(),\n\n datetime!(\"2019-12-31 23:59:59 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"1999-12-31 23:00 UTC\") + 1.hours(),\n\n datetime!(\"2000-01-01 0:00 UTC\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 45, "score": 165338.99212447702 }, { "content": "#[test]\n\nfn sub_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-06 0:00 UTC\") - 5.days(),\n\n datetime!(\"2019-01-01 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00 UTC\") - 1.days(),\n\n datetime!(\"2019-12-31 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00:01 UTC\") - 2.seconds(),\n\n datetime!(\"2019-12-31 23:59:59 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:59:59 UTC\") - (-2).seconds(),\n\n datetime!(\"2020-01-01 0:00:01 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"1999-12-31 23:00 UTC\") - (-1).hours(),\n\n datetime!(\"2000-01-01 0:00 UTC\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 46, "score": 165338.99212447702 }, { "content": "#[test]\n\nfn add_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00\") + 5.days(),\n\n datetime!(\"2019-01-06 0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 0:00\") + 1.days(),\n\n datetime!(\"2020-01-01 0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:59:59\") + 2.seconds(),\n\n datetime!(\"2020-01-01 0:00:01\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00:01\") + (-2).seconds(),\n\n datetime!(\"2019-12-31 23:59:59\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"1999-12-31 23:00\") + 1.hours(),\n\n datetime!(\"2000-01-01 0:00\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 47, "score": 165338.99212447702 }, { "content": "#[test]\n\nfn sub_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-06 0:00\") - 5.days(),\n\n datetime!(\"2019-01-01 
0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00\") - 1.days(),\n\n datetime!(\"2019-12-31 0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00:01\") - 2.seconds(),\n\n datetime!(\"2019-12-31 23:59:59\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:59:59\") - (-2).seconds(),\n\n datetime!(\"2020-01-01 0:00:01\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"1999-12-31 23:00\") - (-1).hours(),\n\n datetime!(\"2000-01-01 0:00\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 48, "score": 165338.99212447702 }, { "content": "#[proc_macro]\n\npub fn format_description(input: TokenStream) -> TokenStream {\n\n let string = match helpers::get_string_literal(input) {\n\n Ok(string) => string,\n\n Err(err) => return err.to_compile_error(),\n\n };\n\n\n\n let items = match format_description::parse(&string) {\n\n Ok(items) => items,\n\n Err(err) => return err.to_compile_error(),\n\n };\n\n\n\n let mut tokens = TokenStream::new();\n\n for item in items {\n\n tokens.extend(\n\n [\n\n item.to_external_token_stream(),\n\n TokenStream::from(TokenTree::Punct(Punct::new(',', Spacing::Alone))),\n\n ]\n\n .iter()\n\n .cloned()\n", "file_path": "time-macros/src/lib.rs", "rank": 49, "score": 164384.41832564067 }, { "content": "#[test]\n\nfn format_pdt() -> time::Result<()> {\n\n let format_description = fd!(\"[year]-[month]-[day] [hour]:[minute]:[second].[subsecond]\");\n\n\n\n assert_eq!(\n\n datetime!(\"1970-01-01 0:00\").format(&format_description)?,\n\n \"1970-01-01 00:00:00.0\"\n\n );\n\n assert!(\n\n datetime!(\"1970-01-01 0:00\")\n\n .format_into(&mut io::sink(), &format_description)\n\n .is_ok()\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/integration/formatting.rs", "rank": 50, "score": 163972.78115278855 }, { "content": "#[test]\n\nfn format_odt() -> time::Result<()> {\n\n // We can't currently handle escaped line breaks in the format description macro. 
This also\n\n // gives us coverage of the dynamic formatting strings (to an extent).\n\n let format_description = format_description::parse(\n\n \"[year]-[month]-[day] [hour]:[minute]:[second].[subsecond] [offset_hour \\\n\n sign:mandatory]:[offset_minute]:[offset_second]\",\n\n )?;\n\n\n\n assert_eq!(\n\n datetime!(\"1970-01-01 0:00 UTC\").format(&format_description)?,\n\n \"1970-01-01 00:00:00.0 +00:00:00\"\n\n );\n\n assert!(\n\n datetime!(\"1970-01-01 0:00 UTC\")\n\n .format_into(&mut io::sink(), &format_description)\n\n .is_ok()\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/integration/formatting.rs", "rank": 51, "score": 163972.78115278855 }, { "content": "#[test]\n\nfn format_offset() -> time::Result<()> {\n\n let value_format_output = [\n\n (\n\n offset!(\"+01:02:03\"),\n\n fd!(\"[offset_hour sign:automatic]\"),\n\n \"01\",\n\n ),\n\n (\n\n offset!(\"+01:02:03\"),\n\n fd!(\"[offset_hour sign:mandatory]\"),\n\n \"+01\",\n\n ),\n\n (\n\n offset!(\"-01:02:03\"),\n\n fd!(\"[offset_hour sign:automatic]\"),\n\n \"-01\",\n\n ),\n\n (\n\n offset!(\"-01:02:03\"),\n\n fd!(\"[offset_hour sign:mandatory]\"),\n", "file_path": "tests/integration/formatting.rs", "rank": 52, "score": 163972.78115278855 }, { "content": "#[test]\n\nfn assume_offset() {\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00\")\n\n .assume_offset(offset!(\"UTC\"))\n\n .unix_timestamp(),\n\n 1_546_300_800,\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00\")\n\n .assume_offset(offset!(\"-1\"))\n\n .unix_timestamp(),\n\n 1_546_304_400,\n\n );\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 53, "score": 162100.640997658 }, { "content": "#[test]\n\nfn unix_timestamp_nanos() {\n\n assert_eq!(datetime!(\"1970-01-01 0:00 UTC\").unix_timestamp_nanos(), 0);\n\n assert_eq!(\n\n datetime!(\"1970-01-01 1:00 UTC\")\n\n .to_offset(offset!(\"-1\"))\n\n .unix_timestamp_nanos(),\n\n 3_600_000_000_000,\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", 
"rank": 54, "score": 161808.6167743369 }, { "content": "#[test]\n\nfn from_unix_timestamp_nanos() {\n\n assert_eq!(\n\n OffsetDateTime::from_unix_timestamp_nanos(0),\n\n Ok(OffsetDateTime::UNIX_EPOCH),\n\n );\n\n assert_eq!(\n\n OffsetDateTime::from_unix_timestamp_nanos(1_546_300_800_000_000_000),\n\n Ok(datetime!(\"2019-01-01 0:00 UTC\")),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 55, "score": 161808.6167743369 }, { "content": "#[test]\n\nfn add_std_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00\") + 5.std_days(),\n\n datetime!(\"2019-01-06 0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 0:00\") + 1.std_days(),\n\n datetime!(\"2020-01-01 0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:59:59\") + 2.std_seconds(),\n\n datetime!(\"2020-01-01 0:00:01\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 56, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn std_add_duration() {\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2019-01-01 0:00 UTC\")) + 0.seconds(),\n\n SystemTime::from(datetime!(\"2019-01-01 0:00 UTC\")),\n\n );\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2019-01-01 0:00 UTC\")) + 5.days(),\n\n SystemTime::from(datetime!(\"2019-01-06 0:00 UTC\")),\n\n );\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2019-12-31 0:00 UTC\")) + 1.days(),\n\n SystemTime::from(datetime!(\"2020-01-01 0:00 UTC\")),\n\n );\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2019-12-31 23:59:59 UTC\")) + 2.seconds(),\n\n SystemTime::from(datetime!(\"2020-01-01 0:00:01 UTC\")),\n\n );\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2020-01-01 0:00:01 UTC\")) + (-2).seconds(),\n\n SystemTime::from(datetime!(\"2019-12-31 23:59:59 UTC\")),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 57, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn sub_std_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-06 0:00\") - 
5.std_days(),\n\n datetime!(\"2019-01-01 0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00\") - 1.std_days(),\n\n datetime!(\"2019-12-31 0:00\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00:01\") - 2.std_seconds(),\n\n datetime!(\"2019-12-31 23:59:59\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 58, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn sub_std_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-06 0:00 UTC\") - 5.std_days(),\n\n datetime!(\"2019-01-01 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00 UTC\") - 1.std_days(),\n\n datetime!(\"2019-12-31 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 0:00:01 UTC\") - 2.std_seconds(),\n\n datetime!(\"2019-12-31 23:59:59 UTC\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 59, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn sub_assign_duration() {\n\n let mut new_years_day_2019 = datetime!(\"2019-01-06 0:00 UTC\");\n\n new_years_day_2019 -= 5.days();\n\n assert_eq!(new_years_day_2019, datetime!(\"2019-01-01 0:00 UTC\"));\n\n\n\n let mut new_years_day_2020_days = datetime!(\"2020-01-01 0:00 UTC\");\n\n new_years_day_2020_days -= 1.days();\n\n assert_eq!(new_years_day_2020_days, datetime!(\"2019-12-31 0:00 UTC\"));\n\n\n\n let mut new_years_day_2020_seconds = datetime!(\"2020-01-01 0:00:01 UTC\");\n\n new_years_day_2020_seconds -= 2.seconds();\n\n assert_eq!(\n\n new_years_day_2020_seconds,\n\n datetime!(\"2019-12-31 23:59:59 UTC\")\n\n );\n\n\n\n let mut new_years_eve_2020_seconds = datetime!(\"2019-12-31 23:59:59 UTC\");\n\n new_years_eve_2020_seconds -= (-2).seconds();\n\n assert_eq!(\n\n new_years_eve_2020_seconds,\n\n datetime!(\"2020-01-01 0:00:01 UTC\")\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 60, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn add_assign_duration() {\n\n let mut 
new_years_day_2019 = datetime!(\"2019-01-01 0:00 UTC\");\n\n new_years_day_2019 += 5.days();\n\n assert_eq!(new_years_day_2019, datetime!(\"2019-01-06 0:00 UTC\"));\n\n\n\n let mut new_years_eve_2020_days = datetime!(\"2019-12-31 0:00 UTC\");\n\n new_years_eve_2020_days += 1.days();\n\n assert_eq!(new_years_eve_2020_days, datetime!(\"2020-01-01 0:00 UTC\"));\n\n\n\n let mut new_years_eve_2020_seconds = datetime!(\"2019-12-31 23:59:59 UTC\");\n\n new_years_eve_2020_seconds += 2.seconds();\n\n assert_eq!(\n\n new_years_eve_2020_seconds,\n\n datetime!(\"2020-01-01 0:00:01 UTC\")\n\n );\n\n\n\n let mut new_years_day_2020_seconds = datetime!(\"2020-01-01 0:00:01 UTC\");\n\n new_years_day_2020_seconds += (-2).seconds();\n\n assert_eq!(\n\n new_years_day_2020_seconds,\n\n datetime!(\"2019-12-31 23:59:59 UTC\")\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 61, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn add_assign_duration() {\n\n let mut new_years_day_2019 = datetime!(\"2019-01-01 0:00\");\n\n new_years_day_2019 += 5.days();\n\n assert_eq!(new_years_day_2019, datetime!(\"2019-01-06 0:00\"));\n\n\n\n let mut new_years_eve_2020_days = datetime!(\"2019-12-31 0:00\");\n\n new_years_eve_2020_days += 1.days();\n\n assert_eq!(new_years_eve_2020_days, datetime!(\"2020-01-01 0:00\"));\n\n\n\n let mut new_years_eve_2020_seconds = datetime!(\"2019-12-31 23:59:59\");\n\n new_years_eve_2020_seconds += 2.seconds();\n\n assert_eq!(new_years_eve_2020_seconds, datetime!(\"2020-01-01 0:00:01\"));\n\n\n\n let mut new_years_day_2020_days = datetime!(\"2020-01-01 0:00:01\");\n\n new_years_day_2020_days += (-2).seconds();\n\n assert_eq!(new_years_day_2020_days, datetime!(\"2019-12-31 23:59:59\"));\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 62, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn std_sub_duration() {\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2019-01-06 0:00 UTC\")) - 5.days(),\n\n 
SystemTime::from(datetime!(\"2019-01-01 0:00 UTC\")),\n\n );\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2020-01-01 0:00 UTC\")) - 1.days(),\n\n SystemTime::from(datetime!(\"2019-12-31 0:00 UTC\")),\n\n );\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2020-01-01 0:00:01 UTC\")) - 2.seconds(),\n\n SystemTime::from(datetime!(\"2019-12-31 23:59:59 UTC\")),\n\n );\n\n assert_eq!(\n\n SystemTime::from(datetime!(\"2019-12-31 23:59:59 UTC\")) - (-2).seconds(),\n\n SystemTime::from(datetime!(\"2020-01-01 0:00:01 UTC\")),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 63, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn sub_assign_duration() {\n\n let mut new_years_day_2019 = datetime!(\"2019-01-06 0:00\");\n\n new_years_day_2019 -= 5.days();\n\n assert_eq!(new_years_day_2019, datetime!(\"2019-01-01 0:00\"));\n\n\n\n let mut new_years_day_2020_days = datetime!(\"2020-01-01 0:00\");\n\n new_years_day_2020_days -= 1.days();\n\n assert_eq!(new_years_day_2020_days, datetime!(\"2019-12-31 0:00\"));\n\n\n\n let mut new_years_day_2020_seconds = datetime!(\"2020-01-01 0:00:01\");\n\n new_years_day_2020_seconds -= 2.seconds();\n\n assert_eq!(new_years_day_2020_seconds, datetime!(\"2019-12-31 23:59:59\"));\n\n\n\n let mut new_years_eve_2020_seconds = datetime!(\"2019-12-31 23:59:59\");\n\n new_years_eve_2020_seconds -= (-2).seconds();\n\n assert_eq!(new_years_eve_2020_seconds, datetime!(\"2020-01-01 0:00:01\"));\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 64, "score": 160797.56262346805 }, { "content": "#[test]\n\nfn add_std_duration() {\n\n assert_eq!(\n\n datetime!(\"2019-01-01 0:00 UTC\") + 5.std_days(),\n\n datetime!(\"2019-01-06 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 0:00 UTC\") + 1.std_days(),\n\n datetime!(\"2020-01-01 0:00 UTC\"),\n\n );\n\n assert_eq!(\n\n datetime!(\"2019-12-31 23:59:59 UTC\") + 2.std_seconds(),\n\n datetime!(\"2020-01-01 0:00:01 UTC\"),\n\n 
);\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 65, "score": 160797.56262346805 }, { "content": "fn insufficient_type_information() -> Format {\n\n Time::MIDNIGHT\n\n .format(&time::format_description::well_known::Rfc3339)\n\n .unwrap_err()\n\n}\n\n\n", "file_path": "tests/integration/error.rs", "rank": 66, "score": 159874.7064473384 }, { "content": "#[test]\n\nfn rfc_3339() -> time::Result<()> {\n\n assert_eq!(\n\n datetime!(\"2021-01-02 03:04:05 UTC\").format(&Rfc3339)?,\n\n \"2021-01-02T03:04:05Z\"\n\n );\n\n assert_eq!(\n\n datetime!(\"2021-01-02 03:04:05.1 UTC\").format(&Rfc3339)?,\n\n \"2021-01-02T03:04:05.1Z\"\n\n );\n\n assert_eq!(\n\n datetime!(\"2021-01-02 03:04:05.12 UTC\").format(&Rfc3339)?,\n\n \"2021-01-02T03:04:05.12Z\"\n\n );\n\n assert_eq!(\n\n datetime!(\"2021-01-02 03:04:05.123 UTC\").format(&Rfc3339)?,\n\n \"2021-01-02T03:04:05.123Z\"\n\n );\n\n assert_eq!(\n\n datetime!(\"2021-01-02 03:04:05.123_4 UTC\").format(&Rfc3339)?,\n\n \"2021-01-02T03:04:05.1234Z\"\n", "file_path": "tests/integration/formatting.rs", "rank": 67, "score": 156587.20809940842 }, { "content": "#[test]\n\nfn add_assign_std_duration() {\n\n let mut new_years_day_2019 = datetime!(\"2019-01-01 0:00 UTC\");\n\n new_years_day_2019 += 5.std_days();\n\n assert_eq!(new_years_day_2019, datetime!(\"2019-01-06 0:00 UTC\"));\n\n\n\n let mut new_years_eve_2020_days = datetime!(\"2019-12-31 0:00 UTC\");\n\n new_years_eve_2020_days += 1.std_days();\n\n assert_eq!(new_years_eve_2020_days, datetime!(\"2020-01-01 0:00 UTC\"));\n\n\n\n let mut new_years_eve_2020_seconds = datetime!(\"2019-12-31 23:59:59 UTC\");\n\n new_years_eve_2020_seconds += 2.std_seconds();\n\n assert_eq!(\n\n new_years_eve_2020_seconds,\n\n datetime!(\"2020-01-01 0:00:01 UTC\")\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 68, "score": 156527.31450538416 }, { "content": "#[test]\n\nfn std_add_assign_duration() {\n\n let mut new_years_day_2019 = 
SystemTime::from(datetime!(\"2019-01-01 0:00 UTC\"));\n\n new_years_day_2019 += 5.days();\n\n assert_eq!(new_years_day_2019, datetime!(\"2019-01-06 0:00 UTC\"));\n\n\n\n let mut new_years_eve_2020_days = SystemTime::from(datetime!(\"2019-12-31 0:00 UTC\"));\n\n new_years_eve_2020_days += 1.days();\n\n assert_eq!(new_years_eve_2020_days, datetime!(\"2020-01-01 0:00 UTC\"));\n\n\n\n let mut new_years_eve_2020_seconds = SystemTime::from(datetime!(\"2019-12-31 23:59:59 UTC\"));\n\n new_years_eve_2020_seconds += 2.seconds();\n\n assert_eq!(\n\n new_years_eve_2020_seconds,\n\n datetime!(\"2020-01-01 0:00:01 UTC\")\n\n );\n\n\n\n let mut new_years_day_2020_seconds = SystemTime::from(datetime!(\"2020-01-01 0:00:01 UTC\"));\n\n new_years_day_2020_seconds += (-2).seconds();\n\n assert_eq!(\n\n new_years_day_2020_seconds,\n\n datetime!(\"2019-12-31 23:59:59 UTC\")\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 69, "score": 156527.31450538416 }, { "content": "#[test]\n\nfn sub_assign_std_duration() {\n\n let mut ny19 = datetime!(\"2019-01-06 0:00 UTC\");\n\n ny19 -= 5.std_days();\n\n assert_eq!(ny19, datetime!(\"2019-01-01 0:00 UTC\"));\n\n\n\n let mut ny20 = datetime!(\"2020-01-01 0:00 UTC\");\n\n ny20 -= 1.std_days();\n\n assert_eq!(ny20, datetime!(\"2019-12-31 0:00 UTC\"));\n\n\n\n let mut ny20t = datetime!(\"2020-01-01 0:00:01 UTC\");\n\n ny20t -= 2.std_seconds();\n\n assert_eq!(ny20t, datetime!(\"2019-12-31 23:59:59 UTC\"));\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 70, "score": 156527.31450538416 }, { "content": "#[test]\n\nfn std_sub_assign_duration() {\n\n let mut new_years_day_2019 = SystemTime::from(datetime!(\"2019-01-06 0:00 UTC\"));\n\n new_years_day_2019 -= 5.days();\n\n assert_eq!(new_years_day_2019, datetime!(\"2019-01-01 0:00 UTC\"));\n\n\n\n let mut new_years_day_2020 = SystemTime::from(datetime!(\"2020-01-01 0:00 UTC\"));\n\n new_years_day_2020 -= 1.days();\n\n assert_eq!(new_years_day_2020, 
datetime!(\"2019-12-31 0:00 UTC\"));\n\n\n\n let mut new_years_day_2020_seconds = SystemTime::from(datetime!(\"2020-01-01 0:00:01 UTC\"));\n\n new_years_day_2020_seconds -= 2.seconds();\n\n assert_eq!(\n\n new_years_day_2020_seconds,\n\n datetime!(\"2019-12-31 23:59:59 UTC\")\n\n );\n\n\n\n let mut new_years_eve_2020_seconds = SystemTime::from(datetime!(\"2019-12-31 23:59:59 UTC\"));\n\n new_years_eve_2020_seconds -= (-2).seconds();\n\n assert_eq!(\n\n new_years_eve_2020_seconds,\n\n datetime!(\"2020-01-01 0:00:01 UTC\")\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 71, "score": 156527.31450538416 }, { "content": "#[test]\n\nfn add_assign_std_duration() {\n\n let mut ny19 = datetime!(\"2019-01-01 0:00\");\n\n ny19 += 5.std_days();\n\n assert_eq!(ny19, datetime!(\"2019-01-06 0:00\"));\n\n\n\n let mut nye20 = datetime!(\"2019-12-31 0:00\");\n\n nye20 += 1.std_days();\n\n assert_eq!(nye20, datetime!(\"2020-01-01 0:00\"));\n\n\n\n let mut nye20t = datetime!(\"2019-12-31 23:59:59\");\n\n nye20t += 2.std_seconds();\n\n assert_eq!(nye20t, datetime!(\"2020-01-01 0:00:01\"));\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 72, "score": 156527.31450538416 }, { "content": "#[test]\n\nfn sub_assign_std_duration() {\n\n let mut ny19 = datetime!(\"2019-01-06 0:00\");\n\n ny19 -= 5.std_days();\n\n assert_eq!(ny19, datetime!(\"2019-01-01 0:00\"));\n\n\n\n let mut ny20 = datetime!(\"2020-01-01 0:00\");\n\n ny20 -= 1.std_days();\n\n assert_eq!(ny20, datetime!(\"2019-12-31 0:00\"));\n\n\n\n let mut ny20t = datetime!(\"2020-01-01 0:00:01\");\n\n ny20t -= 2.std_seconds();\n\n assert_eq!(ny20t, datetime!(\"2019-12-31 23:59:59\"));\n\n}\n\n\n", "file_path": "tests/integration/primitive_date_time.rs", "rank": 73, "score": 156527.31450538416 }, { "content": "fn is_leap_year(year: i32) -> bool {\n\n (year % 4 == 0) && ((year % 100 != 0) || (year % 400 == 0))\n\n}\n\n\n", "file_path": "time-macros/src/helpers.rs", "rank": 74, 
"score": 155059.10193859276 }, { "content": "#[test]\n\nfn parse_components() -> time::Result<()> {\n\n macro_rules! parse_component {\n\n ($component:expr, $input:expr,_. $property:ident == $expected:expr) => {\n\n let mut parsed = Parsed::new();\n\n parsed.parse_component($input, $component)?;\n\n assert_eq!(parsed.$property, $expected);\n\n };\n\n }\n\n\n\n parse_component!(\n\n Component::Year(modifier::Year {\n\n padding: modifier::Padding::Zero,\n\n repr: modifier::YearRepr::Full,\n\n iso_week_based: false,\n\n sign_is_mandatory: false,\n\n }),\n\n b\"2021\",\n\n _.year == Some(2021)\n\n );\n\n parse_component!(\n", "file_path": "tests/integration/parsing.rs", "rank": 75, "score": 153596.244331346 }, { "content": "#[test]\n\nfn parse_date() -> time::Result<()> {\n\n let format_input_output = [\n\n (\n\n fd::parse(\"[year]-[month]-[day]\")?,\n\n \"2021-01-02\",\n\n date!(\"2021-01-02\"),\n\n ),\n\n (\n\n fd::parse(\"[year]-[ordinal]\")?,\n\n \"2021-002\",\n\n date!(\"2021-002\"),\n\n ),\n\n (\n\n fd::parse(\"[year base:iso_week]-W[week_number]-[weekday repr:monday]\")?,\n\n \"2020-W53-6\",\n\n date!(\"2021-01-02\"),\n\n ),\n\n (\n\n fd::parse(\"[year]-W[week_number repr:monday]-[weekday repr:monday]\")?,\n\n \"2021-W00-6\",\n", "file_path": "tests/integration/parsing.rs", "rank": 76, "score": 151779.90294899474 }, { "content": "#[test]\n\nfn to_ordinal_date() {\n\n assert_eq!(date!(\"2019-01-01\").to_ordinal_date(), (2019, 1));\n\n}\n\n\n", "file_path": "tests/integration/date.rs", "rank": 77, "score": 147913.91382536053 }, { "content": "fn parse_literal<'a>(s: &'a str, index: &mut usize) -> ParsedItem<'a> {\n\n let loc = s.find('[').unwrap_or_else(|| s.len());\n\n *index += loc;\n\n ParsedItem {\n\n item: FormatItem::Literal(&s[..loc]),\n\n remaining: &s[loc..],\n\n }\n\n}\n\n\n", "file_path": "time-macros/src/format_description/parse.rs", "rank": 78, "score": 147569.43723672908 }, { "content": "#[test]\n\nfn time_fn() {\n\n let (time, value) = 
Duration::time_fn(|| {\n\n std::thread::sleep(1.std_milliseconds());\n\n 0\n\n });\n\n\n\n assert!(time >= 1.milliseconds());\n\n assert_eq!(value, 0);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 79, "score": 146342.9782035765 }, { "content": "#[test]\n\nfn replace_date_time() {\n\n assert_eq!(\n\n datetime!(\"2020-01-01 12:00 UTC\").replace_date_time(datetime!(\"2020-01-30 16:00\")),\n\n datetime!(\"2020-01-30 16:00 UTC\")\n\n );\n\n assert_eq!(\n\n datetime!(\"2020-01-01 12:00 +1\").replace_date_time(datetime!(\"2020-01-30 0:00\")),\n\n datetime!(\"2020-01-30 0:00 +1\")\n\n );\n\n}\n\n\n", "file_path": "tests/integration/offset_date_time.rs", "rank": 80, "score": 145879.04846012426 }, { "content": "#[test]\n\nfn nanoseconds() {\n\n assert_eq!(Duration::nanoseconds(1), 1.microseconds() / 1_000);\n\n assert_eq!(Duration::nanoseconds(-1), (-1).microseconds() / 1_000);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 81, "score": 142930.18425008934 }, { "content": "#[test]\n\nfn hours() {\n\n assert_eq!(Duration::hours(1), 3_600.seconds());\n\n assert_eq!(Duration::hours(2), (2 * 3_600).seconds());\n\n assert_eq!(Duration::hours(-1), (-3_600).seconds());\n\n assert_eq!(Duration::hours(-2), (2 * -3_600).seconds());\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 82, "score": 142914.23632852169 }, { "content": "#[test]\n\nfn minutes() {\n\n assert_eq!(Duration::minutes(1), 60.seconds());\n\n assert_eq!(Duration::minutes(2), (2 * 60).seconds());\n\n assert_eq!(Duration::minutes(-1), (-60).seconds());\n\n assert_eq!(Duration::minutes(-2), (2 * -60).seconds());\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 83, "score": 142908.92733546736 }, { "content": "#[test]\n\nfn seconds() {\n\n assert_eq!(Duration::seconds(1), 1_000.milliseconds());\n\n assert_eq!(Duration::seconds(2), (2 * 1_000).milliseconds());\n\n assert_eq!(Duration::seconds(-1), (-1_000).milliseconds());\n\n 
assert_eq!(Duration::seconds(-2), (2 * -1_000).milliseconds());\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 84, "score": 142892.99605940864 }, { "content": "fn component_range() -> ComponentRange {\n\n Date::from_ordinal_date(0, 367).unwrap_err()\n\n}\n\n\n", "file_path": "tests/integration/error.rs", "rank": 85, "score": 141005.78025640655 }, { "content": "#[test]\n\nfn number_from_monday() {\n\n assert_eq!(Monday.number_from_monday(), 1);\n\n assert_eq!(Tuesday.number_from_monday(), 2);\n\n assert_eq!(Wednesday.number_from_monday(), 3);\n\n assert_eq!(Thursday.number_from_monday(), 4);\n\n assert_eq!(Friday.number_from_monday(), 5);\n\n assert_eq!(Saturday.number_from_monday(), 6);\n\n assert_eq!(Sunday.number_from_monday(), 7);\n\n}\n\n\n", "file_path": "tests/integration/weekday.rs", "rank": 86, "score": 139723.3328689584 }, { "content": "#[test]\n\nfn number_from_sunday() {\n\n assert_eq!(Sunday.number_from_sunday(), 1);\n\n assert_eq!(Monday.number_from_sunday(), 2);\n\n assert_eq!(Tuesday.number_from_sunday(), 3);\n\n assert_eq!(Wednesday.number_from_sunday(), 4);\n\n assert_eq!(Thursday.number_from_sunday(), 5);\n\n assert_eq!(Friday.number_from_sunday(), 6);\n\n assert_eq!(Saturday.number_from_sunday(), 7);\n\n}\n\n\n", "file_path": "tests/integration/weekday.rs", "rank": 87, "score": 139723.3328689584 }, { "content": "#[test]\n\nfn with_time() {\n\n assert_eq!(\n\n date!(\"1970-01-01\").with_time(time!(\"0:00\")),\n\n datetime!(\"1970-01-01 0:00\"),\n\n );\n\n}\n\n\n", "file_path": "tests/integration/date.rs", "rank": 88, "score": 139227.13496355707 }, { "content": "#[test]\n\nfn subsec_nanoseconds() {\n\n assert_eq!(1.000_000_4.seconds().subsec_nanoseconds(), 400);\n\n assert_eq!((-1.000_000_4).seconds().subsec_nanoseconds(), -400);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 89, "score": 138686.59900490558 }, { "content": "#[test]\n\nfn whole_nanoseconds() {\n\n 
assert_eq!(1.microseconds().whole_nanoseconds(), 1_000);\n\n assert_eq!((-1).microseconds().whole_nanoseconds(), -1_000);\n\n assert_eq!(1.nanoseconds().whole_nanoseconds(), 1);\n\n assert_eq!((-1).nanoseconds().whole_nanoseconds(), -1);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 90, "score": 138686.59900490558 }, { "content": "#[test]\n\nfn whole_hours() {\n\n assert_eq!(Duration::hours(1).whole_hours(), 1);\n\n assert_eq!(Duration::hours(-1).whole_hours(), -1);\n\n assert_eq!(Duration::minutes(59).whole_hours(), 0);\n\n assert_eq!(Duration::minutes(-59).whole_hours(), 0);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 91, "score": 138671.1918069257 }, { "content": "// region: date formatters\n\n/// Format the day into the designated output.\n\nfn fmt_day(\n\n output: &mut impl io::Write,\n\n date: Date,\n\n modifier::Day { padding }: modifier::Day,\n\n) -> Result<usize, io::Error> {\n\n format_number(output, date.day(), padding, 2)\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 92, "score": 138667.73872858885 }, { "content": "#[test]\n\nfn whole_minutes() {\n\n assert_eq!(1.minutes().whole_minutes(), 1);\n\n assert_eq!((-1).minutes().whole_minutes(), -1);\n\n assert_eq!(59.seconds().whole_minutes(), 0);\n\n assert_eq!((-59).seconds().whole_minutes(), 0);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 93, "score": 138666.06281837917 }, { "content": "/// Format the subsecond into the designated output.\n\nfn fmt_subsecond(\n\n output: &mut impl io::Write,\n\n time: Time,\n\n modifier::Subsecond { digits }: modifier::Subsecond,\n\n) -> Result<usize, io::Error> {\n\n let (value, width) = match digits {\n\n modifier::SubsecondDigits::One => (time.nanosecond() / 100_000_000, 1),\n\n modifier::SubsecondDigits::Two => (time.nanosecond() / 10_000_000, 2),\n\n modifier::SubsecondDigits::Three => (time.nanosecond() / 1_000_000, 3),\n\n modifier::SubsecondDigits::Four => (time.nanosecond() / 100_000, 
4),\n\n modifier::SubsecondDigits::Five => (time.nanosecond() / 10_000, 5),\n\n modifier::SubsecondDigits::Six => (time.nanosecond() / 1_000, 6),\n\n modifier::SubsecondDigits::Seven => (time.nanosecond() / 100, 7),\n\n modifier::SubsecondDigits::Eight => (time.nanosecond() / 10, 8),\n\n modifier::SubsecondDigits::Nine => (time.nanosecond(), 9),\n\n modifier::SubsecondDigits::OneOrMore => match time.nanosecond() {\n\n nanos if nanos % 10 != 0 => (nanos, 9),\n\n nanos if (nanos / 10) % 10 != 0 => (nanos / 10, 8),\n\n nanos if (nanos / 100) % 10 != 0 => (nanos / 100, 7),\n\n nanos if (nanos / 1_000) % 10 != 0 => (nanos / 1_000, 6),\n", "file_path": "src/formatting/mod.rs", "rank": 94, "score": 138663.18180237542 }, { "content": "/// Format the period into the designated output.\n\nfn fmt_period(\n\n output: &mut impl io::Write,\n\n time: Time,\n\n modifier::Period { is_uppercase }: modifier::Period,\n\n) -> Result<usize, io::Error> {\n\n match (time.hour() >= 12, is_uppercase) {\n\n (false, false) => output.write(b\"am\"),\n\n (false, true) => output.write(b\"AM\"),\n\n (true, false) => output.write(b\"pm\"),\n\n (true, true) => output.write(b\"PM\"),\n\n }\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 95, "score": 138663.18180237542 }, { "content": "/// Format the month into the designated output.\n\nfn fmt_month(\n\n output: &mut impl io::Write,\n\n date: Date,\n\n modifier::Month { padding, repr }: modifier::Month,\n\n) -> Result<usize, io::Error> {\n\n match repr {\n\n modifier::MonthRepr::Numerical => format_number(output, date.month(), padding, 2),\n\n modifier::MonthRepr::Long => output.write(MONTH_NAMES[date.month() as usize - 1]),\n\n modifier::MonthRepr::Short => output.write(&MONTH_NAMES[date.month() as usize - 1][..3]),\n\n }\n\n}\n\n\n", "file_path": "src/formatting/mod.rs", "rank": 96, "score": 138663.18180237542 }, { "content": "#[test]\n\nfn seconds_f64() {\n\n assert_eq!(Duration::seconds_f64(0.5), 0.5.seconds());\n\n 
assert_eq!(Duration::seconds_f64(-0.5), (-0.5).seconds());\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 97, "score": 138650.6717015325 }, { "content": "#[test]\n\nfn whole_seconds() {\n\n assert_eq!(1.seconds().whole_seconds(), 1);\n\n assert_eq!((-1).seconds().whole_seconds(), -1);\n\n assert_eq!(1.minutes().whole_seconds(), 60);\n\n assert_eq!((-1).minutes().whole_seconds(), -60);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 98, "score": 138650.6717015325 }, { "content": "#[test]\n\n#[allow(clippy::float_cmp)]\n\nfn as_seconds_f64() {\n\n assert_eq!(1.seconds().as_seconds_f64(), 1.0);\n\n assert_eq!((-1).seconds().as_seconds_f64(), -1.0);\n\n assert_eq!(1.minutes().as_seconds_f64(), 60.0);\n\n assert_eq!((-1).minutes().as_seconds_f64(), -60.0);\n\n assert_eq!(1.5.seconds().as_seconds_f64(), 1.5);\n\n assert_eq!((-1.5).seconds().as_seconds_f64(), -1.5);\n\n}\n\n\n", "file_path": "tests/integration/duration.rs", "rank": 99, "score": 138650.6717015325 } ]
Rust
crates/kitsune_p2p/direct/src/types/kdhash.rs
mhuesch/holochain
8cade151329117c40e47533449a2f842187c373a
use crate::*; use futures::future::{BoxFuture, FutureExt}; use kitsune_p2p::*; pub use kitsune_p2p_direct_api::KdHash; pub trait KdHashExt: Sized { fn to_kitsune_space(&self) -> Arc<KitsuneSpace>; fn from_kitsune_space(space: &KitsuneSpace) -> Self; fn to_kitsune_agent(&self) -> Arc<KitsuneAgent>; fn from_kitsune_agent(agent: &KitsuneAgent) -> Self; fn to_kitsune_op_hash(&self) -> Arc<KitsuneOpHash>; fn from_kitsune_op_hash(op_hash: &KitsuneOpHash) -> Self; fn to_kitsune_basis(&self) -> Arc<KitsuneBasis>; fn from_kitsune_basis(basis: &KitsuneBasis) -> Self; fn verify_signature( &self, data: sodoken::BufRead, signature: Arc<[u8; 64]>, ) -> BoxFuture<'static, bool>; fn from_data(data: &[u8]) -> BoxFuture<'static, KdResult<Self>>; fn from_coerced_pubkey(data: [u8; 32]) -> BoxFuture<'static, KdResult<Self>>; } impl KdHashExt for KdHash { fn to_kitsune_space(&self) -> Arc<KitsuneSpace> { Arc::new(KitsuneSpace(self.0 .1[3..].to_vec())) } fn from_kitsune_space(space: &KitsuneSpace) -> Self { (*arrayref::array_ref![&space.0, 0, 36]).into() } fn to_kitsune_agent(&self) -> Arc<KitsuneAgent> { Arc::new(KitsuneAgent(self.0 .1[3..].to_vec())) } fn from_kitsune_agent(agent: &KitsuneAgent) -> Self { (*arrayref::array_ref![&agent.0, 0, 36]).into() } fn to_kitsune_op_hash(&self) -> Arc<KitsuneOpHash> { Arc::new(KitsuneOpHash(self.0 .1[3..].to_vec())) } fn from_kitsune_op_hash(op_hash: &KitsuneOpHash) -> Self { (*arrayref::array_ref![&op_hash.0, 0, 36]).into() } fn to_kitsune_basis(&self) -> Arc<KitsuneBasis> { Arc::new(KitsuneBasis(self.0 .1[3..].to_vec())) } fn from_kitsune_basis(basis: &KitsuneBasis) -> Self { (*arrayref::array_ref![&basis.0, 0, 36]).into() } fn verify_signature( &self, data: sodoken::BufRead, signature: Arc<[u8; 64]>, ) -> BoxFuture<'static, bool> { let pk = sodoken::BufReadSized::new_no_lock(*self.as_core_bytes()); async move { async { let sig = sodoken::BufReadSized::new_no_lock(*signature); KdResult::Ok( sodoken::sign::verify_detached(sig, data, pk) .await 
.map_err(KdError::other)?, ) } .await .unwrap_or(false) } .boxed() } fn from_data(data: &[u8]) -> BoxFuture<'static, KdResult<Self>> { let r = sodoken::BufRead::new_no_lock(data); async move { let hash = <sodoken::BufWriteSized<32>>::new_no_lock(); sodoken::hash::blake2b::hash(hash.clone(), r) .await .map_err(KdError::other)?; let mut out = [0; 32]; out.copy_from_slice(&hash.read_lock()[0..32]); Self::from_coerced_pubkey(out).await } .boxed() } fn from_coerced_pubkey(data: [u8; 32]) -> BoxFuture<'static, KdResult<Self>> { async move { let r = sodoken::BufReadSized::new_no_lock(data); let loc = loc_hash(r).await?; let mut out = [0; 36]; out[0..32].copy_from_slice(&data); out[32..].copy_from_slice(&loc); Ok(out.into()) } .boxed() } } async fn loc_hash(d: sodoken::BufReadSized<32>) -> KdResult<[u8; 4]> { let mut out = [0; 4]; let hash = <sodoken::BufWriteSized<16>>::new_no_lock(); sodoken::hash::blake2b::hash(hash.clone(), d) .await .map_err(KdError::other)?; let hash = hash.read_lock(); out[0] = hash[0]; out[1] = hash[1]; out[2] = hash[2]; out[3] = hash[3]; for i in (4..16).step_by(4) { out[0] ^= hash[i]; out[1] ^= hash[i + 1]; out[2] ^= hash[i + 2]; out[3] ^= hash[i + 3]; } Ok(out) }
use crate::*; use futures::future::{BoxFuture, FutureExt}; use kitsune_p2p::*; pub use kitsune_p2p_direct_api::KdHash; pub trait KdHashExt: Sized { fn to_kitsune_space(&self) -> Arc<KitsuneSpace>; fn from_kitsune_space(space: &KitsuneSpace) -> Self; fn to_kitsune_agent(&self) -> Arc<KitsuneAgent>; fn from_kitsune_agent(agent: &KitsuneAgent) -> Self; fn to_kitsune_op_hash(&self) -> Arc<KitsuneOpHash>; fn from_kitsune_op_hash(op_hash: &KitsuneOpHash) -> Self; fn to_kitsune_basis(&self) -> Arc<KitsuneBasis>; fn from_kitsune_basis(basis: &KitsuneBasis) -> Self; fn verify_signature( &self, data: sodoken::BufRead,
out) }
signature: Arc<[u8; 64]>, ) -> BoxFuture<'static, bool>; fn from_data(data: &[u8]) -> BoxFuture<'static, KdResult<Self>>; fn from_coerced_pubkey(data: [u8; 32]) -> BoxFuture<'static, KdResult<Self>>; } impl KdHashExt for KdHash { fn to_kitsune_space(&self) -> Arc<KitsuneSpace> { Arc::new(KitsuneSpace(self.0 .1[3..].to_vec())) } fn from_kitsune_space(space: &KitsuneSpace) -> Self { (*arrayref::array_ref![&space.0, 0, 36]).into() } fn to_kitsune_agent(&self) -> Arc<KitsuneAgent> { Arc::new(KitsuneAgent(self.0 .1[3..].to_vec())) } fn from_kitsune_agent(agent: &KitsuneAgent) -> Self { (*arrayref::array_ref![&agent.0, 0, 36]).into() } fn to_kitsune_op_hash(&self) -> Arc<KitsuneOpHash> { Arc::new(KitsuneOpHash(self.0 .1[3..].to_vec())) } fn from_kitsune_op_hash(op_hash: &KitsuneOpHash) -> Self { (*arrayref::array_ref![&op_hash.0, 0, 36]).into() } fn to_kitsune_basis(&self) -> Arc<KitsuneBasis> { Arc::new(KitsuneBasis(self.0 .1[3..].to_vec())) } fn from_kitsune_basis(basis: &KitsuneBasis) -> Self { (*arrayref::array_ref![&basis.0, 0, 36]).into() } fn verify_signature( &self, data: sodoken::BufRead, signature: Arc<[u8; 64]>, ) -> BoxFuture<'static, bool> { let pk = sodoken::BufReadSized::new_no_lock(*self.as_core_bytes()); async move { async { let sig = sodoken::BufReadSized::new_no_lock(*signature); KdResult::Ok( sodoken::sign::verify_detached(sig, data, pk) .await .map_err(KdError::other)?, ) } .await .unwrap_or(false) } .boxed() } fn from_data(data: &[u8]) -> BoxFuture<'static, KdResult<Self>> { let r = sodoken::BufRead::new_no_lock(data); async move { let hash = <sodoken::BufWriteSized<32>>::new_no_lock(); sodoken::hash::blake2b::hash(hash.clone(), r) .await .map_err(KdError::other)?; let mut out = [0; 32]; out.copy_from_slice(&hash.read_lock()[0..32]); Self::from_coerced_pubkey(out).await } .boxed() } fn from_coerced_pubkey(data: [u8; 32]) -> BoxFuture<'static, KdResult<Self>> { async move { let r = sodoken::BufReadSized::new_no_lock(data); let loc = 
loc_hash(r).await?; let mut out = [0; 36]; out[0..32].copy_from_slice(&data); out[32..].copy_from_slice(&loc); Ok(out.into()) } .boxed() } } async fn loc_hash(d: sodoken::BufReadSized<32>) -> KdResult<[u8; 4]> { let mut out = [0; 4]; let hash = <sodoken::BufWriteSized<16>>::new_no_lock(); sodoken::hash::blake2b::hash(hash.clone(), d) .await .map_err(KdError::other)?; let hash = hash.read_lock(); out[0] = hash[0]; out[1] = hash[1]; out[2] = hash[2]; out[3] = hash[3]; for i in (4..16).step_by(4) { out[0] ^= hash[i]; out[1] ^= hash[i + 1]; out[2] ^= hash[i + 2]; out[3] ^= hash[i + 3]; } Ok(
random
[ { "content": "/// internal REPR for holo hash\n\npub fn holo_hash_encode(data: &[u8]) -> String {\n\n format!(\"u{}\", base64::encode_config(data, base64::URL_SAFE_NO_PAD),)\n\n}\n\n\n", "file_path": "crates/holo_hash/src/encode.rs", "rank": 1, "score": 254914.76057821707 }, { "content": "/// internal compute a 32 byte blake2b hash\n\npub fn blake2b_256(data: &[u8]) -> Vec<u8> {\n\n let hash = blake2b_simd::Params::new().hash_length(32).hash(data);\n\n hash.as_bytes().to_vec()\n\n}\n\n\n", "file_path": "crates/holo_hash/src/encode.rs", "rank": 2, "score": 252947.44556847407 }, { "content": "/// internal compute a 16 byte blake2b hash\n\npub fn blake2b_128(data: &[u8]) -> Vec<u8> {\n\n let hash = blake2b_simd::Params::new().hash_length(16).hash(data);\n\n hash.as_bytes().to_vec()\n\n}\n", "file_path": "crates/holo_hash/src/encode.rs", "rank": 3, "score": 252947.44556847407 }, { "content": "/// Any implementor of HashableContent may be used in a HoloHashed to pair\n\n/// data with its HoloHash representation. 
It also has an associated HashType.\n\npub trait HashableContent: Sized {\n\n /// The HashType which this content will be hashed to\n\n type HashType: HashType;\n\n\n\n /// The HashType which this content will be hashed to\n\n fn hash_type(&self) -> Self::HashType;\n\n\n\n /// Return a subset of the content, either as SerializedBytes \"content\",\n\n /// which will be used to compute the hash, or as an already precomputed\n\n /// hash which will be used directly\n\n fn hashable_content(&self) -> HashableContentBytes;\n\n}\n\n\n\n/// HashableContent can be expressed as \"content\", or \"prehashed\", which affects\n\n/// how a HoloHashed type will be constructed from it.\n\npub enum HashableContentBytes {\n\n /// Denotes that the hash should be computed for the given data\n\n Content(SerializedBytes),\n\n /// Denotes that the given bytes already constitute a valid HoloHash\n\n Prehashed39(Vec<u8>),\n", "file_path": "crates/holo_hash/src/hashable_content.rs", "rank": 4, "score": 243320.5099992964 }, { "content": "#[allow(missing_docs)]\n\npub trait CallbackResult: Sized {\n\n /// if a callback result is definitive we should halt any further iterations over remaining\n\n /// calls e.g. over sparse names or subsequent zomes\n\n /// typically a clear failure is definitive but success and missing dependencies are not\n\n /// in the case of success or missing deps, a subsequent callback could give us a definitive\n\n /// answer like a fail, and we don't want to over-optimise wasm calls and miss a clear failure\n\n fn is_definitive(&self) -> bool;\n\n /// when a WasmError is returned from a callback (e.g. via `?` operator) it might mean either:\n\n ///\n\n /// - There was an error that prevented the callback from coming to a CallbackResult (e.g. failing to connect to database)\n\n /// - There was an error that should be interpreted as a CallbackResult::Fail (e.g. 
data failed to deserialize)\n\n ///\n\n /// Typically this can be split as host/wasm errors are the former, and serialization/guest errors the latter.\n\n /// This function allows each CallbackResult to explicitly map itself.\n\n fn try_from_wasm_error(wasm_error: WasmError) -> Result<Self, WasmError>;\n\n}\n\n\n\n#[macro_export]\n\n/// Serialization for fixed arrays is generally not available in a way that can be derived.\n\n/// Being able to wrap fixed size arrays is important e.g. for crypto safety etc. so this is a\n", "file_path": "crates/holochain_zome_types/src/lib.rs", "rank": 5, "score": 243309.38075324456 }, { "content": "/// internal compute the holo dht location u32\n\npub fn holo_dht_location_bytes(data: &[u8]) -> Vec<u8> {\n\n // Assert the data size is relatively small so we are\n\n // comfortable executing this synchronously / blocking tokio thread.\n\n assert_eq!(32, data.len(), \"only 32 byte hashes supported\");\n\n\n\n let hash = blake2b_128(data);\n\n let mut out = vec![hash[0], hash[1], hash[2], hash[3]];\n\n for i in (4..16).step_by(4) {\n\n out[0] ^= hash[i];\n\n out[1] ^= hash[i + 1];\n\n out[2] ^= hash[i + 2];\n\n out[3] ^= hash[i + 3];\n\n }\n\n out\n\n}\n\n\n", "file_path": "crates/holo_hash/src/encode.rs", "rank": 6, "score": 242358.53527198575 }, { "content": " pub trait HostFnApiT {\n\n $(\n\n fn $f(&self, _: $in_arg) -> Result<$out_arg, HostFnApiError>;\n\n )*\n\n }\n\n }\n\n}\n\n\n\n// Every externed function that the zome developer exposes to holochain returns `ExternIO`.\n\n// The zome developer can expose callbacks in a \"sparse\" way based on names and the functions\n\n// can take different input (e.g. validation vs. hooks like init, etc.).\n\n// All we can say is that some SerializedBytes are being received and returned.\n\n// In the case of ZomeExtern functions exposed to a client, the data input/output is entirely\n\n// arbitrary so we can't say anything at all. 
In this case the happ developer must BYO\n\n// deserialization context to match the client, either directly or via. the HDK.\n\n// Note though, that _unlike_ zome externs, the host _does_ know exactly the guest should be\n\n// returning for callbacks, it's just that the unpacking of the return happens in two steps:\n\n// - first the sparse callback is triggered with SB input/output\n\n// - then the guest inflates the expected input or the host the expected output based on the\n\n// callback flavour\n", "file_path": "crates/holochain_zome_types/src/zome_io.rs", "rank": 7, "score": 240751.73371904748 }, { "content": "/// Apply to a data item to indicate it can be encoded / decoded.\n\npub trait Codec: Clone + Sized {\n\n /// Variant identifier (for debugging or as a cheap discriminant).\n\n fn variant_type(&self) -> &'static str;\n\n\n\n /// Encode this item to given writer.\n\n /// You may wish to first wrap your writer in a BufWriter.\n\n fn encode<W>(&self, w: &mut W) -> Result<(), std::io::Error>\n\n where\n\n W: std::io::Write;\n\n\n\n /// Encode this item to an owned vector of bytes.\n\n /// Uses `encode()` internally.\n\n fn encode_vec(&self) -> Result<Vec<u8>, std::io::Error> {\n\n let mut data = Vec::new();\n\n self.encode(&mut data)?;\n\n Ok(data)\n\n }\n\n\n\n /// Decode a reader into this item.\n\n /// You may wish to first wrap your reader in a BufReader.\n", "file_path": "crates/kitsune_p2p/types/src/codec.rs", "rank": 8, "score": 238245.2239509435 }, { "content": "/// Utility trait for test values\n\npub trait TestVal: Sized {\n\n fn test_val() -> Self;\n\n}\n\n\n\n/// Boilerplate shortcut for implementing TestVal on an item\n\n#[macro_export]\n\nmacro_rules! 
test_val {\n\n ($($item:ty => $code:block,)*) => {$(\n\n impl TestVal for $item { fn test_val() -> Self { $code } }\n\n )*};\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/kitsune_p2p/src/test_util.rs", "rank": 9, "score": 235723.1622116844 }, { "content": "/// Extension trait to augment the direct_api version of KdAgentInfo\n\npub trait KdAgentInfoExt: Sized {\n\n /// convert KdAgentInfo into a kitsune AgentInfoSigned\n\n fn to_kitsune(&self) -> AgentInfoSigned;\n\n\n\n /// convert a kitsune AgentInfoSigned into KdAgentInfo\n\n fn from_kitsune(kitsune: &AgentInfoSigned) -> KdResult<Self>;\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/direct/src/types/kdagent.rs", "rank": 10, "score": 232178.28120040038 }, { "content": "/// Extension trait to augment the direct_api version of KdEntrySigned\n\npub trait KdEntrySignedExt: Sized {\n\n /// Build out a full, checked entry from wire encoding\n\n fn from_wire(wire: Box<[u8]>) -> BoxFuture<'static, KdResult<Self>>;\n\n\n\n /// Build out a full, checked entry from db encoding\n\n fn from_str(s: &str) -> BoxFuture<'static, KdResult<Self>>;\n\n\n\n /// Sign entry data into a full KdEntry instance\n\n fn from_content(\n\n persist: &KdPersist,\n\n content: KdEntryContent,\n\n ) -> BoxFuture<'static, KdResult<Self>>;\n\n\n\n /// Sign entry data into a full KdEntry instance with additional binary data\n\n fn from_content_with_binary(\n\n persist: &KdPersist,\n\n content: KdEntryContent,\n\n binary: &[u8],\n\n ) -> BoxFuture<'static, KdResult<Self>>;\n\n}\n", "file_path": "crates/kitsune_p2p/direct/src/types/kdentry.rs", "rank": 11, "score": 232178.28120040038 }, { "content": "/// Verify the countersigning session contains the specified header.\n\npub fn check_countersigning_session_data_contains_header(\n\n entry_hash: EntryHash,\n\n session_data: &CounterSigningSessionData,\n\n header: NewEntryHeaderRef<'_>,\n\n) -> SysValidationResult<()> {\n\n let header_is_in_session = session_data\n\n .build_header_set(entry_hash)\n\n 
.map_err(SysValidationError::from)?\n\n .iter()\n\n .any(|session_header| match (&header, session_header) {\n\n (NewEntryHeaderRef::Create(create), Header::Create(session_create)) => {\n\n create == &session_create\n\n }\n\n (NewEntryHeaderRef::Update(update), Header::Update(session_update)) => {\n\n update == &session_update\n\n }\n\n _ => false,\n\n });\n\n if !header_is_in_session {\n\n Err(SysValidationError::ValidationOutcome(\n", "file_path": "crates/holochain/src/core/sys_validate.rs", "rank": 12, "score": 231654.31669270011 }, { "content": "/// Returns the path to the root data directory for all of Holochain.\n\n/// If we can get a user directory it will be an XDG compliant path\n\n/// like \"/home/peter/.local/share/holochain\".\n\n/// If it can't get a user directory it will default to \"/etc/holochain\".\n\npub fn data_root() -> PathBuf {\n\n project_root()\n\n .map(|dirs| dirs.data_dir().to_owned())\n\n .unwrap_or_else(|| PathBuf::from(\"/etc\").join(APPLICATION))\n\n}\n\n\n", "file_path": "crates/holochain_conductor_api/src/config/conductor/paths.rs", "rank": 13, "score": 226202.06114079372 }, { "content": "#[hdk_extern]\n\nfn genesis_self_check(data: GenesisSelfCheckData) -> ExternResult<ValidateCallbackResult> {\n\n let GenesisSelfCheckData {\n\n dna_def: _,\n\n membrane_proof: _,\n\n agent_key: _,\n\n } = data;\n\n Ok(ValidateCallbackResult::Valid)\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/genesis_self_check_valid/src/lib.rs", "rank": 14, "score": 225710.57312240129 }, { "content": "#[automock]\n\npub trait RibosomeT: Sized + std::fmt::Debug {\n\n fn dna_def(&self) -> &DnaDefHashed;\n\n\n\n fn zomes_to_invoke(&self, zomes_to_invoke: ZomesToInvoke) -> Vec<Zome> {\n\n match zomes_to_invoke {\n\n ZomesToInvoke::All => self\n\n .dna_def()\n\n .zomes\n\n .iter()\n\n .cloned()\n\n .map(Into::into)\n\n .collect(),\n\n ZomesToInvoke::One(zome) => vec![zome],\n\n }\n\n }\n\n\n\n fn zome_to_id(&self, zome: &Zome) -> RibosomeResult<ZomeId> 
{\n\n let zome_name = zome.zome_name();\n\n match self\n\n .dna_def()\n", "file_path": "crates/holochain/src/core/ribosome.rs", "rank": 15, "score": 222308.98389659554 }, { "content": "/// Sign something that is serializable using the private key for the passed public key.\n\n///\n\n/// Serde convenience for [ `sign_raw `].\n\npub fn sign<K, D>(key: K, data: D) -> ExternResult<Signature>\n\nwhere\n\n K: Into<AgentPubKey>,\n\n D: serde::Serialize + std::fmt::Debug,\n\n{\n\n HDK.with(|h| h.borrow().sign(Sign::new(key.into(), data)?))\n\n}\n\n\n", "file_path": "crates/hdk/src/ed25519.rs", "rank": 16, "score": 221927.76813626787 }, { "content": "/// Builder for non-genesis Headers\n\n///\n\n/// SourceChain::put takes one of these rather than a raw Header, so that it\n\n/// can inject the proper values via `HeaderBuilderCommon`, rather than requiring\n\n/// surrounding code to construct a proper Header outside of the context of\n\n/// the SourceChain.\n\n///\n\n/// This builder does not build pre-genesis Headers, because prior to genesis\n\n/// there is no Agent associated with the source chain, and also the fact that\n\n/// the Dna header has no prev_entry causes a special case that need not be\n\n/// dealt with. SourceChain::genesis already handles genesis in one fell swoop.\n\npub trait HeaderBuilder<H: HeaderInner>: Sized {\n\n fn build(self, common: HeaderBuilderCommon) -> H;\n\n}\n\n\n\nmacro_rules! builder_variant {\n\n ( $name: ident { $($field: ident : $t: ty),* $(,)? 
} ) => {\n\n\n\n #[derive(Clone, Debug, PartialEq, Eq)]\n\n pub struct $name {\n\n $(pub $field : $t),*\n\n }\n\n\n\n #[allow(clippy::new_without_default)]\n\n impl $name {\n\n pub fn new($($field : $t),* ) -> Self {\n\n Self {\n\n $($field),*\n\n }\n\n }\n\n }\n", "file_path": "crates/holochain_zome_types/src/header/builder.rs", "rank": 17, "score": 219279.30837289884 }, { "content": "/// Sign some data using the private key for the passed public key.\n\n///\n\n/// Assuming the private key for the provided pubkey exists in lair this will work.\n\n/// If we don't have the private key for the public key then we can't sign anything!\n\n///\n\n/// See [ `sign` ]\n\npub fn sign_raw<K>(key: K, data: Vec<u8>) -> ExternResult<Signature>\n\nwhere\n\n K: Into<AgentPubKey>,\n\n{\n\n HDK.with(|h| h.borrow().sign(Sign::new_raw(key.into(), data)))\n\n}\n\n\n", "file_path": "crates/hdk/src/ed25519.rs", "rank": 18, "score": 218835.36399328866 }, { "content": "#[deprecated = \"Raising visibility into a change that needs to happen after `use_existing` is implemented\"]\n\npub fn we_must_remember_to_rework_cell_panic_handling_after_implementing_use_existing_cell_resolution(\n\n) {\n\n}\n\n\n\n/// The result of running Cell resolution\n\n// TODO: rework, make fields private\n\n#[allow(missing_docs)]\n\n#[derive(PartialEq, Eq, Debug)]\n\npub struct CellSlotResolution {\n\n pub agent: AgentPubKey,\n\n pub dnas_to_register: Vec<(DnaFile, Option<MembraneProof>)>,\n\n pub slots: Vec<(SlotId, AppSlot)>,\n\n}\n\n\n\n#[allow(missing_docs)]\n\nimpl CellSlotResolution {\n\n pub fn new(agent: AgentPubKey) -> Self {\n\n Self {\n\n agent,\n\n dnas_to_register: Default::default(),\n", "file_path": "crates/holochain_types/src/app/app_bundle.rs", "rank": 19, "score": 213449.7090274393 }, { "content": "pub fn schedule_fn(\n\n txn: &mut Transaction,\n\n scheduled_fn: ScheduledFn,\n\n maybe_schedule: Option<Schedule>,\n\n now: Timestamp,\n\n) -> StateMutationResult<()> {\n\n let (start, end, ephemeral) 
= match maybe_schedule {\n\n Some(Schedule::Persisted(ref schedule_string)) => {\n\n // If this cron doesn't parse cleanly we don't even want to\n\n // write it to the db.\n\n let start = if let Some(start) = cron::Schedule::from_str(&schedule_string)\n\n .map_err(|e| ScheduleError::Cron(e.to_string()))?\n\n .after(\n\n &chrono::DateTime::<chrono::Utc>::try_from(now)\n\n .map_err(ScheduleError::Timestamp)?,\n\n )\n\n .next()\n\n {\n\n start\n\n } else {\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 20, "score": 213239.88595426176 }, { "content": "/// Check the entry size is under the MAX_ENTRY_SIZE\n\npub fn check_entry_size(entry: &Entry) -> SysValidationResult<()> {\n\n match entry {\n\n Entry::App(bytes) => {\n\n let size = std::mem::size_of_val(&bytes.bytes()[..]);\n\n if size < MAX_ENTRY_SIZE {\n\n Ok(())\n\n } else {\n\n Err(ValidationOutcome::EntryTooLarge(size, MAX_ENTRY_SIZE).into())\n\n }\n\n }\n\n // Other entry types are small\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/sys_validate.rs", "rank": 21, "score": 210800.25450737102 }, { "content": "/// Get compressed bytes from some serializable data\n\npub fn encode<T: serde::ser::Serialize>(data: &T) -> MrBundleResult<Vec<u8>> {\n\n let bytes = rmp_serde::to_vec_named(data)?;\n\n let mut enc = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());\n\n enc.write_all(&bytes)?;\n\n Ok(enc.finish()?)\n\n}\n\n\n", "file_path": "crates/mr_bundle/src/encoding.rs", "rank": 22, "score": 209689.99314412137 }, { "content": "/// A Manifest describes the resources in a [`Bundle`](crate::Bundle) and how\n\n/// to pack and unpack them.\n\n///\n\n/// Regardless of the format of your Manifest, it must contain a set of Locations\n\n/// describing where to find resources, and this trait must implement `locations`\n\n/// properly to match the data contained in the manifest.\n\n///\n\n/// You must also specify a relative path for the Manifest, and the 
extension\n\n/// for the bundle file, if you are using the \"packing\" feature.\n\npub trait Manifest:\n\n Clone + Sized + PartialEq + Eq + serde::Serialize + serde::de::DeserializeOwned\n\n{\n\n /// The list of Locations referenced in the manifest data. This must be\n\n /// correctly implemented to enable resource resolution.\n\n fn locations(&self) -> Vec<Location>;\n\n\n\n /// When unpacking the bundle into a directory structure, this becomes\n\n /// the relative path of the manifest file.\n\n #[cfg(feature = \"packing\")]\n\n fn path() -> PathBuf;\n\n\n\n /// When packing a bundle from a directory structure, the bundle file gets\n\n /// this extension.\n\n #[cfg(feature = \"packing\")]\n\n fn bundle_extension() -> &'static str;\n\n\n\n /// Get only the Bundled locations\n\n fn bundled_paths(&self) -> Vec<PathBuf> {\n\n self.locations()\n", "file_path": "crates/mr_bundle/src/manifest.rs", "rank": 23, "score": 209370.06326930854 }, { "content": "pub trait TryFromRandom {\n\n fn try_from_random() -> ExternResult<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n\n/// Ideally we wouldn't need to do this with a macro.\n\n/// All we want is to implement this trait with whatever length our random-bytes-new-types need to\n\n/// be, but if we use a const on the trait directly we get 'constant expression depends on a\n\n/// generic parameter'\n\nmacro_rules! 
impl_try_from_random {\n\n ( $t:ty, $bytes:expr ) => {\n\n impl TryFromRandom for $t {\n\n fn try_from_random() -> $crate::prelude::ExternResult<Self> {\n\n $crate::prelude::random_bytes($bytes as u32).map(|bytes| {\n\n // Always a fatal error if our own bytes generation has the wrong length.\n\n assert_eq!($bytes, bytes.len());\n\n let mut inner = [0; $bytes];\n\n inner.copy_from_slice(bytes.as_ref());\n\n Self::from(inner)\n", "file_path": "crates/hdk/src/random.rs", "rank": 24, "score": 209351.83460005478 }, { "content": "pub trait Store {\n\n /// Get an [`Entry`] from this store.\n\n fn get_entry(&self, hash: &EntryHash) -> StateQueryResult<Option<Entry>>;\n\n\n\n /// Get an [`SignedHeaderHashed`] from this store.\n\n fn get_header(&self, hash: &HeaderHash) -> StateQueryResult<Option<SignedHeaderHashed>>;\n\n\n\n /// Get an [`Element`] from this store.\n\n fn get_element(&self, hash: &AnyDhtHash) -> StateQueryResult<Option<Element>>;\n\n\n\n /// Check if a hash is contained in the store\n\n fn contains_hash(&self, hash: &AnyDhtHash) -> StateQueryResult<bool> {\n\n match *hash.hash_type() {\n\n AnyDht::Entry => self.contains_entry(&hash.clone().into()),\n\n AnyDht::Header => self.contains_header(&hash.clone().into()),\n\n }\n\n }\n\n\n\n /// Check if an entry is contained in the store\n\n fn contains_entry(&self, hash: &EntryHash) -> StateQueryResult<bool>;\n\n\n\n /// Check if a header is contained in the store\n\n fn contains_header(&self, hash: &HeaderHash) -> StateQueryResult<bool>;\n\n}\n\n\n", "file_path": "crates/holochain_state/src/query.rs", "rank": 25, "score": 209351.83460005478 }, { "content": "/// Extend holo_hash::AgentPubKey with additional signature functionality\n\n/// from Keystore.\n\npub trait AgentPubKeyExt {\n\n /// create a new agent keypair in given keystore, returning the AgentPubKey\n\n fn new_from_pure_entropy(\n\n keystore: &KeystoreSender,\n\n ) -> KeystoreApiFuture<holo_hash::AgentPubKey>\n\n where\n\n Self: Sized;\n\n\n\n /// sign 
some arbitrary data\n\n fn sign<S>(&self, keystore: &KeystoreSender, data: S) -> KeystoreApiFuture<Signature>\n\n where\n\n S: Serialize + std::fmt::Debug;\n\n\n\n /// sign some arbitrary raw bytes\n\n fn sign_raw(&self, keystore: &KeystoreSender, data: &[u8]) -> KeystoreApiFuture<Signature>;\n\n\n\n /// verify a signature for given data with this agent public_key is valid\n\n fn verify_signature<D>(&self, signature: &Signature, data: D) -> KeystoreApiFuture<bool>\n\n where\n\n D: TryInto<SerializedBytes, Error = SerializedBytesError>;\n", "file_path": "crates/holochain_keystore/src/agent_pubkey_ext.rs", "rank": 26, "score": 209295.4544984253 }, { "content": "#[async_trait]\n\npub trait CellConductorApiT: Clone + Send + Sync + Sized {\n\n /// Get this cell id\n\n fn cell_id(&self) -> &CellId;\n\n\n\n /// Invoke a zome function on any cell in this conductor.\n\n /// A zome call on a different Cell than this one corresponds to a bridged call.\n\n async fn call_zome(\n\n &self,\n\n cell_id: &CellId,\n\n call: ZomeCall,\n\n ) -> ConductorApiResult<ZomeCallResult>;\n\n\n\n /// Make a request to the DPKI service running for this Conductor.\n\n /// TODO: decide on actual signature\n\n async fn dpki_request(&self, method: String, args: String) -> ConductorApiResult<String>;\n\n\n\n /// Request access to this conductor's keystore\n\n fn keystore(&self) -> &KeystoreSender;\n\n\n\n /// Access the broadcast Sender which will send a Signal across every\n", "file_path": "crates/holochain/src/conductor/api/api_cell.rs", "rank": 27, "score": 209019.38997455477 }, { "content": "pub fn version(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<ZomeApiVersion, WasmError> {\n\n unreachable!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/version.rs", "rank": 28, "score": 207801.00168916857 }, { "content": "pub fn trace(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n input: 
TraceMsg,\n\n) -> Result<(), WasmError> {\n\n // Avoid dialing out to the environment on every trace.\n\n let wasm_log = Lazy::new(|| {\n\n std::env::var(\"WASM_LOG\").unwrap_or_else(|_| \"[wasm_trace]=debug\".to_string())\n\n });\n\n let collector = tracing_subscriber::fmt()\n\n .with_env_filter(tracing_subscriber::EnvFilter::new((*wasm_log).clone()))\n\n .with_target(false)\n\n .finish();\n\n tracing::subscriber::with_default(collector, || wasm_trace(input));\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/trace.rs", "rank": 29, "score": 207801.00168916857 }, { "content": "pub fn sleep(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: core::time::Duration,\n\n) -> Result<(), WasmError> {\n\n unimplemented!()\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sleep.rs", "rank": 30, "score": 207801.00168916857 }, { "content": "pub fn call(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n inputs: Vec<Call>,\n\n) -> Result<Vec<ZomeCallResponse>, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ write_workspace: Permission::Allow, .. 
} => {\n\n let results: Vec<Result<Result<ZomeCallResponse, _>, _>> = tokio_helper::block_forever_on(async move {\n\n join_all(inputs.into_iter().map(|input| {\n\n async {\n\n let Call {\n\n to_cell,\n\n zome_name,\n\n fn_name,\n\n cap,\n\n payload,\n\n provenance,\n\n } = input;\n\n let cell_id = to_cell.unwrap_or_else(|| call_context.host_context().call_zome_handle().cell_id().clone());\n\n let invocation = ZomeCall {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/call.rs", "rank": 31, "score": 207801.00168916857 }, { "content": "pub fn sign(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: Sign,\n\n) -> Result<Signature, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess { keystore: Permission::Allow, .. } => tokio_helper::block_forever_on(async move {\n\n call_context.host_context.keystore().sign(input).await\n\n })\n\n .map_err(|keystore_error| WasmError::Host(keystore_error.to_string())),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n\n use crate::fixt::ZomeCallHostAccessFixturator;\n\n use ::fixt::prelude::*;\n\n use hdk::prelude::test_utils::fake_agent_pubkey_1;\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sign.rs", "rank": 32, "score": 207801.00168916857 }, { "content": "pub fn schedule(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: String,\n\n) -> Result<(), WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ write_workspace: Permission::Allow, .. 
} => {\n\n call_context.host_context().workspace().source_chain().scratch().apply(|scratch| {\n\n scratch.add_scheduled_fn(ScheduledFn::new(call_context.zome.zome_name().clone(), input.into()));\n\n }).map_err(|e| WasmError::Host(e.to_string()))?;\n\n Ok(())\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use ::fixt::prelude::*;\n\n use holochain_wasm_test_utils::TestWasm;\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/schedule.rs", "rank": 33, "score": 207801.00168916857 }, { "content": "pub fn query(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: ChainQueryFilter,\n\n) -> Result<Vec<Element>, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ read_workspace: Permission::Allow, .. } => {\n\n tokio_helper::block_forever_on(async move {\n\n let elements: Vec<Element> = call_context\n\n .host_context\n\n .workspace()\n\n .source_chain()\n\n .query(input)\n\n .await\n\n .map_err(|source_chain_error| WasmError::Host(source_chain_error.to_string()))?;\n\n Ok(elements)\n\n })\n\n },\n\n _ => unreachable!(),\n\n }\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/query.rs", "rank": 34, "score": 207801.00168916857 }, { "content": "/// Check the link tag size is under the MAX_TAG_SIZE\n\npub fn check_tag_size(tag: &LinkTag) -> SysValidationResult<()> {\n\n let size = std::mem::size_of_val(&tag.0[..]);\n\n if size < MAX_TAG_SIZE {\n\n Ok(())\n\n } else {\n\n Err(ValidationOutcome::TagTooLarge(size, MAX_TAG_SIZE).into())\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/sys_validate.rs", "rank": 35, "score": 207702.41471346893 }, { "content": "#[async_trait::async_trait]\n\npub trait ElementExt {\n\n /// Validate the signature matches the data\n\n async fn validate(&self) -> Result<(), KeystoreError>;\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl ElementExt for Element {\n\n /// Validates a chain element\n\n async fn 
validate(&self) -> Result<(), KeystoreError> {\n\n self.signed_header().validate().await?;\n\n\n\n //TODO: make sure that any cases around entry existence are valid:\n\n // SourceChainError::InvalidStructure(HeaderAndEntryMismatch(address)),\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Extension trait to keep zome types minimal\n", "file_path": "crates/holochain_types/src/element.rs", "rank": 36, "score": 206175.20334641635 }, { "content": "/// Trait for binding static [ `EntryDef` ] property access for a type.\n\n/// See [ `register_entry` ]\n\npub trait EntryDefRegistration {\n\n fn entry_def() -> crate::prelude::EntryDef;\n\n\n\n fn entry_def_id() -> crate::prelude::EntryDefId;\n\n\n\n fn entry_visibility() -> crate::prelude::EntryVisibility;\n\n\n\n fn crdt_type() -> crate::prelude::CrdtType;\n\n\n\n fn required_validations() -> crate::prelude::RequiredValidations;\n\n}\n\n\n\n/// Implements conversion traits to allow a struct to be handled as an app entry.\n\n/// If you have some need to implement custom serialization logic or metadata injection\n\n/// you can do so by implementing these traits manually instead.\n\n///\n\n/// This requires that TryFrom and TryInto [ `derive@SerializedBytes` ] is implemented for the entry type,\n\n/// which implies that [ `serde::Serialize` ] and [ `serde::Deserialize` ] is also implemented.\n\n/// These can all be derived and there is an attribute macro that both does the default defines.\n\n#[macro_export]\n", "file_path": "crates/hdk/src/entry.rs", "rank": 37, "score": 206173.738120668 }, { "content": "/// A trait to specify the common parts of a Header\n\npub trait HeaderInner {\n\n /// Get a full header from the subset\n\n fn into_header(self) -> Header;\n\n}\n\n\n\nimpl<I: HeaderInner> From<I> for Header {\n\n fn from(i: I) -> Self {\n\n i.into_header()\n\n }\n\n}\n\n\n\nwrite_into_header! 
{\n\n Dna,\n\n AgentValidationPkg,\n\n InitZomesComplete,\n\n CreateLink,\n\n DeleteLink,\n\n OpenChain,\n\n CloseChain,\n\n Create,\n", "file_path": "crates/holochain_zome_types/src/header.rs", "rank": 38, "score": 203144.51037744782 }, { "content": "/// Data that requires a validation status.\n\npub trait HasValidationStatus {\n\n /// The type of the inner data\n\n type Data;\n\n\n\n /// Get the status of a some data.\n\n /// None means this data has not been validated yet.\n\n fn validation_status(&self) -> Option<ValidationStatus>;\n\n\n\n /// The data which has the validation status\n\n fn data(&self) -> &Self::Data;\n\n}\n\n\n\nimpl<T> HasValidationStatus for Judged<T> {\n\n type Data = T;\n\n\n\n fn validation_status(&self) -> Option<ValidationStatus> {\n\n self.status\n\n }\n\n\n\n fn data(&self) -> &Self::Data {\n", "file_path": "crates/holochain_zome_types/src/judged.rs", "rank": 39, "score": 203144.46567846782 }, { "content": "pub trait SqlInsert {\n\n fn sql_insert<R: Readable>(&self, txn: &mut R) -> DatabaseResult<()>;\n\n}\n\n\n\nimpl SqlInsert for Entry {\n\n fn sql_insert<R: Readable>(&self, txn: &mut R) -> DatabaseResult<()> {}\n\n}\n", "file_path": "crates/holochain_sqlite/src/schema/tables.rs", "rank": 40, "score": 203138.11531090306 }, { "content": "/// Helpers for constructing AgentActivity\n\npub trait AgentActivityExt {\n\n /// Create an empty chain status\n\n fn empty<T>(agent: &AgentPubKey) -> AgentActivityResponse<T> {\n\n AgentActivityResponse {\n\n agent: agent.clone(),\n\n valid_activity: ChainItems::NotRequested,\n\n rejected_activity: ChainItems::NotRequested,\n\n status: ChainStatus::Empty,\n\n // TODO: Add the actual highest observed in a follow up PR\n\n highest_observed: None,\n\n }\n\n }\n\n}\n\n\n\nimpl AgentActivityExt for AgentActivityResponse {}\n", "file_path": "crates/holochain_types/src/chain.rs", "rank": 41, "score": 203138.11531090306 }, { "content": "/// Every HoloHash is generic over HashType.\n\n/// Additionally, 
every HashableContent has an associated HashType.\n\n/// The HashType is the glue that binds together HashableContent with its hash.\n\npub trait HashType:\n\n Copy + Clone + std::fmt::Debug + Clone + std::hash::Hash + PartialEq + Eq + PartialOrd + Ord\n\n{\n\n /// Get the 3-byte prefix for the underlying primitive hash type\n\n fn get_prefix(self) -> &'static [u8];\n\n\n\n /// Given a 3-byte prefix, return the corresponding HashType, or error if mismatched.\n\n /// Trivial for PrimitiveHashType, but useful for composite types\n\n fn try_from_prefix(prefix: &[u8]) -> HoloHashResult<Self>;\n\n\n\n /// Get a Display-worthy name for this hash type\n\n fn hash_name(self) -> &'static str;\n\n}\n\n\n", "file_path": "crates/holo_hash/src/hash_type.rs", "rank": 42, "score": 203138.11531090306 }, { "content": "/// Generate a new sandbox.\n\n/// This creates a directory and a [`ConductorConfig`]\n\n/// from an optional network.\n\n/// The root directory and inner directory\n\n/// (where this sandbox will be created) can be overridden.\n\n/// For example `my_root_dir/this_sandbox_dir/`\n\npub fn generate(\n\n network: Option<KitsuneP2pConfig>,\n\n root: Option<PathBuf>,\n\n directory: Option<PathBuf>,\n\n) -> anyhow::Result<PathBuf> {\n\n let dir = generate_directory(root, directory)?;\n\n let mut config = create_config(dir.clone());\n\n config.network = network;\n\n random_admin_port(&mut config);\n\n let path = write_config(dir.clone(), &config);\n\n msg!(\"Config {:?}\", config);\n\n msg!(\n\n \"Created directory at: {} {}\",\n\n ansi_term::Style::new()\n\n .bold()\n\n .underline()\n\n .on(ansi_term::Color::Fixed(254))\n\n .fg(ansi_term::Color::Fixed(4))\n\n .paint(dir.display().to_string()),\n\n ansi_term::Style::new()\n\n .bold()\n\n .paint(\"Keep this path to rerun the same sandbox\")\n\n );\n\n msg!(\"Created config at {}\", path.display());\n\n Ok(dir)\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/generate.rs", "rank": 43, "score": 202913.1848831937 }, { "content": 
"/// return the access info used for this call\n\n/// also return who is originated the call (pubkey)\n\npub fn capability_info(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<(), WasmError> {\n\n unimplemented!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/capability_info.rs", "rank": 44, "score": 202799.0415354399 }, { "content": "#[tracing::instrument(skip(_ribosome, call_context, input))]\n\npub fn remote_signal(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: RemoteSignal,\n\n) -> Result<(), WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_network: Permission::Allow,\n\n ..\n\n } => {\n\n const FN_NAME: &str = \"recv_remote_signal\";\n\n // Timeouts and errors are ignored,\n\n // this is a send and forget operation.\n\n let network = call_context.host_context().network().clone();\n\n let RemoteSignal { agents, signal } = input;\n\n let zome_name: ZomeName = call_context.zome().into();\n\n let fn_name: FunctionName = FN_NAME.into();\n\n for agent in agents {\n\n tokio::task::spawn(\n\n {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/remote_signal.rs", "rank": 45, "score": 202793.1510045243 }, { "content": "/// return n crypto secure random bytes from the standard holochain crypto lib\n\npub fn random_bytes(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: u32,\n\n) -> Result<Bytes, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ non_determinism: Permission::Allow, .. 
} => {\n\n let system_random = ring::rand::SystemRandom::new();\n\n let mut bytes = vec![0; input as _];\n\n system_random\n\n .fill(&mut bytes)\n\n .map_err(|ring_unspecified_error| WasmError::Host(ring_unspecified_error.to_string()))?;\n\n\n\n Ok(Bytes::from(bytes))\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/random_bytes.rs", "rank": 46, "score": 202793.1510045243 }, { "content": "pub fn emit_signal(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: AppSignal,\n\n) -> Result<(), WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ write_workspace: Permission::Allow, .. } => {\n\n let cell_id = call_context.host_context().cell_id().clone();\n\n let signal = Signal::App(cell_id, input);\n\n call_context.host_context().signal_tx().send(signal).map_err(|interface_error| WasmError::Host(interface_error.to_string()))?;\n\n Ok(())\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/emit_signal.rs", "rank": 47, "score": 202793.1510045243 }, { "content": "pub fn call_remote(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n inputs: Vec<CallRemote>,\n\n) -> Result<Vec<ZomeCallResponse>, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_network: Permission::Allow,\n\n ..\n\n } => {\n\n // it is the network's responsibility to handle timeouts and return an Err result in that case\n\n let results: Vec<Result<SerializedBytes, _>> =\n\n tokio_helper::block_forever_on(async move {\n\n join_all(inputs.into_iter().map(|input| async {\n\n let CallRemote {\n\n target_agent,\n\n zome_name,\n\n fn_name,\n\n cap,\n\n payload,\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/call_remote.rs", "rank": 48, "score": 202793.1510045243 }, { "content": "pub fn extract_entry_def(\n\n ribosome: Arc<impl 
RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n entry_def_id: EntryDefId,\n\n) -> Result<(holochain_zome_types::header::EntryDefIndex, EntryVisibility), WasmError> {\n\n let app_entry_type = match ribosome\n\n .run_entry_defs((&call_context.host_context).into(), EntryDefsInvocation)\n\n .map_err(|ribosome_error| WasmError::Host(ribosome_error.to_string()))?\n\n {\n\n // the ribosome returned some defs\n\n EntryDefsResult::Defs(defs) => {\n\n let maybe_entry_defs = defs.get(call_context.zome.zome_name());\n\n match maybe_entry_defs {\n\n // convert the entry def id string into a numeric position in the defs\n\n Some(entry_defs) => {\n\n entry_defs\n\n .entry_def_index_from_id(entry_def_id.clone())\n\n .map(|index| {\n\n // build an app entry type from the entry def at the found position\n\n (index, entry_defs[index.0 as usize].visibility)\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/create.rs", "rank": 49, "score": 202793.15100452432 }, { "content": "pub fn verify_signature(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: VerifySignature,\n\n) -> Result<bool, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess { keystore_deterministic: Permission::Allow, .. 
} => tokio_helper::block_forever_on(async move {\n\n input\n\n .key\n\n .verify_signature_raw(input.as_ref(), input.as_data_ref())\n\n .await\n\n })\n\n .map_err(|keystore_error| WasmError::Host(keystore_error.to_string())),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/verify_signature.rs", "rank": 50, "score": 202793.15100452432 }, { "content": "pub fn call_info(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<CallInfo, WasmError> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/call_info.rs", "rank": 51, "score": 202793.1510045243 }, { "content": "/// list all the grants stored locally in the chain filtered by tag\n\n/// this is only the current grants as per local CRUD\n\npub fn capability_grants(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<(), WasmError> {\n\n unimplemented!();\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n\n use crate::fixt::ZomeCallHostAccessFixturator;\n\n use crate::{conductor::ConductorBuilder, sweettest::SweetConductor};\n\n use crate::{sweettest::SweetDnaFile};\n\n use ::fixt::prelude::*;\n\n use hdk::prelude::*;\n\n use holochain_types::fixt::CapSecretFixturator;\n\n use holochain_types::prelude::*;\n\n use holochain_types::test_utils::fake_agent_pubkey_1;\n\n use holochain_types::test_utils::fake_agent_pubkey_2;\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/capability_grants.rs", "rank": 52, "score": 202793.15100452432 }, { "content": "pub fn sign_ephemeral(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: SignEphemeral,\n\n) -> Result<EphemeralSignatures, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ keystore: 
Permission::Allow, .. } => {\n\n let rng = SystemRandom::new();\n\n let mut seed = [0; 32];\n\n rng.fill(&mut seed)\n\n .map_err(|e| WasmError::Guest(e.to_string()))?;\n\n let ephemeral_keypair =\n\n Ed25519KeyPair::from_seed_unchecked(&seed).map_err(|e| WasmError::Host(e.to_string()))?;\n\n\n\n let signatures: Result<Vec<Signature>, _> = input\n\n .into_inner()\n\n .into_iter()\n\n .map(|data| ephemeral_keypair.sign(&data).as_ref().try_into())\n\n .collect();\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sign_ephemeral.rs", "rank": 53, "score": 202793.15100452432 }, { "content": "/// lists all the local claims filtered by tag\n\npub fn capability_claims(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<(), WasmError> {\n\n unimplemented!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/capability_claims.rs", "rank": 54, "score": 202793.15100452432 }, { "content": "pub fn dna_info(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<DnaInfo, WasmError> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/dna_info.rs", "rank": 55, "score": 202793.1510045243 }, { "content": "pub fn app_info(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<AppInfo, WasmError> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/app_info.rs", "rank": 56, "score": 202793.15100452432 }, { "content": "pub fn zome_info(\n\n ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<ZomeInfo, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ bindings_deterministic: Permission::Allow, .. 
} => {\n\n Ok(ZomeInfo {\n\n dna_name: ribosome.dna_def().name.clone(),\n\n zome_name: call_context.zome.zome_name().clone(),\n\n dna_hash: ribosome.dna_def().as_hash().clone(),\n\n zome_id: ribosome\n\n .zome_to_id(&call_context.zome)\n\n .expect(\"Failed to get ID for current zome\"),\n\n properties: ribosome.dna_def().properties.clone(),\n\n // @TODO\n\n // public_token: \"\".into(),\n\n })\n\n },\n\n _ => unreachable!(),\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/zome_info.rs", "rank": 57, "score": 202793.1510045243 }, { "content": "pub fn hash_entry(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n input: Entry,\n\n) -> Result<EntryHash, WasmError> {\n\n let entry_hash = holochain_zome_types::entry::EntryHashed::from_content_sync(input).into_hash();\n\n\n\n Ok(entry_hash)\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n\n use super::*;\n\n use crate::core::ribosome::host_fn::hash_entry::hash_entry;\n\n\n\n use crate::fixt::CallContextFixturator;\n\n use crate::fixt::EntryFixturator;\n\n use crate::fixt::RealRibosomeFixturator;\n\n use crate::fixt::ZomeCallHostAccessFixturator;\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/hash_entry.rs", "rank": 58, "score": 202793.15100452432 }, { "content": "pub fn sys_time(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<Timestamp, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n non_determinism: Permission::Allow,\n\n ..\n\n } => Ok(holochain_zome_types::Timestamp::now()),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n\n use crate::fixt::ZomeCallHostAccessFixturator;\n\n use ::fixt::prelude::*;\n\n use holochain_wasm_test_utils::TestWasm;\n\n use holochain_zome_types::Timestamp;\n\n\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn 
invoke_import_sys_time_test() {\n\n let host_access = fixt!(ZomeCallHostAccess, Predictable);\n\n let _: Timestamp =\n\n crate::call_test_ribosome!(host_access, TestWasm::SysTime, \"sys_time\", ()).unwrap();\n\n }\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/sys_time.rs", "rank": 59, "score": 202793.1510045243 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn create<'a>(\n\n ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: CreateInput,\n\n) -> Result<HeaderHash, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess {\n\n write_workspace: Permission::Allow,\n\n ..\n\n } => {\n\n let entry = AsRef::<Entry>::as_ref(&input);\n\n let chain_top_ordering = *input.chain_top_ordering();\n\n\n\n // Countersigned entries have different header handling.\n\n match entry {\n\n Entry::CounterSign(_, _) => tokio_helper::block_forever_on(async move {\n\n call_context\n\n .host_context\n\n .workspace()\n\n .source_chain()\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/create.rs", "rank": 60, "score": 202343.752396564 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn update<'a>(\n\n ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: UpdateInput,\n\n) -> Result<HeaderHash, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ write_workspace: Permission::Allow, .. 
} => {\n\n // destructure the args out into an app type def id and entry\n\n let UpdateInput {\n\n original_header_address,\n\n create_input,\n\n } = input;\n\n let CreateInput {\n\n entry_def_id,\n\n entry,\n\n chain_top_ordering,\n\n } = create_input;\n\n\n\n // Countersigned entries have different header handling.\n\n match entry {\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/update.rs", "rank": 61, "score": 202343.752396564 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn delete<'a>(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: DeleteInput,\n\n) -> Result<HeaderHash, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ write_workspace: Permission::Allow, .. } => {\n\n let DeleteInput { deletes_header_address, chain_top_ordering } = input;\n\n let deletes_entry_address = get_original_address(call_context.clone(), deletes_header_address.clone())?;\n\n\n\n let host_access = call_context.host_context();\n\n\n\n // handle timeouts at the source chain layer\n\n tokio_helper::block_forever_on(async move {\n\n let source_chain = host_access.workspace().source_chain();\n\n let header_builder = builder::Delete {\n\n deletes_address: deletes_header_address,\n\n deletes_entry_address,\n\n };\n\n let header_hash = source_chain\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/delete.rs", "rank": 62, "score": 202343.752396564 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn get<'a>(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n inputs: Vec<GetInput>,\n\n) -> Result<Vec<Option<Element>>, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ read_workspace: Permission::Allow, .. 
} => {\n\n let results: Vec<Result<Option<Element>, _>> = tokio_helper::block_forever_on(async move {\n\n join_all(inputs.into_iter().map(|input| {\n\n async {\n\n let GetInput {\n\n any_dht_hash,\n\n get_options,\n\n } = input;\n\n Cascade::from_workspace_network(\n\n call_context.host_context.workspace(),\n\n call_context.host_context.network().clone()\n\n )\n\n .dht_get(any_dht_hash, get_options).await\n\n }\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/get.rs", "rank": 63, "score": 202343.752396564 }, { "content": "/// Verify the passed signature and public key against the passed serializable input.\n\n///\n\n/// The data is not used literally, it is serialized.\n\n/// This is important to use if you have data structures rather than bytes, as the serialization will\n\n/// be passed through the canonical serialization process, guaranteeing consistent behaviour.\n\n/// If you pass in a Vec<u8> expecting it to be verified literally the signature won't verify correctly.\n\n///\n\n/// See [ `verify_signature_raw` ]\n\npub fn verify_signature<K, S, D>(key: K, signature: S, data: D) -> ExternResult<bool>\n\nwhere\n\n K: Into<AgentPubKey>,\n\n S: Into<Signature>,\n\n D: serde::Serialize + std::fmt::Debug,\n\n{\n\n HDK.with(|h| {\n\n h.borrow()\n\n .verify_signature(VerifySignature::new(key.into(), signature.into(), data)?)\n\n })\n\n}\n\n\n", "file_path": "crates/hdk/src/ed25519.rs", "rank": 64, "score": 201152.0331947471 }, { "content": "/// Sign N serializable things using an ephemeral private key.\n\n///\n\n/// Serde convenience for [ `sign_ephemeral_raw` ].\n\npub fn sign_ephemeral<D>(datas: Vec<D>) -> ExternResult<EphemeralSignatures>\n\nwhere\n\n D: serde::Serialize + std::fmt::Debug,\n\n{\n\n HDK.with(|h| h.borrow().sign_ephemeral(SignEphemeral::new(datas)?))\n\n}\n\n\n", "file_path": "crates/hdk/src/ed25519.rs", "rank": 65, "score": 200987.81992159487 }, { "content": "/// # Call\n\n/// Make a Zome call in another Zome.\n\n/// The Zome can be in 
another Cell or the\n\n/// same Cell but must be installed on the same conductor.\n\n///\n\n/// ## Parameters\n\n/// - to_cell: The cell you want to call (If None will call the current cell).\n\n/// - zome_name: The name of the zome you want to call.\n\n/// - fn_name: The name of the function in the zome you are calling.\n\n/// - cap_secret: The capability secret if required.\n\n/// - payload: The arguments to the function you are calling.\n\npub fn call<I>(\n\n to_cell: Option<CellId>,\n\n zome_name: ZomeName,\n\n fn_name: FunctionName,\n\n cap_secret: Option<CapSecret>,\n\n payload: I,\n\n) -> ExternResult<ZomeCallResponse>\n\nwhere\n\n I: serde::Serialize + std::fmt::Debug,\n\n{\n\n // @todo is this secure to set this in the wasm rather than have the host inject it?\n\n let provenance = agent_info()?.agent_latest_pubkey;\n\n Ok(HDK\n\n .with(|h| {\n\n h.borrow().call(vec![Call::new(\n\n to_cell,\n\n zome_name,\n\n fn_name,\n\n cap_secret,\n\n ExternIO::encode(payload)?,\n\n provenance,\n\n )])\n\n })?\n\n .into_iter()\n\n .next()\n\n .unwrap())\n\n}\n\n\n", "file_path": "crates/hdk/src/p2p.rs", "rank": 66, "score": 200788.04570911743 }, { "content": "/// You should keep your query type cheap to clone.\n\n/// If there is any large data put it in an Arc.\n\npub trait Query: Clone {\n\n type State;\n\n type Item: HasValidationStatus;\n\n type Output;\n\n\n\n fn query(&self) -> String {\n\n \"\".into()\n\n }\n\n fn params(&self) -> Vec<Params> {\n\n Vec::with_capacity(0)\n\n }\n\n fn init_fold(&self) -> StateQueryResult<Self::State>;\n\n\n\n fn as_filter(&self) -> Box<dyn Fn(&QueryData<Self>) -> bool> {\n\n Box::new(|_| true)\n\n }\n\n\n\n fn as_map(&self) -> Arc<dyn Fn(&Row) -> StateQueryResult<Self::Item>>;\n\n\n\n fn fold(&self, state: Self::State, data: Self::Item) -> StateQueryResult<Self::State>;\n", "file_path": "crates/holochain_state/src/query.rs", "rank": 67, "score": 200315.60189085617 }, { "content": "/// Implementors are able to create a new read-write 
DB transaction\n\npub trait WriteManager<'e> {\n\n /// Run a closure, passing in a mutable reference to a read-write\n\n /// transaction, and commit the transaction after the closure has run.\n\n /// If there is a SQLite error, recover from it and re-run the closure.\n\n // FIXME: B-01566: implement write failure detection\n\n fn with_commit_sync<E, R, F>(&'e mut self, f: F) -> Result<R, E>\n\n where\n\n E: From<DatabaseError>,\n\n F: 'e + FnOnce(&mut Transaction) -> Result<R, E>;\n\n\n\n // /// Get a raw read-write transaction for this environment.\n\n // /// It is preferable to use WriterManager::with_commit for database writes,\n\n // /// which can properly recover from and manage write failures\n\n // fn writer_unmanaged(&'e mut self) -> DatabaseResult<Writer<'e>>;\n\n\n\n #[cfg(feature = \"test_utils\")]\n\n fn with_commit_test<R, F>(&'e mut self, f: F) -> Result<R, DatabaseError>\n\n where\n\n F: 'e + FnOnce(&mut Transaction) -> R,\n\n {\n", "file_path": "crates/holochain_sqlite/src/db.rs", "rank": 68, "score": 200309.59168285882 }, { "content": "/// Implementors are able to create a new read-only DB transaction\n\npub trait ReadManager<'e> {\n\n /// Run a closure, passing in a new read-only transaction\n\n fn with_reader<E, R, F>(&'e mut self, f: F) -> Result<R, E>\n\n where\n\n E: From<DatabaseError>,\n\n F: 'e + FnOnce(Transaction) -> Result<R, E>;\n\n\n\n #[cfg(feature = \"test_utils\")]\n\n /// Same as with_reader, but with no Results: everything gets unwrapped\n\n fn with_reader_test<R, F>(&'e mut self, f: F) -> R\n\n where\n\n F: 'e + FnOnce(Transaction) -> R;\n\n}\n\n\n", "file_path": "crates/holochain_sqlite/src/db.rs", "rank": 69, "score": 200309.59168285882 }, { "content": "pub trait Invocation: Clone {\n\n /// Some invocations call into a single zome and some call into many or all zomes.\n\n /// An example of an invocation that calls across all zomes is init. 
Init must pass for every\n\n /// zome in order for the Dna overall to successfully init.\n\n /// An example of an invocation that calls a single zome is validation of an entry, because\n\n /// the entry is only defined in a single zome, so it only makes sense for that exact zome to\n\n /// define the validation logic for that entry.\n\n /// In the future this may be expanded to support a subset of zomes that is larger than one.\n\n /// For example, we may want to trigger a callback in all zomes that implement a\n\n /// trait/interface, but this doesn't exist yet, so the only valid options are All or One.\n\n fn zomes(&self) -> ZomesToInvoke;\n\n /// Invocations execute in a \"sparse\" manner of decreasing specificity. In technical terms this\n\n /// means that the list of strings in FnComponents will be concatenated into a single function\n\n /// name to be called, then the last string will be removed and a shorter function name will\n\n /// be attempted and so on until all variations have been attempted.\n\n /// For example, if FnComponents was vec![\"foo\", \"bar\", \"baz\"] it would loop as \"foo_bar_baz\"\n\n /// then \"foo_bar\" then \"foo\". 
All of those three callbacks that are defined will be called\n\n /// _unless a definitive callback result is returned_.\n\n /// See [ `CallbackResult::is_definitive` ] in zome_types.\n\n /// All of the individual callback results are then folded into a single overall result value\n", "file_path": "crates/holochain/src/core/ribosome.rs", "rank": 70, "score": 200309.59168285882 }, { "content": "#[async_trait::async_trait]\n\npub trait SignedHeaderHashedExt {\n\n /// Create a hash from data\n\n fn from_content_sync(signed_header: SignedHeader) -> SignedHeaderHashed;\n\n /// Sign some content\n\n #[allow(clippy::new_ret_no_self)]\n\n async fn new(\n\n keystore: &KeystoreSender,\n\n header: HeaderHashed,\n\n ) -> Result<SignedHeaderHashed, KeystoreError>;\n\n /// Validate the data\n\n async fn validate(&self) -> Result<(), KeystoreError>;\n\n}\n\n\n\n#[allow(missing_docs)]\n\n#[async_trait::async_trait]\n\nimpl SignedHeaderHashedExt for SignedHeaderHashed {\n\n fn from_content_sync(signed_header: SignedHeader) -> Self\n\n where\n\n Self: Sized,\n\n {\n", "file_path": "crates/holochain_types/src/element.rs", "rank": 71, "score": 200259.42892952723 }, { "content": "/// A wrapper around HolochainP2pSender that partially applies the dna_hash / agent_pub_key.\n\n/// I.e. 
a sender that is tied to a specific cell.\n\npub trait HolochainP2pCellT {\n\n /// owned getter\n\n fn dna_hash(&self) -> DnaHash;\n\n\n\n /// owned getter\n\n fn from_agent(&self) -> AgentPubKey;\n\n\n\n /// Construct the CellId from the defined DnaHash and AgentPubKey\n\n fn cell_id(&self) -> CellId {\n\n CellId::new(self.dna_hash(), self.from_agent())\n\n }\n\n\n\n /// The p2p module must be informed at runtime which dna/agent pairs it should be tracking.\n\n async fn join(&self) -> actor::HolochainP2pResult<()>;\n\n\n\n /// If a cell is disabled, we'll need to \\\"leave\\\" the network module as well.\n\n async fn leave(&self) -> actor::HolochainP2pResult<()>;\n\n\n\n /// Invoke a zome function on a remote node (if you have been granted the capability).\n\n async fn call_remote(\n", "file_path": "crates/holochain_p2p/src/lib.rs", "rank": 72, "score": 200257.4365672938 }, { "content": "/// Some legacy APIs to make refactor easier.\n\npub trait KeystoreSenderExt {\n\n /// Generates a new pure entropy keypair in the keystore, returning the public key.\n\n fn generate_sign_keypair_from_pure_entropy(&self) -> KeystoreApiFuture<holo_hash::AgentPubKey>;\n\n\n\n /// Generate a signature for a given blob of binary data.\n\n fn sign(&self, input: Sign) -> KeystoreApiFuture<Signature>;\n\n\n\n /// If we have a TLS cert in lair - return the first one\n\n /// Errors if no certs in lair\n\n fn get_first_tls_cert(&self) -> KeystoreApiFuture<(CertDigest, Cert, CertPrivKey)>;\n\n\n\n /// If we have a TLS cert in lair - return the first one\n\n /// otherwise, generate a TLS cert and return it\n\n fn get_or_create_first_tls_cert(&self) -> KeystoreApiFuture<(CertDigest, Cert, CertPrivKey)>;\n\n\n\n /// Generate a new x25519 keypair in lair and get the pubkey back for general usage.\n\n fn create_x25519_keypair(\n\n &self,\n\n ) -> KeystoreApiFuture<holochain_zome_types::x_salsa20_poly1305::x25519::X25519PubKey>;\n\n\n", "file_path": 
"crates/holochain_keystore/src/keystore_actor.rs", "rank": 73, "score": 200251.97107912018 }, { "content": "/// Set when a [`DhtOp`] was integrated.\n\npub fn set_when_integrated(\n\n txn: &mut Transaction,\n\n hash: DhtOpHash,\n\n time: Timestamp,\n\n) -> StateMutationResult<()> {\n\n dht_op_update!(txn, hash, {\n\n \"when_integrated\": time,\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 74, "score": 199750.91205363814 }, { "content": "/// Set the integration dependency of a [`DhtOp`] in the database.\n\npub fn set_dependency(\n\n txn: &mut Transaction,\n\n hash: DhtOpHash,\n\n dependency: Dependency,\n\n) -> StateMutationResult<()> {\n\n match dependency {\n\n Dependency::Header(dep) => {\n\n dht_op_update!(txn, hash, {\n\n \"dependency\": dep,\n\n })?;\n\n }\n\n Dependency::Entry(dep) => {\n\n dht_op_update!(txn, hash, {\n\n \"dependency\": dep,\n\n })?;\n\n }\n\n Dependency::Null => (),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 75, "score": 199750.9120536382 }, { "content": "pub fn put(\n\n txn: &mut Transaction,\n\n key: EntryDefBufferKey,\n\n entry_def: EntryDef,\n\n) -> StateMutationResult<()> {\n\n let key: EntryDefStoreKey = key.into();\n\n mutations::insert_entry_def(txn, key, entry_def)\n\n}\n\n\n\nimpl From<EntryDefBufferKey> for EntryDefStoreKey {\n\n fn from(a: EntryDefBufferKey) -> Self {\n\n Self(\n\n a.try_into()\n\n .expect(\"EntryDefStoreKey serialization cannot fail\"),\n\n )\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for EntryDefStoreKey {\n\n fn from(bytes: &[u8]) -> Self {\n", "file_path": "crates/holochain_state/src/entry_def.rs", "rank": 76, "score": 199750.9120536382 }, { "content": "/// Query the _headers_ of a remote agent's chain.\n\n///\n\n/// The agent activity is only the headers of their source chain.\n\n/// The agent activity is held by the neighbourhood centered on the agent's public key, rather than a content hash like the rest of the 
DHT.\n\n///\n\n/// The agent activity can be filtered with [ `ChainQueryFilter` ] like a local chain query.\n\npub fn get_agent_activity(\n\n agent: AgentPubKey,\n\n query: ChainQueryFilter,\n\n request: ActivityRequest,\n\n) -> ExternResult<AgentActivity> {\n\n HDK.with(|h| {\n\n h.borrow()\n\n .get_agent_activity(GetAgentActivityInput::new(agent, query, request))\n\n })\n\n}\n\n\n", "file_path": "crates/hdk/src/chain.rs", "rank": 77, "score": 199750.91205363814 }, { "content": "/// Insert a [`DhtOp`] into the database.\n\npub fn insert_op(\n\n txn: &mut Transaction,\n\n op: DhtOpHashed,\n\n is_authored: bool,\n\n) -> StateMutationResult<()> {\n\n let (op, hash) = op.into_inner();\n\n let op_light = op.to_light();\n\n let header = op.header();\n\n let timestamp = header.timestamp();\n\n let signature = op.signature().clone();\n\n if let Some(entry) = op.entry() {\n\n let entry_hashed = EntryHashed::with_pre_hashed(\n\n entry.clone(),\n\n header\n\n .entry_hash()\n\n .ok_or_else(|| DhtOpError::HeaderWithoutEntry(header.clone()))?\n\n .clone(),\n\n );\n\n insert_entry(txn, entry_hashed)?;\n\n }\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 78, "score": 199750.91205363814 }, { "content": "/// Update a capability secret.\n\n///\n\n/// Wraps the [ `update` ] HDK function with system type parameters set.\n\n/// This guards against updating application entries or setting the wrong entry types.\n\n///\n\n/// Capability grant updates work exactly as a delete+create of the old+new grant entries.\n\n///\n\n/// The first argument is the header hash of the old grant being deleted as per [ `delete_cap_grant` ].\n\n/// The second argument is the entry value of the new grant to create as per [ `create_cap_grant` ].\n\npub fn update_cap_grant(\n\n old_grant_header_hash: HeaderHash,\n\n new_grant_value: CapGrantEntry,\n\n) -> ExternResult<HeaderHash> {\n\n update(\n\n old_grant_header_hash,\n\n CreateInput::new(\n\n EntryDefId::CapGrant,\n\n 
Entry::CapGrant(new_grant_value),\n\n ChainTopOrdering::default(),\n\n ),\n\n )\n\n}\n", "file_path": "crates/hdk/src/capability.rs", "rank": 79, "score": 199750.91205363814 }, { "content": "/// Generate a new directory structure for a sandbox.\n\npub fn generate_directory(\n\n root: Option<PathBuf>,\n\n directory: Option<PathBuf>,\n\n) -> anyhow::Result<PathBuf> {\n\n let mut dir = root.unwrap_or_else(std::env::temp_dir);\n\n let directory = directory.unwrap_or_else(|| nanoid::nanoid!().into());\n\n dir.push(directory);\n\n std::fs::create_dir(&dir)?;\n\n let mut keystore_dir = dir.clone();\n\n keystore_dir.push(\"keystore\");\n\n std::fs::create_dir(keystore_dir)?;\n\n Ok(dir)\n\n}\n", "file_path": "crates/hc_sandbox/src/generate.rs", "rank": 80, "score": 199750.91205363814 }, { "content": "/// Generate a new sandbox from a full config.\n\npub fn generate_with_config(\n\n config: Option<ConductorConfig>,\n\n root: Option<PathBuf>,\n\n directory: Option<PathBuf>,\n\n) -> anyhow::Result<PathBuf> {\n\n let dir = generate_directory(root, directory)?;\n\n let config = config.unwrap_or_else(|| create_config(dir.clone()));\n\n write_config(dir.clone(), &config);\n\n Ok(dir)\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/generate.rs", "rank": 81, "score": 199750.9120536382 }, { "content": "/// Lock the chain with the given lock id until the given end time.\n\n/// During this time only the lock id will be unlocked according to `is_chain_locked`.\n\n/// The chain can be unlocked for all lock ids at any time by calling `unlock_chain`.\n\n/// In theory there can be multiple locks active at once.\n\n/// If there are multiple locks active at once effectively all locks are locked\n\n/// because the chain is locked if there are ANY locks that don't match the\n\n/// current id being queried.\n\n/// In practise this is useless so don't do that. 
One lock at a time please.\n\npub fn lock_chain(\n\n txn: &mut Transaction,\n\n lock: &[u8],\n\n expires_at: &Timestamp,\n\n) -> StateMutationResult<()> {\n\n sql_insert!(txn, ChainLock, {\n\n \"lock\": lock,\n\n \"expires_at_timestamp\": expires_at,\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 82, "score": 199750.91205363814 }, { "content": "/// Turn an [AgentKey] into a [KitsuneAgent]\n\npub fn agent_holo_to_kit(a: holo_hash::AgentPubKey) -> kitsune_p2p::KitsuneAgent {\n\n a.into_kitsune_raw()\n\n}\n\n\n", "file_path": "crates/holochain_p2p/src/types.rs", "rank": 83, "score": 199150.8837450919 }, { "content": "/// Verify the passed signature and public key against the literal bytes input.\n\n///\n\n/// The data is used as-is, there is no serialization or additional processing.\n\n/// This is best to use if you have literal bytes from somewhere.\n\n/// If you pass in a Vec<u8> expecting it to be serialized here, the signature won't verify correctly.\n\n///\n\n/// See [ `verify_signature` ]\n\npub fn verify_signature_raw<K, S>(key: K, signature: S, data: Vec<u8>) -> ExternResult<bool>\n\nwhere\n\n K: Into<AgentPubKey>,\n\n S: Into<Signature>,\n\n{\n\n HDK.with(|h| {\n\n h.borrow()\n\n .verify_signature(VerifySignature::new_raw(key.into(), signature.into(), data))\n\n })\n\n}\n", "file_path": "crates/hdk/src/ed25519.rs", "rank": 84, "score": 198515.11709698988 }, { "content": "pub fn x_25519_x_salsa20_poly1305_encrypt(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: X25519XSalsa20Poly1305Encrypt,\n\n) -> Result<XSalsa20Poly1305EncryptedData, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ keystore: Permission::Allow, .. 
} => {\n\n tokio_helper::block_forever_on(async move {\n\n call_context\n\n .host_context\n\n .keystore()\n\n .x_25519_x_salsa20_poly1305_encrypt(input)\n\n .await\n\n })\n\n .map_err(|keystore_error| WasmError::Host(keystore_error.to_string()))\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/x_25519_x_salsa20_poly1305_encrypt.rs", "rank": 85, "score": 198165.4179174553 }, { "content": "pub fn create_x25519_keypair(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<X25519PubKey, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ keystore: Permission::Allow, .. } => tokio_helper::block_forever_on(async move {\n\n call_context\n\n .host_context\n\n .keystore()\n\n .create_x25519_keypair()\n\n .await\n\n })\n\n .map_err(|keystore_error| WasmError::Host(keystore_error.to_string())),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n// See x_25519_x_salsa20_poly1305_encrypt for testing encryption using created keypairs.\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/create_x25519_keypair.rs", "rank": 86, "score": 198165.4179174553 }, { "content": "pub fn x_salsa20_poly1305_encrypt(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: XSalsa20Poly1305Encrypt,\n\n) -> Result<XSalsa20Poly1305EncryptedData, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ keystore: Permission::Allow, .. 
} => {\n\n let system_random = ring::rand::SystemRandom::new();\n\n let mut nonce_bytes = [0; holochain_zome_types::x_salsa20_poly1305::nonce::NONCE_BYTES];\n\n system_random\n\n .fill(&mut nonce_bytes)\n\n .map_err(|ring_unspecified| WasmError::Host(ring_unspecified.to_string()))?;\n\n\n\n // @todo use the real libsodium somehow instead of this rust crate.\n\n // The main issue here is dependency management - it's not necessarily simple to get libsodium\n\n // reliably on consumer devices, e.g. we might want to statically link it somewhere.\n\n // @todo this key ref should be an opaque ref to lair and the encrypt should happen in lair.\n\n let lib_key = GenericArray::from_slice(input.as_key_ref_ref().as_ref());\n\n let cipher = XSalsa20Poly1305::new(lib_key);\n\n let lib_nonce = GenericArray::from_slice(&nonce_bytes);\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/x_salsa20_poly1305_encrypt.rs", "rank": 87, "score": 198165.4179174553 }, { "content": "pub fn x_25519_x_salsa20_poly1305_decrypt(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: X25519XSalsa20Poly1305Decrypt,\n\n) -> Result<Option<XSalsa20Poly1305Data>, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ keystore_deterministic: Permission::Allow, .. 
} => {\n\n tokio_helper::block_forever_on(async move {\n\n call_context\n\n .host_context\n\n .keystore()\n\n .x_25519_x_salsa20_poly1305_decrypt(input)\n\n .await\n\n })\n\n .map_err(|keystore_error| WasmError::Host(keystore_error.to_string()))\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/x_25519_x_salsa20_poly1305_decrypt.rs", "rank": 88, "score": 198165.4179174553 }, { "content": "pub fn get_agent_activity(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: GetAgentActivityInput,\n\n) -> Result<AgentActivity, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ read_workspace: Permission::Allow, .. } => {\n\n let GetAgentActivityInput {\n\n agent_pubkey,\n\n chain_query_filter,\n\n activity_request,\n\n } = input;\n\n let options = match activity_request {\n\n ActivityRequest::Status => GetActivityOptions {\n\n include_valid_activity: false,\n\n include_rejected_activity: false,\n\n ..Default::default()\n\n },\n\n ActivityRequest::Full => GetActivityOptions {\n\n include_valid_activity: true,\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/get_agent_activity.rs", "rank": 89, "score": 198165.4179174553 }, { "content": "pub fn x_salsa20_poly1305_decrypt(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n input: XSalsa20Poly1305Decrypt,\n\n) -> Result<Option<XSalsa20Poly1305Data>, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ keystore_deterministic: Permission::Allow, .. 
} => {\n\n // @todo use a libsodium wrapper instead of an ad-hoc rust implementation.\n\n // Note that the we're mapping any decrypting error to None here.\n\n // @todo this decrypt should be in lair and key refs should be refs to keys in lair\n\n let lib_key = GenericArray::from_slice(input.as_key_ref_ref().as_ref());\n\n let cipher = XSalsa20Poly1305::new(lib_key);\n\n let lib_nonce = GenericArray::from_slice(input.as_encrypted_data_ref().as_nonce_ref().as_ref());\n\n Ok(\n\n match cipher.decrypt(lib_nonce, input.as_encrypted_data_ref().as_encrypted_data_ref()) {\n\n Ok(data) => Some(XSalsa20Poly1305Data::from(data)),\n\n Err(_) => None,\n\n }\n\n )\n\n },\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\n// Tests for the decrypt round trip are in xsalsa20_poly1305_encrypt.\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/x_salsa20_poly1305_decrypt.rs", "rank": 90, "score": 198165.4179174553 }, { "content": "/// Sign N data using an ephemeral private key.\n\n///\n\n/// This is a complement to [ `sign_raw` ] in case we don't have a meaningful key for the input.\n\n/// __The generated private half of the key is discarded immediately upon signing__.\n\n///\n\n/// The signatures output are pairwise ordered the same as the input data.\n\n/// It is up to the caller to construct meaning for ephemeral signatures in some cryptographic system.\n\npub fn sign_ephemeral_raw(datas: Vec<Vec<u8>>) -> ExternResult<EphemeralSignatures> {\n\n HDK.with(|h| h.borrow().sign_ephemeral(SignEphemeral::new_raw(datas)))\n\n}\n\n\n", "file_path": "crates/hdk/src/ed25519.rs", "rank": 91, "score": 197896.0616327814 }, { "content": "/// Extension trait for channel writers\n\npub trait TransportChannelWriteExt {\n\n /// Write all data and close channel\n\n fn write_and_close(\n\n &mut self,\n\n data: Vec<u8>,\n\n ) -> ghost_actor::dependencies::must_future::MustBoxFuture<TransportResult<()>>;\n\n}\n\n\n\nimpl<T: futures::sink::Sink<Vec<u8>, Error = TransportError> + Send + Unpin + 
'static>\n\n TransportChannelWriteExt for T\n\n{\n\n fn write_and_close(\n\n &mut self,\n\n data: Vec<u8>,\n\n ) -> ghost_actor::dependencies::must_future::MustBoxFuture<TransportResult<()>> {\n\n KitsuneTransportMetrics::count_filter(\n\n KitsuneTransportMetrics::Write,\n\n data.len(),\n\n \"transport\",\n\n );\n\n async move {\n\n self.send(data).await?;\n\n self.close().await?;\n\n Ok(())\n\n }\n\n .boxed()\n\n .into()\n\n }\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/types/src/transport.rs", "rank": 92, "score": 197505.82262491548 }, { "content": "/// Extension trait for channel readers\n\npub trait TransportChannelReadExt {\n\n /// Read the stream to close into a single byte vec.\n\n fn read_to_end(self)\n\n -> ghost_actor::dependencies::must_future::MustBoxFuture<'static, Vec<u8>>;\n\n}\n\n\n\nimpl<T: futures::stream::Stream<Item = Vec<u8>> + Send + Unpin + 'static> TransportChannelReadExt\n\n for T\n\n{\n\n fn read_to_end(\n\n self,\n\n ) -> ghost_actor::dependencies::must_future::MustBoxFuture<'static, Vec<u8>> {\n\n async move {\n\n let r = self\n\n .fold(Vec::new(), |mut acc, x| async move {\n\n acc.extend_from_slice(&x);\n\n acc\n\n })\n\n .await;\n\n KitsuneTransportMetrics::count_filter(\n", "file_path": "crates/kitsune_p2p/types/src/transport.rs", "rank": 93, "score": 197505.82262491548 }, { "content": "/// Extension trait for additional methods on TransportListenerSenders\n\npub trait TransportListenerSenderExt {\n\n /// Make a request using a single channel open/close.\n\n fn request(\n\n &self,\n\n url: url2::Url2,\n\n data: Vec<u8>,\n\n ) -> ghost_actor::dependencies::must_future::MustBoxFuture<'static, TransportResult<Vec<u8>>>;\n\n}\n\n\n\nimpl<T: TransportListenerSender> TransportListenerSenderExt for T {\n\n fn request(\n\n &self,\n\n url: url2::Url2,\n\n data: Vec<u8>,\n\n ) -> ghost_actor::dependencies::must_future::MustBoxFuture<'static, TransportResult<Vec<u8>>>\n\n {\n\n let fut = self.create_channel(url);\n\n async move {\n\n let 
(_url, mut write, read) = fut.await?;\n\n KitsuneTransportMetrics::count_filter(\n", "file_path": "crates/kitsune_p2p/types/src/transport.rs", "rank": 94, "score": 197505.6102149599 }, { "content": "/// Extension trait for converting GhostSender<HolochainP2p> into HolochainP2pCell\n\npub trait HolochainP2pRefToCell {\n\n /// Partially apply dna_hash && agent_pub_key to this sender,\n\n /// binding it to a specific cell context.\n\n fn into_cell(self, dna_hash: DnaHash, from_agent: AgentPubKey) -> crate::HolochainP2pCell;\n\n\n\n /// Clone and partially apply dna_hash && agent_pub_key to this sender,\n\n /// binding it to a specific cell context.\n\n fn to_cell(&self, dna_hash: DnaHash, from_agent: AgentPubKey) -> crate::HolochainP2pCell;\n\n}\n\n\n\nimpl HolochainP2pRefToCell for HolochainP2pRef {\n\n fn into_cell(self, dna_hash: DnaHash, from_agent: AgentPubKey) -> crate::HolochainP2pCell {\n\n crate::HolochainP2pCell {\n\n sender: self,\n\n dna_hash: Arc::new(dna_hash),\n\n from_agent: Arc::new(from_agent),\n\n }\n\n }\n\n\n\n fn to_cell(&self, dna_hash: DnaHash, from_agent: AgentPubKey) -> crate::HolochainP2pCell {\n\n self.clone().into_cell(dna_hash, from_agent)\n\n }\n\n}\n", "file_path": "crates/holochain_p2p/src/types/actor.rs", "rank": 95, "score": 197505.41131365777 }, { "content": "/// Kitsune hashes are expected to be 36 bytes.\n\n/// The first 32 bytes are the proper hash.\n\n/// The final 4 bytes are a hash-of-the-hash that can be treated like a u32 \"location\".\n\npub trait KitsuneBinType:\n\n 'static\n\n + Send\n\n + Sync\n\n + std::fmt::Debug\n\n + Clone\n\n + std::hash::Hash\n\n + PartialEq\n\n + Eq\n\n + PartialOrd\n\n + Ord\n\n + std::convert::Into<Vec<u8>>\n\n{\n\n /// Create an instance, ensuring the proper number of bytes were provided.\n\n fn new(bytes: Vec<u8>) -> Self;\n\n\n\n /// Fetch just the core 32 bytes (without the 4 location bytes).\n\n fn get_bytes(&self) -> &[u8];\n\n\n\n /// Fetch the dht \"loc\" / location for this hash.\n\n fn 
get_loc(&self) -> DhtLocation;\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/types/src/bin_types.rs", "rank": 96, "score": 197499.35517009164 }, { "content": "/// Wrapper for __call_remote host function.\n\n///\n\n/// There are several positional arguments:\n\n///\n\n/// - agent: The address of the agent to call the RPC style remote function on.\n\n/// - zome: The zome to call the remote function in. Use zome_info() to get the current zome info.\n\n/// - fn_name: The name of the function in the zome to call.\n\n/// - cap_secret: Optional cap claim secret to allow access to the remote call.\n\n/// - payload: The payload to send to the remote function; receiver needs to deserialize cleanly.\n\n///\n\n/// Response is [ `ExternResult` ] which returns [ `ZomeCallResponse` ] of the function call.\n\n/// [ `ZomeCallResponse::NetworkError` ] if there was a network error.\n\n/// [ `ZomeCallResponse::Unauthorized` ] if the provided cap grant is invalid.\n\n/// The unauthorized case should always be handled gracefully because gap grants can be revoked at\n\n/// any time and the claim holder has no way of knowing until they provide a secret for a call.\n\n///\n\n/// An Ok response already includes an [ `ExternIO` ] to be deserialized with `extern_io.decode()?`.\n\n///\n\n/// ```ignore\n\n/// ...\n\n/// let foo: Foo = call_remote(bob, \"foo_zome\", \"do_it\", secret, serializable_payload)?;\n\n/// ...\n\n/// ```\n\npub fn call_remote<I>(\n\n agent: AgentPubKey,\n\n zome: ZomeName,\n\n fn_name: FunctionName,\n\n cap_secret: Option<CapSecret>,\n\n payload: I,\n\n) -> ExternResult<ZomeCallResponse>\n\nwhere\n\n I: serde::Serialize + std::fmt::Debug,\n\n{\n\n Ok(HDK\n\n .with(|h| {\n\n h.borrow().call_remote(vec![CallRemote::new(\n\n agent,\n\n zome,\n\n fn_name,\n\n cap_secret,\n\n ExternIO::encode(payload)?,\n\n )])\n\n })?\n\n .into_iter()\n\n .next()\n\n .unwrap())\n\n}\n\n\n", "file_path": "crates/hdk/src/p2p.rs", "rank": 97, "score": 197461.54929144256 }, { "content": "pub fn 
rt() -> Runtime {\n\n Builder::new_multi_thread().enable_all().build().unwrap()\n\n}\n", "file_path": "crates/holochain/benches/consistency.rs", "rank": 98, "score": 197455.93559058913 }, { "content": "#[allow(clippy::extra_unused_lifetimes)]\n\npub fn agent_info<'a>(\n\n _ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<AgentInfo, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ agent_info: Permission::Allow, .. } => {\n\n let agent_pubkey = call_context\n\n .host_context\n\n .workspace()\n\n .source_chain()\n\n .agent_pubkey()\n\n .clone();\n\n Ok(AgentInfo {\n\n agent_initial_pubkey: agent_pubkey.clone(),\n\n agent_latest_pubkey: agent_pubkey,\n\n })\n\n },\n\n _ => unreachable!(),\n\n }\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/agent_info.rs", "rank": 99, "score": 197335.90171191975 } ]
Rust
src/theorem.rs
MDeiml/attomath
4aac4dad3cd776dd2cb1602aa930c04c315d5186
use std::{cmp::Ordering, num::Wrapping}; use crate::{ dvr::DVR, error::ProofError, expression::{ is_operator, ChainSubstitution, ShiftSubstitution, Substitution, VariableSubstitution, WholeSubstitution, }, statement::OwnedStatement, types::*, }; #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)] pub struct Theorem { conclusion: OwnedStatement, assumptions: Vec<OwnedStatement>, dvrs: Vec<DVR>, } impl Theorem { pub fn conclusion(&self) -> &OwnedStatement { &self.conclusion } pub fn assumptions(&self) -> &[OwnedStatement] { &self.assumptions } pub fn dvrs(&self) -> &[DVR] { &self.dvrs } pub fn new( conclusion: OwnedStatement, assumptions: Vec<OwnedStatement>, dvrs: Vec<DVR>, ) -> Self { Theorem { conclusion, assumptions, dvrs, } } pub fn standardize(&mut self) { let max_var = self.max_var(); let mut var_map = vec![None; (Wrapping(max_var as usize) + Wrapping(1)).0]; let mut next_var = 0; self.conclusion .expression .standardize_range(&mut var_map, &mut next_var, ..); self.assumptions.sort_unstable(); self.assumptions.dedup(); let mut indexed_assumptions = self .assumptions .iter() .cloned() .enumerate() .map(|(a, b)| (b, a)) .collect::<Vec<_>>(); let normalized_assumptions = self .assumptions .drain(..) 
.map(|assumption| { let mut normalized = assumption; normalized.expression.standardize(); normalized }) .collect::<Vec<_>>(); indexed_assumptions.sort_unstable_by_key(|(_, index)| &normalized_assumptions[*index]); let mut temp_next_var = next_var; for (assumption, _) in indexed_assumptions.iter_mut() { assumption .expression .standardize_range(&mut var_map, &mut temp_next_var, ..); } for (i, v) in var_map.iter_mut().enumerate() { *v = if i < next_var as usize { Some(i as Identifier) } else { None }; } let mut var_maps = vec![var_map]; for assumptions in indexed_assumptions .group_by_mut(|(_, i), (_, j)| normalized_assumptions[*i] == normalized_assumptions[*j]) { let mut next_var1 = 0; let mut assumptions_min: Option<Vec<(OwnedStatement, usize)>> = None; let mut var_maps1 = Vec::new(); for var_map in var_maps.iter_mut() { let mut perm = assumptions.iter().cloned().collect::<Vec<_>>(); for (assumption, _) in perm.iter_mut() { assumption.expression.substitute_variables(&var_map); } let mut perm = Permutations::new(&mut perm); while let Some(permutation) = perm.next() { let mut var_map1 = var_map.clone(); next_var1 = next_var; for (assumption, _) in permutation.iter_mut() { assumption.expression.standardize_range( &mut var_map1, &mut next_var1, next_var.., ); } match assumptions_min .as_deref_mut() .map(|a_min| { permutation .iter() .map(|(a, _)| a) .cmp(a_min.iter().map(|(a, _)| a)) }) .unwrap_or(Ordering::Less) { Ordering::Equal => { var_maps1.push(var_map1); } Ordering::Less => { var_maps1.clear(); var_maps1.push(var_map1); assumptions_min = Some(permutation.iter().cloned().collect()); } Ordering::Greater => {} } } } var_maps = var_maps1; next_var = next_var1; assumptions.swap_with_slice(assumptions_min.unwrap().as_mut_slice()); } self.assumptions .extend(indexed_assumptions.into_iter().map(|(a, _)| a)); for dvr in self.dvrs.iter_mut() { *dvr = dvr .substitute(&VariableSubstitution::new(var_maps[0].as_slice()).unwrap()) .next() .unwrap() .unwrap(); } 
self.dvrs.sort_unstable(); self.dvrs.dedup(); } pub fn max_var(&self) -> Identifier { self.conclusion .expression .variables() .chain( self.assumptions .iter() .map(|st| st.expression.variables()) .flatten(), ) .filter(|symb| !is_operator(*symb)) .max() .unwrap_or(-1) } pub fn substitute<S: Substitution>(&self, substitution: &S) -> Result<Self, ProofError> { self.substitute_skip_assumption(substitution, None) } fn substitute_skip_assumption<S: Substitution>( &self, substitution: &S, skip_assumption: Option<usize>, ) -> Result<Self, ProofError> { let conclusion = self.conclusion.substitute(substitution); let assumptions: Vec<OwnedStatement> = self .assumptions .iter() .enumerate() .filter_map(|(i, a)| { if Some(i) == skip_assumption { None } else { Some(a) } }) .map(|a| a.substitute(substitution)) .collect(); let dvrs = self .dvrs .iter() .map(|dvr| dvr.substitute(substitution)) .flatten() .collect::<Result<Vec<_>, _>>()?; Ok(Theorem { conclusion, assumptions, dvrs, }) } pub fn combine(&self, other: &Theorem, index: usize) -> Result<Self, ProofError> { let max_var = self.max_var(); let mut substitution = WholeSubstitution::with_capacity((Wrapping(max_var as usize) + Wrapping(1)).0); other .conclusion .unify(&self.assumptions[index], &mut substitution)?; let shift = other.max_var() + 1; let shift_sub = ShiftSubstitution::new(shift); let substitution = ChainSubstitution { first: substitution, then: shift_sub, }; let mut t = self.substitute_skip_assumption(&substitution, Some(index))?; t.assumptions.extend_from_slice(&other.assumptions); t.assumptions.shrink_to_fit(); t.dvrs.extend_from_slice(&other.dvrs); t.dvrs.shrink_to_fit(); Ok(t) } } struct Permutations<'a, T> { sequence: &'a mut [T], counters: Vec<usize>, depth: usize, } impl<'a, T> Permutations<'a, T> { fn new(sequence: &'a mut [T]) -> Self { let length = sequence.len(); Permutations { sequence, counters: vec![0; length], depth: 0, } } fn next(&mut self) -> Option<&mut [T]> { if self.depth >= 
self.sequence.len() { return None; } if self.depth != 0 { while self.counters[self.depth] >= self.depth { self.counters[self.depth] = 0; self.depth += 1; if self.depth >= self.sequence.len() { return None; } } if self.depth % 2 == 0 { self.sequence.swap(0, self.depth); } else { self.sequence.swap(self.counters[self.depth], self.depth); } self.counters[self.depth] += 1; self.depth = 1; Some(&mut self.sequence) } else { self.depth = 1; Some(&mut self.sequence) } } } #[cfg(test)] mod tests { use super::*; #[test] fn permutations() { let mut arr = [0, 1, 2, 3]; let mut perm = Permutations::new(&mut arr); let mut counter = 0; while let Some(_) = perm.next() { counter += 1; } assert_eq!(counter, 24); } }
use std::{cmp::Ordering, num::Wrapping}; use crate::{ dvr::DVR, error::ProofError, expression::{ is_operator, ChainSubstitution, ShiftSubstitution, Substitution, VariableSubstitution, WholeSubstitution, }, statement::OwnedStatement, types::*, }; #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)] pub struct Theorem { conclusion: OwnedStatement, assumptions: Vec<OwnedStatement>, dvrs: Vec<DVR>, } impl Theorem { pub fn conclusion(&self) -> &OwnedStatement { &self.conclusion } pub fn assumptions(&self) -> &[OwnedStatement] { &self.assumptions } pub fn dvrs(&self) -> &[DVR] { &self.dvrs } pub fn new( conclusion: OwnedStatement, assumptions: Vec<OwnedStatement>, dvrs: Vec<DVR>, ) -> Self { Theorem { conclusion, assumptions, dvrs, } } pub fn standardize(&mut self) { let max_var = self.max_var(); let mut var_map = vec![None; (Wrapping(max_var as usize) + Wrapping(1)).0]; let mut next_var = 0; self.conclusion .expression .standardize_range(&mut var_map, &mut next_var, ..); self.assumptions.sort_unstable(); self.assumptions.dedup(); let mut indexed_assumptions = self .assumptions .iter() .cloned() .enumerate() .map(|(a, b)| (b, a)) .collect::<Vec<_>>(); let normalized_assumptions = self .assumptions .drain(..) 
.map(|assumption| { let mut normalized = assumption; normalized.expression.standardize(); normalized }) .collect::<Vec<_>>(); indexed_assumptions.sort_unstable_by_key(|(_, index)| &normalized_assumptions[*index]); let mut temp_next_var = next_var; for (assumption, _) in indexed_assumptions.iter_mut() { assumption .expression .standardize_range(&mut var_map, &mut temp_next_var, ..); } for (i, v) in var_map.iter_mut().enumerate() { *v = if i < next_var as usize { Some(i as Identifier) } else { None }; } let mut var_maps = vec![var_map]; for assumptions in indexed_assumptions .group_by_mut(|(_, i), (_, j)| normalized_assumptions[*i] == normalized_assumptions[*j]) { let mut next_var1 = 0; let mut assumptions_min: Option<Vec<(OwnedStatement, usize)>> = None; let mut var_maps1 = Vec::new(); for var_map in var_maps.iter_mut() { let mut perm = assumptions.iter().cloned().collect::<Vec<_>>(); for (assumption, _) in perm.iter_mut() { assumption.expression.substitute_variables(&var_map); } let mut perm = Permutations::new(&mut perm); while let Some(permutation) = perm.next() { let mut var_map1 = var_map.clone(); next_var1 = next_var; for (assumption, _) in permutation.iter_mut() { assumption.expression.standardize_range( &mut var_map1, &mut next_var1, next_var.., ); } match assumptions_min .as_deref_mut() .map(|a_min| { permutation .iter() .map(|(a, _)| a) .cmp(a_min.iter().map(|(a, _)| a)) }) .unwrap_or(Ordering::Less) { Ordering::Equal => { var_maps1.push(var_map1); } Ordering::Less => { var_maps1.clear(); var_maps1.push(var_map1); assumptions_min = Some(permutation.iter().cloned().collect()); } Ordering::Greater => {} } } } var_maps = var_maps1; next_var = next_var1; assumptions.swap_with_slice(assumptions_min.unwrap().as_mut_slice()); } self.assumptions .extend(indexed_assumptions.into_iter().map(|(a, _)| a)); for dvr in self.dvrs.iter_mut() { *dvr = dvr .substitute(&VariableSubstitution::new(var_maps[0].as_slice()).unwrap()) .next() .unwrap() .unwrap(); } 
self.dvrs.sort_unstable(); self.dvrs.dedup(); } pub fn max_var(&self) -> Identifier { self.conclusion .expression .variables() .chain( self.
pub fn substitute<S: Substitution>(&self, substitution: &S) -> Result<Self, ProofError> { self.substitute_skip_assumption(substitution, None) } fn substitute_skip_assumption<S: Substitution>( &self, substitution: &S, skip_assumption: Option<usize>, ) -> Result<Self, ProofError> { let conclusion = self.conclusion.substitute(substitution); let assumptions: Vec<OwnedStatement> = self .assumptions .iter() .enumerate() .filter_map(|(i, a)| { if Some(i) == skip_assumption { None } else { Some(a) } }) .map(|a| a.substitute(substitution)) .collect(); let dvrs = self .dvrs .iter() .map(|dvr| dvr.substitute(substitution)) .flatten() .collect::<Result<Vec<_>, _>>()?; Ok(Theorem { conclusion, assumptions, dvrs, }) } pub fn combine(&self, other: &Theorem, index: usize) -> Result<Self, ProofError> { let max_var = self.max_var(); let mut substitution = WholeSubstitution::with_capacity((Wrapping(max_var as usize) + Wrapping(1)).0); other .conclusion .unify(&self.assumptions[index], &mut substitution)?; let shift = other.max_var() + 1; let shift_sub = ShiftSubstitution::new(shift); let substitution = ChainSubstitution { first: substitution, then: shift_sub, }; let mut t = self.substitute_skip_assumption(&substitution, Some(index))?; t.assumptions.extend_from_slice(&other.assumptions); t.assumptions.shrink_to_fit(); t.dvrs.extend_from_slice(&other.dvrs); t.dvrs.shrink_to_fit(); Ok(t) } } struct Permutations<'a, T> { sequence: &'a mut [T], counters: Vec<usize>, depth: usize, } impl<'a, T> Permutations<'a, T> { fn new(sequence: &'a mut [T]) -> Self { let length = sequence.len(); Permutations { sequence, counters: vec![0; length], depth: 0, } } fn next(&mut self) -> Option<&mut [T]> { if self.depth >= self.sequence.len() { return None; } if self.depth != 0 { while self.counters[self.depth] >= self.depth { self.counters[self.depth] = 0; self.depth += 1; if self.depth >= self.sequence.len() { return None; } } if self.depth % 2 == 0 { self.sequence.swap(0, self.depth); } else { 
self.sequence.swap(self.counters[self.depth], self.depth); } self.counters[self.depth] += 1; self.depth = 1; Some(&mut self.sequence) } else { self.depth = 1; Some(&mut self.sequence) } } } #[cfg(test)] mod tests { use super::*; #[test] fn permutations() { let mut arr = [0, 1, 2, 3]; let mut perm = Permutations::new(&mut arr); let mut counter = 0; while let Some(_) = perm.next() { counter += 1; } assert_eq!(counter, 24); } }
assumptions .iter() .map(|st| st.expression.variables()) .flatten(), ) .filter(|symb| !is_operator(*symb)) .max() .unwrap_or(-1) }
function_block-function_prefixed
[ { "content": "/// Tests whether the given identifier is an operator.\n\n///\n\n/// Operators occupy the range `(Identifier::MIN ..= -1)`.\n\n/// The special value `Identifier::MIN` is also an operator.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use attomath::{expression::is_operator, Identifier};\n\n///\n\n/// assert!(is_operator(-2));\n\n/// assert!(is_operator(Identifier::MIN));\n\n/// assert!(!is_operator(0));\n\n/// ```\n\npub fn is_operator(x: Identifier) -> bool {\n\n x < 0\n\n}\n\n\n\n/// A substion to combine two other substitutions.\n\n///\n\n/// Tries to use `first` first and uses `then` if the variable did not get substituted by `first`.\n\npub struct ChainSubstitution<S: Substitution, T: Substitution> {\n\n pub first: S,\n\n pub then: T,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Either<S, T> {\n\n Left(S),\n\n Right(T),\n\n}\n\n\n\nimpl<S: Borrow<[Identifier]>, T: Borrow<[Identifier]>> Borrow<[Identifier]> for Either<S, T> {\n\n fn borrow(&self) -> &[Identifier] {\n", "file_path": "src/expression.rs", "rank": 0, "score": 92037.57391576603 }, { "content": "/// A `Substitution` maps variable ids to expressions (represented by a sequence of identifiers).\n\n///\n\n/// This is intented to be used together with [`Expression`]s.\n\npub trait Substitution {\n\n type T: Borrow<[Identifier]> + std::fmt::Debug;\n\n\n\n /// Get the stored substitution for the variable with identifier `id`. 
Or `None` if the\n\n /// variable should not be replaced.\n\n ///\n\n /// # Panics\n\n /// This may panic if `id` is not in the range of the particular substitution.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::{Substitution, WholeSubstitution, Expression};\n\n ///\n\n /// let expr = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// sub.insert(0, expr.to_slice());\n\n /// assert_eq!(sub.substitution_opt(0), Some(expr.to_slice()));\n\n /// assert_eq!(sub.substitution_opt(1), None);\n\n /// ```\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<Self::T>>;\n", "file_path": "src/expression.rs", "rank": 1, "score": 77282.71124371825 }, { "content": "pub fn or_fail<I, O, E: ParseError<I>, F>(mut f: F) -> impl FnMut(I) -> IResult<I, O, E>\n\nwhere\n\n F: Parser<I, O, E>,\n\n{\n\n move |input| {\n\n f.parse(input).map_err(|error| match error {\n\n nom::Err::Error(e) => nom::Err::Failure(e),\n\n e => e,\n\n })\n\n }\n\n}\n", "file_path": "src/serialization/error.rs", "rank": 3, "score": 71714.61033394553 }, { "content": "fn main() {\n\n let filename = std::env::args().nth(1).unwrap();\n\n let mut fmt = Formatter::new();\n\n let mut database = Database::new();\n\n\n\n let file = File::open(filename).unwrap();\n\n for (line_number, line) in BufReader::new(file).lines().enumerate() {\n\n let line = line.unwrap();\n\n let command = match Command::parse(&fmt, &line) {\n\n Ok(command) => command,\n\n Err(err) => {\n\n eprintln!(\n\n \"Syntax error in line {}: {:?}\\n\\t{}\",\n\n line_number, err, line\n\n );\n\n return;\n\n }\n\n };\n\n match command.clone().apply(&mut fmt, &mut database) {\n\n Ok(_) => (),\n", "file_path": "src/main.rs", "rank": 4, "score": 36194.49992289758 }, { "content": "pub trait Length {\n\n fn length(&self) -> usize;\n\n}\n\n\n\nimpl Length for &str {\n\n fn length(&self) -> usize {\n\n self.len()\n\n }\n\n}\n\n\n\nimpl Display for GreedyError<&str> 
{\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n for (input, error) in self.0.iter() {\n\n writeln!(f, \"{:?} {}\", error, &input[0..20])?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<I> ParseError<I> for GreedyError<I>\n", "file_path": "src/serialization/error.rs", "rank": 5, "score": 30627.413785086275 }, { "content": "/// Type alias for a judgement (see [`Statement`][crate::Statement])\n\npub type Judgement = u8;\n\n\n\n/// Type alias for an identifier representing a variable or operator (see\n\n/// [`Expression`][crate::Expression])\n\npub type Identifier = i16;\n", "file_path": "src/types.rs", "rank": 6, "score": 26566.258129730195 }, { "content": "use crate::{\n\n error::ProofError,\n\n expression::{is_operator, Substitution},\n\n types::*,\n\n};\n\n\n\n/// A _distince variable relation_ for expressing that two variables must be different.\n\n///\n\n/// In the default case it is always assumed that all statements are correct if you replace\n\n/// a variable with a different subexpression. This leads to logical errors in statements like\n\n/// `forall x0. exists x1. 
x0 != x1`.\n\n#[derive(PartialEq, Eq, Clone, PartialOrd, Ord, Debug)]\n\npub struct DVR(Identifier, Identifier);\n\n\n\nimpl DVR {\n\n /// Returns this `DVR`s variables\n\n pub fn variables(&self) -> (Identifier, Identifier) {\n\n let DVR(a, b) = self;\n\n (*a, *b)\n\n }\n", "file_path": "src/dvr.rs", "rank": 7, "score": 26312.566527326293 }, { "content": "\n\n impl Iterator for Iter {\n\n type Item = Result<DVR, ProofError>;\n\n\n\n fn next(&mut self) -> Option<Result<DVR, ProofError>> {\n\n if self.index >= self.a.len() * self.b.len() {\n\n return None;\n\n }\n\n let res = DVR::new(\n\n self.a[self.index % self.a.len()],\n\n self.b[self.index / self.a.len()],\n\n );\n\n self.index += 1;\n\n Some(res)\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n let rem = self.a.len() * self.b.len() - self.index;\n\n (rem, Some(rem))\n\n }\n\n }\n\n Iter {\n\n a: vars_a,\n\n b: vars_b,\n\n index: 0,\n\n }\n\n }\n\n}\n", "file_path": "src/dvr.rs", "rank": 8, "score": 26309.602630386056 }, { "content": " /// expected.dedup();\n\n /// assert_eq!(new_dvrs, Ok(expected));\n\n ///\n\n /// let dvr = DVR::new(0, 1).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let expr0 = Expression::from_raw(vec![-2, 0, 1]).unwrap();\n\n /// sub.insert(0, expr0.to_slice());\n\n /// let expr1 = Expression::from_raw(vec![-2, 1, 2]).unwrap();\n\n /// sub.insert(1, expr1.to_slice());\n\n ///\n\n /// let new_dvrs = dvr.substitute(&sub).collect::<Result<Vec<_>, _>>();\n\n ///\n\n /// assert_eq!(new_dvrs, Err(ProofError::DVRError(1)));\n\n /// ```\n\n pub fn substitute<S: Substitution>(\n\n &self,\n\n substitution: &S,\n\n ) -> impl Iterator<Item = Result<DVR, ProofError>> {\n\n let DVR(a, b) = self;\n\n let vars_a = if let Some(sub) = substitution.substitution_opt(*a) {\n", "file_path": "src/dvr.rs", "rank": 9, "score": 26309.581068958796 }, { "content": " /// use attomath::DVR;\n\n /// use attomath::expression::{Expression, WholeSubstitution};\n\n /// use 
attomath::error::ProofError;\n\n ///\n\n /// let dvr = DVR::new(0, 1).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let expr0 = Expression::from_raw(vec![-2, 0, 1]).unwrap();\n\n /// sub.insert(0, expr0.to_slice());\n\n /// let expr1 = Expression::from_raw(vec![2]).unwrap();\n\n /// sub.insert(1, expr1.to_slice());\n\n ///\n\n /// let mut new_dvrs = dvr.substitute(&sub).collect::<Result<Vec<_>, _>>();\n\n /// new_dvrs = new_dvrs.map(|mut ds| {\n\n /// ds.sort();\n\n /// ds.dedup();\n\n /// ds\n\n /// });\n\n ///\n\n /// let mut expected = vec![DVR::new(0, 2).unwrap(), DVR::new(1, 2).unwrap()];\n\n /// expected.sort();\n", "file_path": "src/dvr.rs", "rank": 10, "score": 26307.549236171184 }, { "content": "\n\n /// Creates a new `DVR` restricting `a` and `b` from being the same variable.\n\n ///\n\n /// # Errors\n\n /// This function fails with a `DVRError` if `a == b`\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::DVR;\n\n /// use attomath::error::ProofError;\n\n ///\n\n /// let dvr = DVR::new(0, 1);\n\n /// assert_eq!(dvr.map(|d| d.variables()), Ok((0, 1)));\n\n ///\n\n /// let dvr = DVR::new(1, 1);\n\n /// assert_eq!(dvr, Err(ProofError::DVRError(1)));\n\n /// ```\n\n pub fn new(a: Identifier, b: Identifier) -> Result<Self, ProofError> {\n\n if is_operator(a) {\n\n Err(ProofError::DVRError(a))\n", "file_path": "src/dvr.rs", "rank": 11, "score": 26305.011904230367 }, { "content": " } else if is_operator(b) {\n\n Err(ProofError::DVRError(b))\n\n } else if a < b {\n\n Ok(DVR(a, b))\n\n } else if a > b {\n\n Ok(DVR(b, a))\n\n } else {\n\n Err(ProofError::DVRError(a))\n\n }\n\n }\n\n\n\n /// Uses the given `Substitution` to create new `DVR`s for each pair of variables in the new\n\n /// expressions for `self.variables()`.\n\n ///\n\n /// # Errors\n\n /// The `Iterator` will produce a `DVRError` if the substitutions for this `DVR`s' variables\n\n /// contains common variables\n\n ///\n\n /// # Example\n\n /// ```\n", 
"file_path": "src/dvr.rs", "rank": 12, "score": 26302.12138959506 }, { "content": " let mut res = sub.variables().collect::<Vec<_>>();\n\n res.sort();\n\n res.dedup();\n\n res\n\n } else {\n\n vec![*a]\n\n };\n\n let vars_b = if let Some(sub) = substitution.substitution_opt(*b) {\n\n let mut res = sub.variables().collect::<Vec<_>>();\n\n res.sort();\n\n res.dedup();\n\n res\n\n } else {\n\n vec![*b]\n\n };\n\n struct Iter {\n\n a: Vec<Identifier>,\n\n b: Vec<Identifier>,\n\n index: usize,\n\n }\n", "file_path": "src/dvr.rs", "rank": 13, "score": 26301.54570216207 }, { "content": " /// assert_eq!(sub.substitution_opt(1), None);\n\n /// ```\n\n pub fn insert(&mut self, id: Identifier, expr: Expression<&'a [Identifier]>) {\n\n self.substitution[id as usize] = Some(expr)\n\n }\n\n}\n\n\n\nimpl<'a> Substitution for WholeSubstitution<'a> {\n\n type T = &'a [Identifier];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<&'a [Identifier]>> {\n\n self.substitution[id as usize]\n\n }\n\n}\n\n\n\n/// A single variable substitution\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct SingleSubstitution(Identifier, Identifier);\n\n\n\nimpl SingleSubstitution {\n", "file_path": "src/expression.rs", "rank": 31, "score": 25372.93692420508 }, { "content": " }\n\n}\n\n\n\n/// A complete variable substitution.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct VariableSubstitution<T: Borrow<[Option<Identifier>]>>(T);\n\n\n\nimpl<T: Borrow<[Option<Identifier>]>> VariableSubstitution<T> {\n\n /// Creates a substitution with the capacity to store replacements for variables `0` to\n\n /// `n - 1`.\n\n ///\n\n /// Returns none if a identifier in `variables` is not a variable (see [is_operator]).\n\n pub fn new(variables: T) -> Option<Self> {\n\n for var in variables.borrow() {\n\n if let Some(var) = var {\n\n if is_operator(*var) {\n\n return None;\n\n }\n\n }\n\n }\n", "file_path": "src/expression.rs", "rank": 32, "score": 25371.314121611784 }, { "content": 
"\n\n pub(crate) fn substitute_variables(&mut self, var_map: &[Option<Identifier>]) {\n\n for symb in self.data.borrow_mut().iter_mut() {\n\n if !is_operator(*symb) {\n\n if let Some(sub) = var_map[*symb as usize] {\n\n *symb = sub;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Borrow<[Identifier]> + std::fmt::Debug> Expression<T> {\n\n /// The internal encoding in prefix order\n\n pub fn data<'a>(&'a self) -> &'a T {\n\n &self.data\n\n }\n\n\n\n /// Borrow this expressions contents\n\n pub fn to_slice<'a>(&'a self) -> Expression<&'a [Identifier]> {\n", "file_path": "src/expression.rs", "rank": 33, "score": 25370.31909487706 }, { "content": "\n\nimpl<T: BorrowMut<[Identifier]> + std::fmt::Debug> Expression<T> {\n\n /// Turns this expression into its standard representation, numbering variables in the order of\n\n /// their apperance.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::Expression;\n\n ///\n\n /// let mut s = Expression::from_raw([-2, -2, 2, 0, 2]).unwrap();\n\n /// s.standardize();\n\n /// assert_eq!(s.data(), &[-2, -2, 0, 1, 0]);\n\n /// ```\n\n pub fn standardize(&mut self) {\n\n let max_var = self.variables().max().unwrap_or(-1);\n\n let mut var_map = vec![None; (Wrapping(max_var as usize) + Wrapping(1)).0];\n\n let mut next_var = 0;\n\n self.standardize_range(&mut var_map, &mut next_var, ..);\n\n }\n\n\n", "file_path": "src/expression.rs", "rank": 34, "score": 25370.185651580807 }, { "content": " }\n\n }\n\n }\n\n return Ok(());\n\n }\n\n\n\n /// Use the given substitution on this expression to create a new expression\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::{Expression, WholeSubstitution};\n\n ///\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let expr = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// sub.insert(0, expr.to_slice());\n\n /// assert_eq!(*expr.substitute(&sub).to_slice().data(), &[-2, -2, 0, 1, 1]);\n\n /// ```\n\n pub fn substitute<S: Substitution>(&self, 
substitution: &S) -> Expression<Box<[Identifier]>> {\n\n let mut new_expr = Vec::with_capacity(self.data.borrow().len());\n\n for symb in self.data.borrow().iter() {\n", "file_path": "src/expression.rs", "rank": 35, "score": 25367.87040827465 }, { "content": " /// Create a new substitution to rename one variable.\n\n ///\n\n /// Returns `None` if one of the identifiers is not a variable (see [is_operator]).\n\n pub fn new(a: Identifier, b: Identifier) -> Option<Self> {\n\n if is_operator(a) || is_operator(b) {\n\n return None;\n\n }\n\n Some(SingleSubstitution(a, b))\n\n }\n\n}\n\n\n\nimpl Substitution for SingleSubstitution {\n\n type T = [Identifier; 1];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<[Identifier; 1]>> {\n\n if id == self.0 {\n\n Some(Expression { data: [self.1] })\n\n } else {\n\n None\n\n }\n", "file_path": "src/expression.rs", "rank": 36, "score": 25366.929785037388 }, { "content": " /// `n - 1`.\n\n pub fn with_capacity(n: usize) -> Self {\n\n WholeSubstitution {\n\n substitution: vec![None; n],\n\n }\n\n }\n\n\n\n /// Marks the `id` to be substituted by `expr`.\n\n ///\n\n /// # Panics\n\n /// This method panics if `id` is not in the range of this substitutions capacity\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::{Expression, Substitution, WholeSubstitution};\n\n ///\n\n /// let expr = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// sub.insert(0, expr.to_slice());\n\n /// assert_eq!(sub.substitution_opt(0), Some(expr.to_slice()));\n", "file_path": "src/expression.rs", "rank": 37, "score": 25366.469948662998 }, { "content": " type T = [Identifier; 1];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<[Identifier; 1]>> {\n\n if id < 0 {\n\n panic!(\"id is not a variable\");\n\n }\n\n Some(Expression {\n\n data: [id + self.shift as Identifier],\n\n })\n\n }\n\n}\n\n\n\n/// A general 
substitution\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct WholeSubstitution<'a> {\n\n substitution: Vec<Option<Expression<&'a [Identifier]>>>,\n\n}\n\n\n\nimpl<'a> WholeSubstitution<'a> {\n\n /// Creates a substitution with the capacity to store replacements for variables `0` to\n", "file_path": "src/expression.rs", "rank": 38, "score": 25366.005488834904 }, { "content": " }\n\n }\n\n\n\n quickcheck! {\n\n fn unify_substitute(a: Expression<Box<[Identifier]>>, b: Expression<Box<[Identifier]>>) -> bool {\n\n let max_var = b.variables().max().unwrap_or(-1);\n\n let mut substitution =\n\n WholeSubstitution::with_capacity((Wrapping(max_var as usize) + Wrapping(1)).0);\n\n match a.unify(&b, &mut substitution) {\n\n Ok(()) => b.substitute(&substitution) == a,\n\n Err(_) => true\n\n }\n\n }\n\n\n\n fn from_raw_single_variable(a: Identifier) -> bool {\n\n if is_operator(a) {\n\n return true;\n\n }\n\n Expression::from_raw([a]).is_some()\n\n }\n\n }\n\n}\n", "file_path": "src/expression.rs", "rank": 39, "score": 25365.42047769855 }, { "content": " pub(crate) fn standardize_range<R: RangeBounds<Identifier>>(\n\n &mut self,\n\n var_map: &mut [Option<Identifier>],\n\n next_var: &mut Identifier,\n\n range: R,\n\n ) {\n\n for symb in self.data.borrow_mut().iter_mut() {\n\n if !is_operator(*symb) && range.contains(symb) {\n\n let var = var_map[*symb as usize].unwrap_or_else(|| {\n\n let var = *next_var;\n\n var_map[*symb as usize] = Some(var);\n\n *next_var += 1;\n\n var\n\n });\n\n if range.contains(&var) {\n\n *symb = var;\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/expression.rs", "rank": 40, "score": 25365.276209019783 }, { "content": " .copied()\n\n .filter(|s| !is_operator(*s))\n\n }\n\n\n\n /// Returns the subexpression beginning at the given index.\n\n ///\n\n /// # Panics\n\n /// This method panics if start_index is not in the range `0..self.data().borrow().len()`\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::Expression;\n\n ///\n\n 
/// let st = Expression::from_raw([-2, 0, -2, 1, 0]).unwrap();\n\n /// assert_eq!(*st.subexpression(2).data(), &[-2, 1, 0]);\n\n /// assert_eq!(*st.subexpression(3).data(), &[1]);\n\n /// ```\n\n pub fn subexpression<'a>(&'a self, start_index: usize) -> Expression<&'a [Identifier]> {\n\n Self::subexpression_check(&self.data, start_index).expect(\n\n format!(\n", "file_path": "src/expression.rs", "rank": 41, "score": 25364.99523219334 }, { "content": " /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let result = a.unify(&b, &mut sub);\n\n /// assert_eq!(result, Err(ProofError::OperatorMismatch(-3, -2)));\n\n /// ```\n\n pub fn unify<'a, S: Borrow<[Identifier]>>(\n\n &'a self,\n\n other: &Expression<S>,\n\n substitution: &mut WholeSubstitution<'a>,\n\n ) -> Result<(), ProofError> {\n\n let expr = self.data.borrow();\n\n let mut expr_index = 0;\n\n for &symb in other.data.borrow().iter() {\n\n if is_operator(symb) {\n\n let symb_self = expr[expr_index];\n\n expr_index += 1;\n\n if symb_self != symb {\n\n return Err(ProofError::OperatorMismatch(symb_self, symb));\n\n }\n\n } else {\n\n if let Some(old) = substitution.substitution_opt(symb) {\n", "file_path": "src/expression.rs", "rank": 42, "score": 25364.76885678759 }, { "content": "use crate::{error::ProofError, types::*};\n\nuse std::{\n\n borrow::{Borrow, BorrowMut},\n\n cmp::Ordering,\n\n num::Wrapping,\n\n ops::RangeBounds,\n\n};\n\n\n\n/// A `Expression` is a binary tree with nodes called \"operators\" and leafs which called\n\n/// \"variables\".\n\n///\n\n/// The exception to this is the special leaf `Identifier::MIN` which is an operator\n\n/// and used as a \"terminator\" two allow for operators with arity 1 or less.\n\n///\n\n/// The expression is encoded by prefix orderinto a sequence of [`Identifier`]s which can be both\n\n/// variables and operators (see [`is_operator`]).\n\n#[derive(Clone, Eq, Ord, Debug, Copy)]\n\npub struct Expression<T: Borrow<[Identifier]>> {\n\n data: T,\n\n}\n", 
"file_path": "src/expression.rs", "rank": 43, "score": 25364.651359964402 }, { "content": " Some(VariableSubstitution(variables))\n\n }\n\n}\n\n\n\nimpl<S: Borrow<[Option<Identifier>]>> Substitution for VariableSubstitution<S> {\n\n type T = [Identifier; 1];\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<[Identifier; 1]>> {\n\n self.0.borrow()[id as usize].map(|v| Expression { data: [v] })\n\n }\n\n}\n\n\n", "file_path": "src/expression.rs", "rank": 44, "score": 25363.372629998434 }, { "content": " fn subexpression_check<'a>(\n\n expr: &'a T,\n\n start_index: usize,\n\n ) -> Option<Expression<&'a [Identifier]>> {\n\n let mut depth = 1;\n\n for (i, &s) in expr.borrow()[start_index..].iter().enumerate() {\n\n if is_operator(s) && s != Identifier::MIN {\n\n depth += 1;\n\n } else {\n\n depth -= 1;\n\n }\n\n if depth == 0 {\n\n return Some(Expression {\n\n data: &expr.borrow()[start_index..=start_index + i],\n\n });\n\n }\n\n }\n\n None\n\n }\n\n}\n", "file_path": "src/expression.rs", "rank": 45, "score": 25363.227213835507 }, { "content": " Expression {\n\n data: self.data.borrow(),\n\n }\n\n }\n\n\n\n /// Create a expression from its prefix order encoding. This checks if the given sequence\n\n /// encodes a binary tree and returns `None` if it does not.\n\n pub fn from_raw(expr: T) -> Option<Self> {\n\n if Self::check(&expr) {\n\n Some(Expression { data: expr })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Convencience function to iterate over this expressions variables. 
May contain duplicates.\n\n pub fn variables<'a>(&'a self) -> impl Iterator<Item = Identifier> + 'a {\n\n self.data\n\n .borrow()\n\n .iter()\n", "file_path": "src/expression.rs", "rank": 46, "score": 25363.16917565984 }, { "content": " })\n\n })\n\n }\n\n}\n\n\n\n/// Substitutes variables by shifting them to the right by the given ammount\n\npub struct ShiftSubstitution {\n\n shift: Identifier,\n\n}\n\n\n\nimpl ShiftSubstitution {\n\n pub fn new(shift: Identifier) -> Self {\n\n if shift < 0 {\n\n panic!(\"shift must be nonnegative\");\n\n }\n\n ShiftSubstitution { shift }\n\n }\n\n}\n\n\n\nimpl Substitution for ShiftSubstitution {\n", "file_path": "src/expression.rs", "rank": 47, "score": 25362.4784856485 }, { "content": " use Either::*;\n\n match self {\n\n Left(expr) => expr.borrow(),\n\n Right(expr) => expr.borrow(),\n\n }\n\n }\n\n}\n\n\n\nimpl<S: Substitution, T: Substitution> Substitution for ChainSubstitution<S, T> {\n\n type T = Either<S::T, T::T>;\n\n\n\n fn substitution_opt(&self, id: Identifier) -> Option<Expression<Self::T>> {\n\n self.first\n\n .substitution_opt(id)\n\n .map(|s| Expression {\n\n data: Either::Left(s.data),\n\n })\n\n .or_else(|| {\n\n self.then.substitution_opt(id).map(|s| Expression {\n\n data: Either::Right(s.data),\n", "file_path": "src/expression.rs", "rank": 48, "score": 25362.220588829474 }, { "content": " ///\n\n /// let a = Expression::from_raw([-2, 0, -2, 1, 0]).unwrap();\n\n /// let b = Expression::from_raw([-2, 0, 1]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let result = a.unify(&b, &mut sub); // (x0 ~> x0, x1 ~> (x1 -> x0))\n\n /// assert_eq!(result, Ok(()));\n\n /// assert_eq!(b.substitute(&sub), a);\n\n ///\n\n /// let a = Expression::from_raw([-2, 0, -2, 1, 0]).unwrap();\n\n /// let b = Expression::from_raw([-2, 0, 0]).unwrap();\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let result = a.unify(&b, &mut sub); // (x0 ~> x0, x0 ~> (x1 -> x0))\n\n /// 
assert_eq!(result, Err(ProofError::VariableMismatch(\n\n /// 0,\n\n /// vec![0].into_boxed_slice(),\n\n /// vec![-2, 1, 0].into_boxed_slice(),\n\n /// )));\n\n ///\n\n /// let a = Expression::from_raw([-3, 0, -2, 1, 0]).unwrap();\n\n /// let b = Expression::from_raw([-2, 0, 1]).unwrap();\n", "file_path": "src/expression.rs", "rank": 49, "score": 25361.982198318776 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::num::Wrapping;\n\n\n\n use quickcheck::Arbitrary;\n\n\n\n use super::*;\n\n\n\n impl Arbitrary for Expression<Box<[Identifier]>> {\n\n fn arbitrary(g: &mut quickcheck::Gen) -> Self {\n\n loop {\n\n let data = Vec::<Identifier>::arbitrary(g).into_boxed_slice();\n\n if let Some(subexpression) = Expression::subexpression_check(&data, 0) {\n\n return Expression {\n\n data: subexpression.data.to_owned().into_boxed_slice(),\n\n };\n\n }\n\n }\n", "file_path": "src/expression.rs", "rank": 50, "score": 25360.011443621555 }, { "content": " \"Somehow an invalid expression was formed: {:?}\",\n\n self.borrow()\n\n )\n\n .as_str(),\n\n )\n\n }\n\n\n\n /// Calculates a `Substitution` which transforms `other` into `this`. 
If this function suceeds\n\n /// then it is guaranteed that `other.substitute(&substitution) == self`.\n\n ///\n\n /// # Errors\n\n /// * OperatorMismatch - if the operators of `other` do not match the corresponding operators\n\n /// of `self`\n\n /// * VariableMismatch - if a variable in `other` would have to be substituted by two different\n\n /// expressions\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::expression::{Expression, WholeSubstitution};\n\n /// use attomath::error::ProofError;\n", "file_path": "src/expression.rs", "rank": 51, "score": 25359.12262326818 }, { "content": "\n\nimpl<T: Borrow<[Identifier]>, S: Borrow<[Identifier]>> PartialEq<Expression<S>> for Expression<T> {\n\n fn eq(&self, other: &Expression<S>) -> bool {\n\n self.data.borrow() == other.data.borrow()\n\n }\n\n}\n\n\n\nimpl<T: Borrow<[Identifier]>, S: Borrow<[Identifier]>> PartialOrd<Expression<S>> for Expression<T> {\n\n fn partial_cmp(&self, other: &Expression<S>) -> Option<Ordering> {\n\n // This code is safe, because i16 and u16 have the same memory layout.\n\n // The conversion is done to have operators be larger than variables.\n\n unsafe {\n\n let a: &[i16] = self.data.borrow();\n\n let a_transmuted: &[u16] = std::mem::transmute(a);\n\n let b: &[i16] = other.data.borrow();\n\n let b_transmuted: &[u16] = std::mem::transmute(b);\n\n a_transmuted.partial_cmp(b_transmuted)\n\n }\n\n }\n\n}\n", "file_path": "src/expression.rs", "rank": 52, "score": 25357.718640609266 }, { "content": " if is_operator(*symb) {\n\n new_expr.push(*symb)\n\n } else {\n\n if let Some(e) = substitution.substitution_opt(*symb) {\n\n new_expr.extend_from_slice(e.data.borrow());\n\n } else {\n\n new_expr.push(*symb);\n\n }\n\n }\n\n }\n\n Expression {\n\n data: new_expr.into_boxed_slice(),\n\n }\n\n }\n\n\n\n fn check(expr: &T) -> bool {\n\n Self::subexpression_check(expr, 0).map(|s| s.data.borrow().len())\n\n == Some(expr.borrow().len())\n\n }\n\n\n", "file_path": "src/expression.rs", "rank": 53, 
"score": 25355.48790749154 }, { "content": " let start = expr_index;\n\n expr_index += old.data.len();\n\n if self.data.borrow().len() < expr_index\n\n || old.data != &self.data.borrow()[start..expr_index]\n\n {\n\n return Err(ProofError::VariableMismatch(\n\n symb,\n\n Box::from(old.data),\n\n Box::from(self.subexpression(start).data.borrow()),\n\n ));\n\n }\n\n } else {\n\n let subexpr = self.subexpression(expr_index);\n\n expr_index += subexpr.data.borrow().len();\n\n substitution.insert(\n\n symb,\n\n Expression {\n\n data: subexpr.data.borrow(),\n\n },\n\n );\n", "file_path": "src/expression.rs", "rank": 54, "score": 25354.44413131935 }, { "content": "\n\n/// Tests whether the given identifier is an operator.\n\n///\n\n/// Operators occupy the range `(Identifier::MIN ..= -1)`.\n\n/// The special value `Identifier::MIN` is also an operator.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use attomath::{expression::is_operator, Identifier};\n\n///\n\n/// assert!(is_operator(-2));\n\n/// assert!(is_operator(Identifier::MIN));\n\n/// assert!(!is_operator(0));\n\n/// ```\n", "file_path": "src/expression.rs", "rank": 55, "score": 25353.562826279664 }, { "content": "#[derive(Debug)]\n\nenum GreedyErrorKind {\n\n Context(&'static str),\n\n Nom(ErrorKind),\n\n Char(char),\n\n}\n\n\n", "file_path": "src/serialization/error.rs", "rank": 56, "score": 20530.037589404605 }, { "content": "use crate::{error::ProofError, expression::SingleSubstitution, Identifier, Theorem};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct Database {\n\n names: HashMap<String, (usize, usize)>,\n\n theorems: Vec<(Theorem, Proof<usize>, Option<String>)>,\n\n last_name: usize,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum Proof<K> {\n\n Simplify(K, Identifier, Identifier),\n\n Combine(K, K, usize),\n\n Axiom(Theorem),\n\n}\n\n\n\nimpl<K> Proof<K> {\n\n pub fn map_id_result<K1, F, E>(self, f: F) -> Result<Proof<K1>, E>\n\n where\n", "file_path": 
"src/serialization/database.rs", "rank": 57, "score": 25.738478529193326 }, { "content": "use crate::{\n\n error::ProofError,\n\n expression::{Expression, Substitution, WholeSubstitution},\n\n types::*,\n\n};\n\nuse std::borrow::Borrow;\n\n\n\n/// Type alias for a statement that owns its expression\n\npub type OwnedStatement = Statement<Box<[Identifier]>>;\n\n\n\n/// A a combination of a [`Judgement`] and an [`Expression`], for example _x0 -> x0 is provable_\n\n///\n\n/// The __judgement__ is given in form of an integer, but often represents some meaning, like _this\n\n/// expression is provable_ or _this expression is syntactically correct_.\n\n#[derive(Eq, Clone, PartialOrd, Ord, Debug)]\n\npub struct Statement<T: Borrow<[Identifier]>> {\n\n pub judgement: Judgement,\n\n pub expression: Expression<T>,\n\n}\n\n\n", "file_path": "src/statement.rs", "rank": 58, "score": 23.729272924868724 }, { "content": "/// Statement {\n\n/// judgement: 0,\n\n/// expression: Expression::from_raw(vec![0].into_boxed_slice()).unwrap(),\n\n/// },\n\n/// Statement {\n\n/// judgement: 0,\n\n/// expression: Expression::from_raw(vec![-1, 0, 1].into_boxed_slice()).unwrap(),\n\n/// }\n\n/// ];\n\n/// let dvrs = vec![DVR::new(0, 1).unwrap()];\n\n/// let theorem = Theorem::new(conclusion, assumptions, dvrs);\n\n///\n\n/// let mut s = String::new();\n\n/// fmt.format_theorem(&mut s, &theorem);\n\n/// assert_eq!(s, \"a <> b, |- a, |- (a -> b) => |- b\");\n\n///\n\n/// let (remaining, theorem1) = fmt.parse_theorem(&s).unwrap();\n\n/// assert_eq!(remaining, \"\");\n\n/// assert_eq!(theorem1, theorem);\n\n/// ```\n", "file_path": "src/serialization/formatter.rs", "rank": 59, "score": 23.272828150959636 }, { "content": " self.format_variable(s, a);\n\n s.push_str(\" <> \");\n\n self.format_variable(s, b);\n\n }\n\n\n\n pub fn parse_dvr<'a>(&self, input: &'a str) -> IResult<&'a str, DVR, GreedyError<&'a str>> {\n\n let (input, a) = self.parse_variable(input)?;\n\n let (input, _) = tag(\" <> 
\")(input)?;\n\n let (input, b) = verify(|input| self.parse_variable(input), |b| *b != a)(input)?;\n\n Ok((input, DVR::new(a, b).unwrap()))\n\n }\n\n\n\n pub fn format_theorem(&self, s: &mut String, theorem: &Theorem) {\n\n let dvrs = theorem.dvrs();\n\n let assumptions = theorem.assumptions();\n\n if !dvrs.is_empty() || !assumptions.is_empty() {\n\n for (i, dvr) in dvrs.iter().enumerate() {\n\n self.format_dvr(s, dvr);\n\n if i != dvrs.len() - 1 || !assumptions.is_empty() {\n\n s.push_str(\", \");\n", "file_path": "src/serialization/formatter.rs", "rank": 60, "score": 21.026565536609127 }, { "content": "\n\n pub fn add_proof<'a>(\n\n &'a mut self,\n\n proof: Proof<(Option<String>, Option<usize>)>,\n\n ) -> Result<&'a Theorem, DatabaseError> {\n\n let proof = proof.map_id_result(|id| self.get_index(id.0.as_deref(), id.1))?;\n\n let new_theorem = match proof {\n\n Proof::Simplify(id, a, b) => {\n\n let theorem = &self.theorems[id].0;\n\n let mut new_theorem =\n\n theorem.substitute(&SingleSubstitution::new(a, b).unwrap())?;\n\n new_theorem.standardize();\n\n new_theorem\n\n }\n\n Proof::Combine(id_a, id_b, index) => {\n\n let theorem_a = &self.theorems[id_a].0;\n\n let theorem_b = &self.theorems[id_b].0;\n\n let mut new_theorem = theorem_a.combine(&theorem_b, index)?;\n\n new_theorem.standardize();\n\n new_theorem\n", "file_path": "src/serialization/database.rs", "rank": 61, "score": 20.61606490046185 }, { "content": "### Theorems\n\nA `Theorem` consists of zero or more assumptions (`Statement`s) and `DVR`s and one conclusion\n\n(also a `Statement`). This makes it possible to formulate something like _if a is provable and\n\na -> b is provable then b is provable_. 
In this case _a is provable_ and _a -> b is provable_\n\nare assumptions and _b is provable_ is a\n\nconclusion.\n\n\n\nThe structs interface guarantees that only valid theorems can be produced if only axioms are\n\nconstructed using `Theorem::new`, while still being able to crate any theorem that can be proven\n\nfrom the given axioms (these claims are not yet proven).\n\n\n\nIn `attomath`, unlike Metamath, all assumptions (called Hypothesis in Metamath) for a theorem\n\nare stored together with their corresponding theorem. This way variables carry no meaning\n\noutside a theorem, and the context for a statement is always the theorem of that statement.\n\nThis means that `attomath`s format is less compact, since \"variable types\" like _formula_ or\n\n_set variable_ have to be declared for every theorem. But it also leads to a more consistent\n\nformat where a theorem is self-contained.\n", "file_path": "README.md", "rank": 62, "score": 19.396005191348955 }, { "content": " }\n\n Proof::Axiom(ref theorem) => theorem.clone(),\n\n };\n\n self.theorems.push((new_theorem, proof, None));\n\n Ok(&self.theorems.last().unwrap().0)\n\n }\n\n\n\n pub fn substitute(&mut self, theorem: Theorem) -> Result<(), DatabaseError> {\n\n let last = &mut self\n\n .theorems\n\n .last_mut()\n\n .ok_or(DatabaseError::TheoremNotFound(None, None))?\n\n .0;\n\n let mut theorem_standardized = theorem.clone();\n\n theorem_standardized.standardize();\n\n if last == &theorem_standardized {\n\n *last = theorem;\n\n Ok(())\n\n } else {\n\n Err(DatabaseError::TheoremMismatch(theorem, last.clone()))\n", "file_path": "src/serialization/database.rs", "rank": 63, "score": 19.099582216709702 }, { "content": "use std::fmt::Write;\n\n\n\nuse super::error::GreedyError;\n\n\n\n/// ```\n\n/// use attomath::serialization::Formatter;\n\n/// use attomath::Expression;\n\n/// use attomath::Theorem;\n\n/// use attomath::Statement;\n\n/// use attomath::DVR;\n\n///\n\n/// let mut fmt = Formatter::new();\n\n/// 
fmt.add_operator(\"->\".to_owned(), 2);\n\n/// fmt.add_judgement(\"|-\".to_owned());\n\n///\n\n/// let conclusion = Statement {\n\n/// judgement: 0,\n\n/// expression: Expression::from_raw(vec![1].into_boxed_slice()).unwrap()\n\n/// };\n\n/// let assumptions = vec![\n", "file_path": "src/serialization/formatter.rs", "rank": 64, "score": 18.876382589692177 }, { "content": "use crate::types::*;\n\n\n\n/// An error which is produced when trying to proof something incorrect\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum ProofError {\n\n /// Error produced when trying to unify expressions with different operators (see\n\n /// [`Expression::unify`](../expression/struct.Expression.html#method.unify)). Contains the\n\n /// identifiers of the mismatched operators.\n\n OperatorMismatch(Identifier, Identifier),\n\n /// Error produced when trying to unify expressions where one variable would have to be\n\n /// substituted by different subexpressions (see\n\n /// [`Expression::unify`](../expression/struct.Expression.html#method.unify)). Contains the\n\n /// identifier for the variable and the mismatched subexpressions.\n\n VariableMismatch(Identifier, Box<[Identifier]>, Box<[Identifier]>),\n\n /// Error produced when trying to unify statements with different judgements (see\n\n /// [`Statement::unify`](../statement/struct.Statement.html#method.unify)). 
Contains the\n\n /// mismatched judgements.\n\n JudgementMismatch(Judgement, Judgement),\n\n /// Error produced when trying to create a theorem with conflicting dvrs (see\n\n /// [`DVR`](../dvr/struct.DVR.html)).\n\n DVRError(Identifier),\n\n}\n", "file_path": "src/error.rs", "rank": 65, "score": 18.693795490386524 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\npub struct Formatter {\n\n operators: Vec<(String, u8)>,\n\n judgements: Vec<String>,\n\n}\n\n\n\nimpl Formatter {\n\n pub fn new() -> Self {\n\n Formatter {\n\n operators: Vec::new(),\n\n judgements: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn operators(&self) -> impl Iterator<Item = (&str, u8)> {\n\n self.operators\n\n .iter()\n\n .map(|(operator, arity)| (operator.as_str(), *arity))\n\n }\n\n\n", "file_path": "src/serialization/formatter.rs", "rank": 66, "score": 18.34484638841416 }, { "content": " context(\"conclusion\", |input| self.parse_statement(input))(input)?;\n\n let (input, _) = eof(input)?;\n\n let mut dvrs = Vec::new();\n\n let mut assumptions = Vec::new();\n\n for (dvr, assumption) in dvrs_and_assumptions {\n\n if let Some(dvr) = dvr {\n\n dvrs.push(dvr);\n\n }\n\n if let Some(assumption) = assumption {\n\n assumptions.push(assumption);\n\n }\n\n }\n\n Ok((input, Theorem::new(conclusion, assumptions, dvrs)))\n\n },\n\n |input| {\n\n let (input, conclusion) =\n\n context(\"conclusion\", |input| self.parse_statement(input))(input)?;\n\n let (input, _) = eof(input)?;\n\n Ok((input, Theorem::new(conclusion, vec![], vec![])))\n\n },\n\n ))(input)\n\n }\n\n}\n", "file_path": "src/serialization/formatter.rs", "rank": 67, "score": 18.02905750389721 }, { "content": "impl<T: Borrow<[Identifier]>, S: Borrow<[Identifier]>> PartialEq<Statement<S>> for Statement<T> {\n\n fn eq(&self, other: &Statement<S>) -> bool {\n\n self.judgement == other.judgement && self.expression == other.expression\n\n }\n\n}\n\n\n\nimpl<T: Borrow<[Identifier]> + std::fmt::Debug> Statement<T> {\n\n /// Convenience function for 
unifying the expressions of two judgements (see\n\n /// [`Expression::unify`])\n\n ///\n\n /// # Errors\n\n /// * `JudgementMismatch` - if `self.judgement != other.judgement`\n\n /// * `VariableMismatch` or `OperatorMismatch` - if `self.expression.unify(other.expression)`\n\n /// fails\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use attomath::Statement;\n\n /// use attomath::expression::{Expression, WholeSubstitution};\n\n /// use attomath::error::ProofError;\n", "file_path": "src/statement.rs", "rank": 68, "score": 17.90121071283427 }, { "content": " match err {\n\n attomath::error::ProofError::OperatorMismatch(op_a, op_b) => eprintln!(\n\n \"operator mismatch: expected {} found {}\",\n\n fmt.operators().nth((-op_b) as usize - 1).unwrap().0,\n\n fmt.operators().nth((-op_a) as usize - 1).unwrap().0,\n\n ),\n\n attomath::error::ProofError::VariableMismatch(var, sub_a, sub_b) => {\n\n let mut sv = String::new();\n\n fmt.format_variable(&mut sv, var);\n\n let mut sa = String::new();\n\n fmt.format_expression(\n\n &mut sa,\n\n &Expression::from_raw(sub_b).unwrap(),\n\n );\n\n let mut sb = String::new();\n\n fmt.format_expression(\n\n &mut sb,\n\n &Expression::from_raw(sub_a).unwrap(),\n\n );\n\n eprintln!(\n", "file_path": "src/main.rs", "rank": 69, "score": 17.5291437383709 }, { "content": " /// [`Database`](../database/struct.Database.html)\n\n TheoremNotFound(Option<String>, Option<usize>),\n\n /// Error produced when trying to insert using a already used theorem id (see\n\n /// [`Database`](../database/struct.Database.html)\n\n NameCollision(String),\n\n TheoremMismatch(Theorem, Theorem),\n\n ProofError(ProofError),\n\n}\n\n\n\nimpl From<ProofError> for DatabaseError {\n\n fn from(e: ProofError) -> Self {\n\n Self::ProofError(e)\n\n }\n\n}\n\n\n\nimpl Database {\n\n pub fn new() -> Self {\n\n Self {\n\n names: HashMap::new(),\n\n theorems: Vec::new(),\n", "file_path": "src/serialization/database.rs", "rank": 70, "score": 17.36582438502145 }, { "content": " 
.add_proof(Proof::Combine(\n\n (None, None),\n\n (Some(\"b\".to_owned()), None),\n\n 0,\n\n ))\n\n .unwrap();\n\n database.add_name(\"c\".to_owned()).unwrap();\n\n\n\n let theorem = database.get(Some(\"c\"), None).unwrap();\n\n assert_eq!(theorem, &Theorem::new(c, vec![], vec![]));\n\n\n\n let mut fmt = Formatter::new();\n\n fmt.add_operator(\"A\".to_owned(), 0);\n\n fmt.add_operator(\"B\".to_owned(), 0);\n\n fmt.add_operator(\"C\".to_owned(), 0);\n\n fmt.add_judgement(\"|-\".to_owned());\n\n let s = serialize_database(&fmt, &database);\n\n assert_eq!(\n\n s,\n\n r#\"jdg |-\n", "file_path": "src/serialization/command.rs", "rank": 71, "score": 17.171280877626273 }, { "content": " }\n\n DatabaseError::ProofError(err) => {\n\n let (id_a, id_b, index) = match command {\n\n Command::Proof(Proof::Combine(id_a, id_b, index), _, _) => {\n\n (id_a, id_b, index)\n\n }\n\n _ => unreachable!(),\n\n };\n\n let theorem_a = database.get(id_a.0.as_deref(), id_a.1).unwrap();\n\n let theorem_b = database.get(id_b.0.as_deref(), id_b.1).unwrap();\n\n let mut sa = String::new();\n\n fmt.format_theorem(&mut sa, &theorem_a);\n\n let mut sb = String::new();\n\n fmt.format_theorem(&mut sb, &theorem_b);\n\n let mut ss = String::new();\n\n fmt.format_statement(&mut ss, &theorem_a.assumptions()[index]);\n\n eprintln!(\n\n \"proof error combining\\n{}\\ninto the argument {} of\\n{}\",\n\n sb, ss, sa\n\n );\n", "file_path": "src/main.rs", "rank": 72, "score": 17.050745447590874 }, { "content": "//! are stored together with their corresponding theorem. This way variables carry no meaning\n\n//! outside a theorem, and the context for a statement is always the theorem of that statement.\n\n//! This means that `attomath`s format is less compact, since \"variable types\" like _formula_ or\n\n//! _set variable_ have to be declared for every theorem. But it also leads to a more consistent\n\n//! 
format where a theorem is self-contained.\n\n\n\n#[cfg(feature = \"serialization\")]\n\nextern crate nom;\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate quickcheck;\n\n\n\nmod dvr;\n\npub mod error;\n\npub mod expression;\n\npub mod serialization;\n\nmod statement;\n\nmod theorem;\n\nmod types;\n\n\n\npub use dvr::*;\n\npub use expression::Expression;\n\npub use statement::*;\n\npub use theorem::*;\n\npub use types::*;\n", "file_path": "src/lib.rs", "rank": 73, "score": 16.602135838094174 }, { "content": " F: Fn(K) -> Result<K1, E>,\n\n {\n\n Ok(match self {\n\n Proof::Simplify(id, a, b) => Proof::Simplify(f(id)?, a, b),\n\n Proof::Combine(id_a, id_b, index) => Proof::Combine(f(id_a)?, f(id_b)?, index),\n\n Proof::Axiom(theorem) => Proof::Axiom(theorem),\n\n })\n\n }\n\n\n\n pub fn map_id<K1, F>(self, f: F) -> Proof<K1>\n\n where\n\n F: Fn(K) -> K1,\n\n {\n\n self.map_id_result::<_, _, ()>(|id| Ok(f(id))).unwrap()\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum DatabaseError {\n\n /// Error produced when trying to use a nonexistent theorem id (see\n", "file_path": "src/serialization/database.rs", "rank": 74, "score": 16.243161984183203 }, { "content": " pub fn judgements(&self) -> impl Iterator<Item = &str> {\n\n self.judgements.iter().map(|judgement| judgement.as_str())\n\n }\n\n\n\n pub fn add_operator(&mut self, operator: String, arity: u8) {\n\n // TODO: verify\n\n self.operators.push((operator, arity));\n\n }\n\n\n\n pub fn add_judgement(&mut self, judgement: String) {\n\n // TODO: verify\n\n self.judgements.push(judgement);\n\n }\n\n\n\n pub fn format_operator<T: Borrow<[Identifier]> + std::fmt::Debug>(\n\n &self,\n\n s: &mut String,\n\n id: Identifier,\n\n left: &Expression<T>,\n\n right: &Expression<T>,\n", "file_path": "src/serialization/formatter.rs", "rank": 75, "score": 16.238229574529026 }, { "content": " /// assert_eq!(res, Err(ProofError::JudgementMismatch(0, 1)));\n\n /// ```\n\n pub fn unify<'a, S: Borrow<[Identifier]>>(\n\n &'a self,\n\n other: 
&Statement<S>,\n\n substitution: &mut WholeSubstitution<'a>,\n\n ) -> Result<(), ProofError> {\n\n if self.judgement != other.judgement {\n\n return Err(ProofError::JudgementMismatch(\n\n self.judgement,\n\n other.judgement,\n\n ));\n\n }\n\n self.expression.unify(&other.expression, substitution)?;\n\n return Ok(());\n\n }\n\n\n\n /// Convenience function for using a `Substitution` on this statements expression (see\n\n /// [`Expression::substitute`])\n\n pub fn substitute<S: Substitution>(&self, substitution: &S) -> Statement<Box<[Identifier]>> {\n\n Statement {\n\n judgement: self.judgement,\n\n expression: self.expression.substitute(substitution),\n\n }\n\n }\n\n}\n", "file_path": "src/statement.rs", "rank": 76, "score": 16.166600658161276 }, { "content": " )\n\n .unwrap(),\n\n };\n\n let b = Statement {\n\n judgement: 0,\n\n expression: Expression::from_raw(\n\n vec![-2, Identifier::MIN, Identifier::MIN].into_boxed_slice(),\n\n )\n\n .unwrap(),\n\n };\n\n let c = Statement {\n\n judgement: 0,\n\n expression: Expression::from_raw(\n\n vec![-3, Identifier::MIN, Identifier::MIN].into_boxed_slice(),\n\n )\n\n .unwrap(),\n\n };\n\n\n\n database\n\n .add_proof(Proof::Axiom(Theorem::new(a.clone(), vec![], vec![])))\n", "file_path": "src/serialization/command.rs", "rank": 77, "score": 15.836126447173804 }, { "content": " }\n\n\n\n pub fn parse_arity<'a>(\n\n &self,\n\n arity: u8,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Expression<Box<[Identifier]>>, GreedyError<&'a str>> {\n\n if arity == 0 {\n\n let (input, o) = map_opt(is_not(\" ),\"), |s| {\n\n self.operators\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, (o, a))| if s == o && *a == 0 { Some(i) } else { None })\n\n .next()\n\n })(input)?;\n\n // TODO: remove unwrap\n\n Ok((\n\n input,\n\n Expression::from_raw(\n\n vec![-(o as Identifier) - 1, Identifier::MIN, Identifier::MIN]\n", "file_path": "src/serialization/formatter.rs", "rank": 78, "score": 15.647403267714603 }, { "content": " pub fn add_name(&mut 
self, name: String) -> Result<(), DatabaseError> {\n\n if self.theorems.is_empty() {\n\n return Err(DatabaseError::TheoremNotFound(None, Some(0)));\n\n }\n\n let index = self.theorems.len() - 1;\n\n if self.theorems[index].2.is_some() {\n\n return Err(DatabaseError::TheoremNotFound(None, None));\n\n }\n\n match self.names.entry(name.clone()) {\n\n std::collections::hash_map::Entry::Occupied(_) => {\n\n Err(DatabaseError::NameCollision(name))\n\n }\n\n std::collections::hash_map::Entry::Vacant(entry) => {\n\n &entry.insert((self.last_name, index + 1));\n\n self.last_name = index + 1;\n\n self.theorems[index].2 = Some(name.to_owned());\n\n Ok(())\n\n }\n\n }\n\n }\n", "file_path": "src/serialization/database.rs", "rank": 79, "score": 15.426815872409499 }, { "content": " pub fn parse_expression<'a>(\n\n &self,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Expression<Box<[Identifier]>>, GreedyError<&'a str>> {\n\n // TODO: remove unwrap\n\n alt((\n\n map(\n\n |input| self.parse_variable(input),\n\n |v| Expression::from_raw(vec![v].into_boxed_slice()).unwrap(),\n\n ),\n\n |input| self.parse_operator(input),\n\n ))(input)\n\n }\n\n\n\n pub fn format_judgement(&self, s: &mut String, judgement: Judgement) {\n\n s.push_str(&self.judgements[judgement as usize]);\n\n }\n\n\n\n pub fn parse_judgement<'a>(\n\n &self,\n", "file_path": "src/serialization/formatter.rs", "rank": 80, "score": 15.327894914808308 }, { "content": " |input| self.parse_arity(0, input),\n\n ))(input)\n\n }\n\n\n\n pub fn format_variable(&self, s: &mut String, mut id: Identifier) {\n\n assert!(id >= 0);\n\n id += 1;\n\n s.extend(\n\n std::iter::from_fn(move || {\n\n if id == 0 {\n\n None\n\n } else if id <= 26 {\n\n let c = ('a' as u8 + (id % 26) as u8 - 1) as char;\n\n id = 0;\n\n Some(c)\n\n } else {\n\n let c = ('a' as u8 + (id % 26) as u8) as char;\n\n id /= 26;\n\n Some(c)\n\n }\n", "file_path": "src/serialization/formatter.rs", "rank": 81, "score": 15.325967199103612 }, { "content": "//! 
## DVRs\n\n//! A [`DVR`] is a way of preventing two variables to be equal.\n\n//!\n\n//! In general it is assumed, that a [`Statement`] does not change its meaning if a\n\n//! variable is replaced with another variable, but this is not true in all cases. For this purpose\n\n//! one can specify that two variables should not be replaced with the same variable or expressions\n\n//! containing a common variable.\n\n//!\n\n//! ## Theorems\n\n//! A [`Theorem`] consists of zero or more assumptions ([`Statement`]s) and\n\n//! [`DVR`]s and one conclusion (also a [`Statement`]). This makes it possible to\n\n//! formulate something like _if a is provable and a -> b is provable then b is provable_. In this\n\n//! case _a is provable_ and _a -> b is provable_ are assumptions and _b is provable_ is a\n\n//! conclusion.\n\n//!\n\n//! The structs interface guarantees that only valid theorems can be produced if only axioms are\n\n//! constructed using [`Theorem::new`], while still being able to crate any theorem that can be\n\n//! proven from the given axioms.\n\n//!\n\n//! 
In `attomath`, unlike Metamath, all assumptions (called Hypothesis in Metamath) for a theorem\n", "file_path": "src/lib.rs", "rank": 82, "score": 15.282572817020856 }, { "content": "pub enum Command {\n\n Proof(\n\n Proof<(Option<String>, Option<usize>)>,\n\n Option<Theorem>,\n\n Option<String>,\n\n ),\n\n Judgement(String),\n\n Operator(String, u8),\n\n}\n\n\n\nimpl Command {\n\n pub fn apply(self, fmt: &mut Formatter, database: &mut Database) -> Result<(), DatabaseError> {\n\n match self {\n\n Command::Proof(proof, theorem, name) => {\n\n let _theorem = database.add_proof(proof)?;\n\n if let Some(name) = name {\n\n database.add_name(name)?;\n\n }\n\n if let Some(theorem) = theorem {\n\n database.substitute(theorem)?;\n", "file_path": "src/serialization/command.rs", "rank": 83, "score": 15.213482484346908 }, { "content": " input: &'a str,\n\n ) -> IResult<&'a str, Judgement, GreedyError<&'a str>> {\n\n let (input, judgement) = map_opt(is_not(\" \"), |s| {\n\n self.judgements\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, j)| if s == j { Some(i) } else { None })\n\n .next()\n\n })(input)?;\n\n Ok((input, judgement as u8))\n\n }\n\n\n\n pub fn format_statement<T: Borrow<[Identifier]> + std::fmt::Debug>(\n\n &self,\n\n s: &mut String,\n\n statement: &Statement<T>,\n\n ) {\n\n self.format_judgement(s, statement.judgement);\n\n s.push(' ');\n\n self.format_expression(s, &statement.expression);\n", "file_path": "src/serialization/formatter.rs", "rank": 84, "score": 15.043740173829725 }, { "content": " }\n\n }\n\n\n\n fn reverse_id(&self, id: usize, current_id: usize) -> (Option<&str>, Option<usize>) {\n\n if let Some(name) = &self.theorems[id].2 {\n\n (Some(name), None)\n\n } else if id == current_id - 1 {\n\n (None, None)\n\n } else if id >= self.last_name {\n\n (None, Some(id - self.last_name))\n\n } else {\n\n let name = self.theorems[id..]\n\n .iter()\n\n .filter_map(|x| x.2.as_ref())\n\n .next()\n\n .unwrap();\n\n let (start, end) = self.names[name];\n\n if end 
>= current_id {\n\n (None, Some(id - start))\n\n } else {\n", "file_path": "src/serialization/database.rs", "rank": 85, "score": 14.824866218132613 }, { "content": " }\n\n Command::Judgement(judgement) => {\n\n write!(s, \"jdg {}\", judgement).unwrap();\n\n }\n\n Command::Operator(operator, arity) => {\n\n write!(s, \"opr {} {}\", operator, arity).unwrap();\n\n }\n\n }\n\n }\n\n\n\n pub fn from_formatter<'a>(fmt: &'a Formatter) -> impl 'a + Iterator<Item = Self> {\n\n fmt.judgements()\n\n .map(|judgement| Command::Judgement(judgement.to_owned()))\n\n .chain(\n\n fmt.operators()\n\n .map(|(operator, arity)| Command::Operator(operator.to_owned(), arity)),\n\n )\n\n }\n\n\n\n pub fn from_database<'a>(database: &'a Database) -> impl 'a + Iterator<Item = Self> {\n", "file_path": "src/serialization/command.rs", "rank": 86, "score": 14.465906293011706 }, { "content": " (|| {\n\n let command = Command::parse(&fmt, line)?;\n\n\n\n command.apply(fmt, &mut database)?;\n\n Ok(())\n\n })()\n\n .map_err(|e| (line, e))?;\n\n }\n\n Ok(database)\n\n }\n\n\n\n #[test]\n\n fn a_b_c() {\n\n use crate::{expression::Expression, statement::Statement};\n\n let mut database = Database::new();\n\n\n\n let a = Statement {\n\n judgement: 0,\n\n expression: Expression::from_raw(\n\n vec![-1, Identifier::MIN, Identifier::MIN].into_boxed_slice(),\n", "file_path": "src/serialization/command.rs", "rank": 87, "score": 14.41662222234141 }, { "content": " pub fn serialize(&self, s: &mut String, fmt: &Formatter) {\n\n match self {\n\n Command::Proof(Proof::Simplify(id, a, b), theorem, name) => {\n\n s.push_str(\"smp \");\n\n self.serialize_id(s, id);\n\n s.push_str(\" (\");\n\n fmt.format_variable(s, *a);\n\n s.push_str(\" ~ \");\n\n fmt.format_variable(s, *b);\n\n s.push_str(\" )\");\n\n if let Some(theorem) = theorem {\n\n s.push_str(\" { \");\n\n fmt.format_theorem(s, theorem);\n\n s.push_str(\" }\");\n\n }\n\n if let Some(name) = name {\n\n write!(s, \": {}\", name).unwrap();\n\n }\n\n }\n\n 
Command::Proof(Proof::Combine(id_a, id_b, index), theorem, name) => {\n", "file_path": "src/serialization/command.rs", "rank": 88, "score": 14.179148161147847 }, { "content": " ///\n\n /// let st1 = Statement {\n\n /// judgement: 0,\n\n /// expression: Expression::from_raw([-2, 0, -2, 1, 0]).unwrap()\n\n /// };\n\n /// let st2 = Statement {\n\n /// judgement: 0,\n\n /// expression: Expression::from_raw([-2, 0, 1]).unwrap()\n\n /// };\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let res = st1.unify(&st2, &mut sub);\n\n /// assert!(res.is_ok());\n\n /// assert_eq!(st2.substitute(&sub), st1);\n\n ///\n\n /// let st2 = Statement {\n\n /// judgement: 1,\n\n /// expression: Expression::from_raw([-2, 0, 1]).unwrap()\n\n /// };\n\n /// let mut sub = WholeSubstitution::with_capacity(2);\n\n /// let res = st1.unify(&st2, &mut sub);\n", "file_path": "src/statement.rs", "rank": 89, "score": 13.901987473266992 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n\n\n use crate::Identifier;\n\n\n\n use super::*;\n\n\n\n #[derive(Debug)]\n\n enum Error<'a> {\n\n ParseError(nom::Err<GreedyError<&'a str>>),\n\n DatabaseError(DatabaseError),\n\n }\n\n\n\n impl<'a> From<nom::Err<GreedyError<&'a str>>> for Error<'a> {\n\n fn from(e: nom::Err<GreedyError<&'a str>>) -> Self {\n\n Self::ParseError(e)\n\n }\n\n }\n\n\n\n impl<'a> From<DatabaseError> for Error<'a> {\n", "file_path": "src/serialization/command.rs", "rank": 90, "score": 13.868551014688254 }, { "content": " }\n\n\n\n pub fn format_expression<T: Borrow<[Identifier]> + std::fmt::Debug>(\n\n &self,\n\n s: &mut String,\n\n expression: &Expression<T>,\n\n ) {\n\n let id = expression.to_slice().data()[0];\n\n if id == Identifier::MIN {\n\n return;\n\n }\n\n if is_operator(id) {\n\n let left = expression.subexpression(1);\n\n let right = expression.subexpression(1 + left.data().len());\n\n self.format_operator(s, id, &left, &right);\n\n } else {\n\n self.format_variable(s, id)\n\n }\n\n }\n\n\n", "file_path": 
"src/serialization/formatter.rs", "rank": 91, "score": 13.844216524875993 }, { "content": " (Some(name), Some(id - start))\n\n }\n\n }\n\n }\n\n\n\n pub fn proofs<'a>(\n\n &'a self,\n\n ) -> impl 'a + Iterator<Item = (&Theorem, Proof<(Option<&str>, Option<usize>)>, Option<&str>)>\n\n {\n\n self.theorems\n\n .iter()\n\n .enumerate()\n\n .map(move |(current_id, (theorem, proof, name))| {\n\n let proof = proof.clone().map_id(|id| self.reverse_id(id, current_id));\n\n (theorem, proof, name.as_deref())\n\n })\n\n }\n\n}\n", "file_path": "src/serialization/database.rs", "rank": 92, "score": 13.824240857707759 }, { "content": " last_name: 0,\n\n }\n\n }\n\n\n\n fn get_index(&self, name: Option<&str>, index: Option<usize>) -> Result<usize, DatabaseError> {\n\n let (start, end) = match name {\n\n Some(name) => *self\n\n .names\n\n .get(name)\n\n .ok_or(DatabaseError::TheoremNotFound(Some(name.to_owned()), index))?,\n\n None => (self.last_name, self.theorems.len()),\n\n };\n\n match index {\n\n Some(i) => {\n\n if start + i < end {\n\n Ok(start + i)\n\n } else {\n\n Err(DatabaseError::TheoremNotFound(\n\n name.map(|s| s.to_owned()),\n\n index,\n", "file_path": "src/serialization/database.rs", "rank": 93, "score": 13.589838148588392 }, { "content": " ))\n\n }\n\n }\n\n None => {\n\n if start == end {\n\n Err(DatabaseError::TheoremNotFound(\n\n name.map(|s| s.to_owned()),\n\n index,\n\n ))\n\n } else {\n\n Ok(end - 1)\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn get(&self, name: Option<&str>, index: Option<usize>) -> Result<&Theorem, DatabaseError> {\n\n Ok(&self.theorems[self.get_index(name, index)?].0)\n\n }\n\n\n", "file_path": "src/serialization/database.rs", "rank": 94, "score": 13.446132458788878 }, { "content": " .unwrap();\n\n\n\n database\n\n .add_proof(Proof::Axiom(Theorem::new(b.clone(), vec![], vec![])))\n\n .unwrap();\n\n database.add_name(\"b\".to_owned()).unwrap();\n\n\n\n database\n\n .add_proof(Proof::Axiom(Theorem::new(c.clone(), vec![a, b], vec![])))\n\n 
.unwrap();\n\n database.add_name(\"abc\".to_owned()).unwrap();\n\n\n\n database\n\n .add_proof(Proof::Combine(\n\n (Some(\"abc\".to_owned()), None),\n\n (Some(\"b\".to_owned()), Some(0)),\n\n 0,\n\n ))\n\n .unwrap();\n\n database\n", "file_path": "src/serialization/command.rs", "rank": 95, "score": 13.375213805774099 }, { "content": " }\n\n }\n\n for (i, assumption) in assumptions.iter().enumerate() {\n\n self.format_statement(s, assumption);\n\n if i != assumptions.len() - 1 {\n\n s.push_str(\", \");\n\n }\n\n }\n\n s.push_str(\" => \");\n\n }\n\n self.format_statement(s, theorem.conclusion());\n\n }\n\n\n\n pub fn parse_theorem<'a>(\n\n &self,\n\n input: &'a str,\n\n ) -> IResult<&'a str, Theorem, GreedyError<&'a str>> {\n\n alt((\n\n |input| {\n\n let (input, dvrs_and_assumptions) = context(\n", "file_path": "src/serialization/formatter.rs", "rank": 96, "score": 13.23795282375125 }, { "content": " Err(err) => {\n\n eprint!(\"Error in line {}: \", line_number);\n\n match err {\n\n DatabaseError::TheoremNotFound(name, id) => match (name, id) {\n\n (Some(name), Some(id)) => eprint!(\"theorem {}.{} not found\", name, id),\n\n (Some(name), None) => eprintln!(\"theorem {} not found\", name),\n\n (None, Some(id)) => eprint!(\"theorem {} not found\", id),\n\n (None, None) => eprintln!(\"theorem $ not found\"),\n\n },\n\n DatabaseError::NameCollision(name) => {\n\n eprintln!(\"{} already defined\", name)\n\n }\n\n DatabaseError::TheoremMismatch(theorem_a, theorem_b) => {\n\n eprintln!(\"theorem mismatch\");\n\n let mut sa = String::new();\n\n fmt.format_theorem(&mut sa, &theorem_a);\n\n let mut sb = String::new();\n\n fmt.format_theorem(&mut sb, &theorem_b);\n\n eprintln!(\"expected: {}\", sb);\n\n eprintln!(\" found: {}\", sa);\n", "file_path": "src/main.rs", "rank": 97, "score": 13.227945434569849 }, { "content": " .into_boxed_slice(),\n\n )\n\n .unwrap(),\n\n ))\n\n } else if arity == 1 {\n\n let (input, _) = char('(')(input)?;\n\n let (input, o) = map_opt(is_not(\" 
),\"), |s| {\n\n self.operators\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, (o, a))| if s == o && *a == 1 { Some(i) } else { None })\n\n .next()\n\n })(input)?;\n\n let (input, _) = char(' ')(input)?;\n\n let (input, left) = self.parse_expression(input)?;\n\n let (input, _) = char(')')(input)?;\n\n let mut data = Vec::with_capacity(left.data().len() + 2);\n\n data.push(-(o as Identifier) - 1);\n\n data.extend_from_slice(left.data());\n\n data.push(Identifier::MIN);\n", "file_path": "src/serialization/formatter.rs", "rank": 98, "score": 13.154695124582233 }, { "content": "use nom::{\n\n branch::alt,\n\n bytes::complete::{is_not, tag, take_while1},\n\n character::complete::char,\n\n combinator::{eof, map, map_opt, verify},\n\n error::context,\n\n multi::separated_list1,\n\n IResult,\n\n};\n\n\n\n// TODO\n\nuse crate::{\n\n dvr::DVR,\n\n expression::{is_operator, Expression},\n\n statement::Statement,\n\n theorem::Theorem,\n\n types::*,\n\n OwnedStatement,\n\n};\n\nuse std::borrow::Borrow;\n", "file_path": "src/serialization/formatter.rs", "rank": 99, "score": 12.920317421539059 } ]
Rust
libpf-rs/src/filter.rs
kckeiks/pf-rs
608fb83a3b583eb03af63a61d90d7dba0d70d296
use std::fs::File; use std::io::Write; use std::path::Path; use anyhow::Result; use tempfile::tempdir; use crate::bpf::{BPFLink, BPFObj}; use crate::bpfcode::{ DEFINES, EVAL_BOTH_IPVER, EVAL_NOOP, EVAL_ONLY_IP4, EVAL_ONLY_IP6, INCLUDE_HEADERS, IP4RULES_MAPS, IP4_EVAL_FUNCS, IP6RULES_MAPS, IP6_EVAL_FUNCS, PARSERS, PROGRAM, STRUCTS, VMLINUX, }; use crate::error::Error; use crate::rule::{Action, InnerRule, RawRule, Rule}; use crate::{bpf, compile}; #[derive(Debug)] pub struct Filter { default_act: Action, ipv4_rules: Vec<RawRule>, ipv6_rules: Vec<RawRule>, } impl Filter { pub fn new() -> Self { Filter { default_act: Action::Pass, ipv4_rules: Vec::new(), ipv6_rules: Vec::new(), } } pub fn add_rule(&mut self, rule: Rule) { match rule.get_rule() { InnerRule::IPv6Rule(r) => self.ipv6_rules.push(r), InnerRule::IPv4Rule(r) => self.ipv4_rules.push(r), InnerRule::DefaultRule(a) => self.default_act = a, } } pub fn load_on(self, ifindex: i32) -> Result<BPFLink> { let mut bpf_obj = self .generate_and_load() .map_err(|e| Error::Internal(e.to_string()))?; for (i, rule) in self.ipv4_rules.into_iter().enumerate() { let initial_value = bincode2::serialize(&rule).map_err(|e| Error::Internal(e.to_string()))?; let index = bincode2::serialize(&(i as u32)).map_err(|e| Error::Internal(e.to_string()))?; bpf_obj .update_map("ipv4_rules", &index, &initial_value, 0) .map_err(|e| Error::Internal(e.to_string()))?; } for (i, rule) in self.ipv6_rules.into_iter().enumerate() { let initial_value = bincode2::serialize(&rule).map_err(|e| Error::Internal(e.to_string()))?; let index = bincode2::serialize(&(i as u32)).map_err(|e| Error::Internal(e.to_string()))?; bpf_obj .update_map("ipv6_rules", &index, &initial_value, 0) .map_err(|e| Error::Internal(e.to_string()))?; } let link = bpf_obj .attach_prog(ifindex) .map_err(|e| Error::Internal(e.to_string()))?; Ok(link) } pub fn generate_src(self) -> Result<()> { let filename = "pfdebug"; let src_dir = Path::new("./target/"); let hdr_path = 
src_dir.join("vmlinux.h"); let _hdr = generate_vmlinux_file(hdr_path.as_path())?; let src_path = src_dir.join(format!("{}.bpf.c", filename)); self.generate_src_file(src_path.as_path())?; let obj_path = src_dir.join(format!("{}.o", filename)); compile::compile(src_path.as_path(), obj_path.as_path())?; Ok(()) } fn generate_and_load(&self) -> Result<BPFObj> { let filename = "pf"; let src_dir = tempdir().expect("error creating temp dir"); let hdr_path = src_dir.path().join("vmlinux.h"); let hdr = generate_vmlinux_file(hdr_path.as_path())?; let src_path = src_dir.path().join(format!("{}.bpf.c", filename)); let src = self.generate_src_file(src_path.as_path())?; let obj_dir = tempdir().expect("error creating temp dir"); let obj_path = obj_dir.path().join(format!("{}.o", filename)); compile::compile(src_path.as_path(), obj_path.as_path())?; let bpf_obj = bpf::BPFObj::load_from_file(obj_path).map_err(|e| Error::Internal(e.to_string()))?; drop(hdr); drop(src); if let Err(e) = src_dir.close() { println!("error closing dir: {}", e.to_string()); } if let Err(e) = obj_dir.close() { println!("error closing dir: {}", e.to_string()); } Ok(bpf_obj) } fn generate_src_file(&self, path: &Path) -> Result<File> { let mut src = File::create(path).map_err(|e| Error::Internal(e.to_string()))?; src.write_all(INCLUDE_HEADERS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(DEFINES.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all( format!( "\ #define DEFAULT_ACTION {}\n\ #define IPV4_RULE_COUNT {}\n\ #define IPV6_RULE_COUNT {}\n", self.default_act as u32, self.ipv4_rules.len(), self.ipv6_rules.len() ) .as_bytes(), ) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(STRUCTS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(PARSERS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; if !self.ipv4_rules.is_empty() { src.write_all(IP4RULES_MAPS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; 
src.write_all(IP4_EVAL_FUNCS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; } if !self.ipv6_rules.is_empty() { src.write_all(IP6RULES_MAPS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(IP6_EVAL_FUNCS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; } match (self.ipv6_rules.is_empty(), self.ipv4_rules.is_empty()) { (true, true) => src .write_all(EVAL_NOOP.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, (true, false) => src .write_all(EVAL_ONLY_IP4.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, (false, true) => src .write_all(EVAL_ONLY_IP6.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, (false, false) => src .write_all(EVAL_BOTH_IPVER.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, } src.write_all(PROGRAM.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; Ok(src) } } fn generate_vmlinux_file(path: &Path) -> Result<File> { let mut hdr = File::create(path).map_err(|e| Error::Internal(e.to_string()))?; if let Err(e) = hdr.write_all(VMLINUX.as_bytes()) { panic!("{}", e.to_string()); } Ok(hdr) }
use std::fs::File; use std::io::Write; use std::path::Path; use anyhow::Result; use tempfile::tempdir; use crate::bpf::{BPFLink, BPFObj}; use crate::bpfcode::{ DEFINES, EVAL_BOTH_IPVER, EVAL_NOOP, EVAL_ONLY_IP4, EVAL_ONLY_IP6, INCLUDE_HEADERS, IP4RULES_MAPS, IP4_EVAL_FUNCS, IP6RULES_MAPS, IP6_EVAL_FUNCS, PARSERS, PROGRAM, STRUCTS, VMLINUX, }; use crate::error::Error; use crate::rule::{Action, InnerRule, RawRule, Rule}; use crate::{bpf, compile}; #[derive(Debug)] pub struct Filter { default_act: Action, ipv4_rules: Vec<RawRule>, ipv6_rules: Vec<RawRule>, } impl Filter { pub fn new() -> Self { Filter { default_act: Action::Pass, ipv4_rules: Vec::new(), ipv6_rules: Vec::new(), } } pub fn add_rule(&mut self, rule: Rule) { match rule.get_rule() { InnerRule::IPv6Rule(r) => self.ipv6_rules.push(r), InnerRule::IPv4Rule(r) => self.ipv4_rules.push(r), InnerRule::DefaultRule(a) => self.default_act = a, } } pub fn load_on(self, ifindex: i32) -> Result<BPFLink> { let mut bpf_obj = self .generate_and_load() .map_err(|e| Error::Internal(e.to_string()))?; for (i, rule) in self.ipv4_rules.into_iter().enumerate() { let initial_value = bincode2::serialize(&rule).map_err(|e| Error::Internal(e.to_string()))?; let index = bincode2::serialize(&(i as u32)).map_err(|e| Error::Internal(e.to_string()))?; bpf_obj
.map_err(|e| Error::Internal(e.to_string()))?; } let link = bpf_obj .attach_prog(ifindex) .map_err(|e| Error::Internal(e.to_string()))?; Ok(link) } pub fn generate_src(self) -> Result<()> { let filename = "pfdebug"; let src_dir = Path::new("./target/"); let hdr_path = src_dir.join("vmlinux.h"); let _hdr = generate_vmlinux_file(hdr_path.as_path())?; let src_path = src_dir.join(format!("{}.bpf.c", filename)); self.generate_src_file(src_path.as_path())?; let obj_path = src_dir.join(format!("{}.o", filename)); compile::compile(src_path.as_path(), obj_path.as_path())?; Ok(()) } fn generate_and_load(&self) -> Result<BPFObj> { let filename = "pf"; let src_dir = tempdir().expect("error creating temp dir"); let hdr_path = src_dir.path().join("vmlinux.h"); let hdr = generate_vmlinux_file(hdr_path.as_path())?; let src_path = src_dir.path().join(format!("{}.bpf.c", filename)); let src = self.generate_src_file(src_path.as_path())?; let obj_dir = tempdir().expect("error creating temp dir"); let obj_path = obj_dir.path().join(format!("{}.o", filename)); compile::compile(src_path.as_path(), obj_path.as_path())?; let bpf_obj = bpf::BPFObj::load_from_file(obj_path).map_err(|e| Error::Internal(e.to_string()))?; drop(hdr); drop(src); if let Err(e) = src_dir.close() { println!("error closing dir: {}", e.to_string()); } if let Err(e) = obj_dir.close() { println!("error closing dir: {}", e.to_string()); } Ok(bpf_obj) } fn generate_src_file(&self, path: &Path) -> Result<File> { let mut src = File::create(path).map_err(|e| Error::Internal(e.to_string()))?; src.write_all(INCLUDE_HEADERS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(DEFINES.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all( format!( "\ #define DEFAULT_ACTION {}\n\ #define IPV4_RULE_COUNT {}\n\ #define IPV6_RULE_COUNT {}\n", self.default_act as u32, self.ipv4_rules.len(), self.ipv6_rules.len() ) .as_bytes(), ) .map_err(|e| Error::Internal(e.to_string()))?; 
src.write_all(STRUCTS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(PARSERS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; if !self.ipv4_rules.is_empty() { src.write_all(IP4RULES_MAPS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(IP4_EVAL_FUNCS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; } if !self.ipv6_rules.is_empty() { src.write_all(IP6RULES_MAPS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; src.write_all(IP6_EVAL_FUNCS.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; } match (self.ipv6_rules.is_empty(), self.ipv4_rules.is_empty()) { (true, true) => src .write_all(EVAL_NOOP.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, (true, false) => src .write_all(EVAL_ONLY_IP4.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, (false, true) => src .write_all(EVAL_ONLY_IP6.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, (false, false) => src .write_all(EVAL_BOTH_IPVER.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?, } src.write_all(PROGRAM.as_bytes()) .map_err(|e| Error::Internal(e.to_string()))?; Ok(src) } } fn generate_vmlinux_file(path: &Path) -> Result<File> { let mut hdr = File::create(path).map_err(|e| Error::Internal(e.to_string()))?; if let Err(e) = hdr.write_all(VMLINUX.as_bytes()) { panic!("{}", e.to_string()); } Ok(hdr) }
.update_map("ipv4_rules", &index, &initial_value, 0) .map_err(|e| Error::Internal(e.to_string()))?; } for (i, rule) in self.ipv6_rules.into_iter().enumerate() { let initial_value = bincode2::serialize(&rule).map_err(|e| Error::Internal(e.to_string()))?; let index = bincode2::serialize(&(i as u32)).map_err(|e| Error::Internal(e.to_string()))?; bpf_obj .update_map("ipv6_rules", &index, &initial_value, 0)
random
[ { "content": "pub fn load_filter(rules: Vec<Rule>, ifindex: i32) -> Result<BPFLink> {\n\n let mut f = Filter::new();\n\n for r in rules.into_iter() {\n\n f.add_rule(r);\n\n }\n\n Ok(f.load_on(ifindex)?)\n\n}\n\n\n", "file_path": "pf-rs/src/main.rs", "rank": 0, "score": 148013.19637603714 }, { "content": "pub fn generate_filter(rules: Vec<Rule>) -> Result<()> {\n\n let mut f = Filter::new();\n\n for r in rules.into_iter() {\n\n f.add_rule(r);\n\n }\n\n f.generate_src().map_err(|e| anyhow!(e))\n\n}\n", "file_path": "pf-rs/src/main.rs", "rank": 1, "score": 111464.39072503506 }, { "content": "pub fn compile(src: &Path, dst: &Path) -> Result<()> {\n\n let clang = PathBuf::from(\"clang\");\n\n let mut cmd = Command::new(clang.as_os_str());\n\n\n\n let libbpf_dir = setup_libbpf_headers()?;\n\n let options = format!(\"-I{}\", libbpf_dir.as_ref().to_str().unwrap());\n\n cmd.args(options.split_whitespace());\n\n\n\n let arch = match std::env::consts::ARCH {\n\n \"x86_64\" => \"x86\",\n\n \"aarch64\" => \"arm64\",\n\n _ => std::env::consts::ARCH,\n\n };\n\n\n\n cmd.arg(\"-g\")\n\n .arg(\"-O2\")\n\n .arg(\"-target\")\n\n .arg(\"bpf\")\n\n .arg(\"-c\")\n\n .arg(format!(\"-D__TARGET_ARCH_{}\", arch))\n", "file_path": "libpf-rs/src/compile.rs", "rank": 2, "score": 82062.40802124905 }, { "content": "pub fn setup_libbpf_headers() -> Result<TempDir> {\n\n let tmpdir = tempdir().map_err(|e| Error::Build(e.to_string()))?;\n\n let hdrs_dir = tmpdir.path().join(\"bpf\");\n\n fs::create_dir_all(&hdrs_dir).map_err(|e| Error::Build(e.to_string()))?;\n\n\n\n for (filename, data) in libbpf_sys::API_HEADERS.iter() {\n\n let path = hdrs_dir.as_path().join(filename);\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .open(path)\n\n .map_err(|e| Error::Build(e.to_string()))?;\n\n file.write_all(data.as_bytes())\n\n .map_err(|e| Error::Build(e.to_string()))?;\n\n }\n\n Ok(tmpdir)\n\n}\n", "file_path": "libpf-rs/src/compile.rs", "rank": 3, "score": 78617.23912264695 }, 
{ "content": "struct rule {\n\n __u32 action;\n\n __u32 quick;\n\n __u32 proto;\n\n __be16 sport;\n\n __be16 dport;\n\n\n\n struct ip4_addr ip4_addr;\n\n struct ip6_addr ip6_addr;\n\n};\";\n\n\n\npub const IP4RULES_MAPS: &str = r#\"\\\n\nstruct {\n\n __uint(type, BPF_MAP_TYPE_ARRAY);\n\n __uint(max_entries, IPV4_RULE_COUNT);\n\n __type(key, __u32);\n\n __type(value, struct rule);\n\n} ipv4_rules SEC(\".maps\");\"#;\n\n\n\npub const IP6RULES_MAPS: &str = r#\"\\\n\nstruct {\n\n __uint(type, BPF_MAP_TYPE_ARRAY);\n\n __uint(max_entries, IPV6_RULE_COUNT);\n\n __type(key, __u32);\n\n __type(value, struct rule);\n\n} ipv6_rules SEC(\".maps\");\"#;\n\n\n\npub const PARSERS: &str = r##\"\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 4, "score": 77995.86584536996 }, { "content": "#[derive(Debug)]\n\nstruct Parts {\n\n action: Action,\n\n is_ipv6: bool,\n\n quick: bool,\n\n proto: Proto,\n\n saddr: Option<SocketAddr>,\n\n daddr: Option<SocketAddr>,\n\n}\n\n\n\nimpl Default for Parts {\n\n fn default() -> Self {\n\n Parts {\n\n action: Action::Pass,\n\n is_ipv6: false,\n\n quick: false,\n\n proto: Proto::Any,\n\n saddr: None,\n\n daddr: None,\n\n }\n\n }\n", "file_path": "libpf-rs/src/rule.rs", "rank": 6, "score": 61682.21382392112 }, { "content": "pub fn get_zero_addr(ipv6: bool) -> SocketAddr {\n\n // These functions should not fail\n\n let ip_addr = if ipv6 {\n\n IpAddr::V6(Ipv6Addr::from_str(\"::\").unwrap())\n\n } else {\n\n IpAddr::V4(Ipv4Addr::from_str(\"0.0.0.0\").unwrap())\n\n };\n\n SocketAddr::new(ip_addr, 0)\n\n}\n", "file_path": "libpf-rs/src/ip.rs", "rank": 7, "score": 56462.500012071076 }, { "content": "#[derive(ClapParser)]\n\n#[clap(name = \"pf\")]\n\n#[clap(author = \"Fausto Miguel Guarniz <mi9uel9@gmail.com>\")]\n\n#[clap(version = \"0.1.0\")]\n\n#[clap(about = \"eBPF-based packet filter for Rust\", long_about = None)]\n\nstruct Cli {\n\n /// index of device where filter should be attached to\n\n ifindex: i32,\n\n\n\n /// path to config file\n\n 
#[clap(short, long, parse(from_os_str), value_name = \"FILE\")]\n\n config: Option<PathBuf>,\n\n\n\n #[clap(short)]\n\n /// Only generate .c and .o files for filter\n\n generate: bool,\n\n}\n\n\n", "file_path": "pf-rs/src/main.rs", "rank": 8, "score": 38180.42507839902 }, { "content": "fn main() {\n\n let cli = Cli::parse();\n\n\n\n let mut config = PathBuf::from_str(\"/etc/pfrs/pfrs.conf\").unwrap();\n\n if let Some(path) = cli.config.as_deref() {\n\n config = PathBuf::from(path);\n\n }\n\n\n\n let l = Lexer::from_file(config.as_path().to_str().unwrap()).unwrap();\n\n\n\n let pre_proc = PreProc::new(l);\n\n let tokens = pre_proc.preprocess().unwrap();\n\n\n\n let parser = Parser::new(tokens);\n\n let rules = parser.parse_statements().unwrap();\n\n\n\n if cli.generate {\n\n generate_filter(rules).unwrap();\n\n return;\n\n }\n", "file_path": "pf-rs/src/main.rs", "rank": 9, "score": 38175.79947710938 }, { "content": "struct ip6_addr {\n\n __u8 saddr[16];\n\n __u8 daddr[16];\n\n};\n\n\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 10, "score": 37136.34838521122 }, { "content": "struct BPFMap {\n\n #[allow(dead_code)]\n\n map_ptr: *mut libbpf_sys::bpf_map,\n\n fd: i32,\n\n key_size: u32,\n\n val_size: u32,\n\n}\n\n\n\nimpl BPFMap {\n\n fn new(map_ptr: *mut libbpf_sys::bpf_map, fd: i32, key_size: u32, val_size: u32) -> Self {\n\n BPFMap {\n\n map_ptr,\n\n fd,\n\n key_size,\n\n val_size,\n\n }\n\n }\n\n\n\n fn update_map(&mut self, key: &[u8], value: &[u8], flags: u64) -> Result<()> {\n\n if key.len() != self.key_size as usize {\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 11, "score": 37136.34838521122 }, { "content": "struct ip4_addr {\n\n __be32 saddr;\n\n __be32 daddr;\n\n};\n\n\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 12, "score": 37136.34838521122 }, { "content": "struct hdr_cursor {\n\n void *pos;\n\n};\n\n\n\nstatic int parse_ethhdr(struct hdr_cursor *nh, void *data_end, struct ethhdr **ethhdr)\n\n{\n\n struct ethhdr *eth = nh->pos;\n\n\n\n if 
(eth + 1 > data_end)\n\n return -1;\n\n\n\n nh->pos = eth + 1;\n\n *ethhdr = eth;\n\n return eth->h_proto;\n\n}\n\n\n\nstatic int parse_ip4hdr(struct hdr_cursor *nh, void *data_end, struct iphdr **iphdr)\n\n{\n\n struct iphdr *iph = nh->pos;\n\n int hdrsize;\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 13, "score": 37136.34838521122 }, { "content": "struct BPFProg {\n\n ptr: *mut libbpf_sys::bpf_program,\n\n}\n\n\n\nimpl BPFProg {\n\n fn new(ptr: *mut libbpf_sys::bpf_program) -> Self {\n\n BPFProg { ptr: ptr }\n\n }\n\n\n\n fn attach_xdp(&mut self, ifindex: i32) -> Result<BPFLink> {\n\n let ptr = unsafe { libbpf_sys::bpf_program__attach_xdp(self.ptr, ifindex) };\n\n let err = unsafe { libbpf_sys::libbpf_get_error(ptr as *const _) };\n\n if err != 0 {\n\n bail!(\"error {}: could not attach prog to xdp hook\", err as i32);\n\n }\n\n\n\n Ok(BPFLink { ptr })\n\n }\n\n}\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 14, "score": 37136.34838521122 }, { "content": "pub trait ToSockAddr {\n\n fn to_sock_addr(&self) -> Result<SocketAddr, AddrParseError>;\n\n}\n\n\n\nimpl ToSockAddr for &str {\n\n fn to_sock_addr(&self) -> Result<SocketAddr, AddrParseError> {\n\n SocketAddr::from_str(self).or_else(|_| Ok(SocketAddr::new(IpAddr::from_str(self)?, 0)))\n\n }\n\n}\n\n\n\nimpl ToSockAddr for IpAddr {\n\n fn to_sock_addr(&self) -> Result<SocketAddr, AddrParseError> {\n\n Ok(SocketAddr::new(*self, 0))\n\n }\n\n}\n\n\n\nimpl ToSockAddr for SocketAddr {\n\n fn to_sock_addr(&self) -> Result<SocketAddr, AddrParseError> {\n\n Ok(*self)\n\n }\n\n}\n\n\n", "file_path": "libpf-rs/src/ip.rs", "rank": 15, "score": 35664.67043061107 }, { "content": " .arg(src.as_os_str())\n\n .arg(\"-o\")\n\n .arg(dst);\n\n\n\n let output = cmd.output().map_err(|e| Error::Build(e.to_string()))?;\n\n\n\n if !output.status.success() {\n\n bail!(Error::Build(format!(\n\n \"clang failed to compile BPF program: {:?}\",\n\n output\n\n )));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": 
"libpf-rs/src/compile.rs", "rank": 16, "score": 25448.061617377 }, { "content": "use std::fs;\n\nuse std::fs::OpenOptions;\n\nuse std::io::Write;\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::Command;\n\n\n\nuse anyhow::{bail, Result};\n\nuse tempfile::{tempdir, TempDir};\n\n\n\nuse crate::error::Error;\n\n\n", "file_path": "libpf-rs/src/compile.rs", "rank": 17, "score": 25447.487705831438 }, { "content": "use std::iter::Peekable;\n\nuse std::vec::IntoIter;\n\n\n\nuse anyhow::{bail, Result};\n\n\n\nuse libpf_rs::rule::{Builder, Rule};\n\n\n\nuse crate::token::Token;\n\n\n\npub struct Parser {\n\n tokens: Peekable<IntoIter<Token>>,\n\n rules: Vec<Rule>,\n\n}\n\n\n\nimpl Parser {\n\n pub fn new(tokens: Vec<Token>) -> Self {\n\n Parser {\n\n tokens: tokens.into_iter().peekable(),\n\n rules: Vec::new(),\n\n }\n", "file_path": "pf-rs/src/parser.rs", "rank": 18, "score": 25405.447504377917 }, { "content": " .expect(\"expected dst IP after `to`\")\n\n .as_str(),\n\n );\n\n\n\n if self.peek_then_read(|t| matches!(t, Token::Port)).is_some() {\n\n let port = self.read_arg().expect(\"missing dst port after `port`\");\n\n builder = builder.to_port(port.parse::<u16>()?);\n\n }\n\n\n\n self.rules.push(builder.build()?);\n\n Ok(())\n\n }\n\n\n\n pub fn parse_statements(mut self) -> Result<Vec<Rule>> {\n\n loop {\n\n if self.tokens.peek().is_none() {\n\n break;\n\n }\n\n self.parse_statement()?;\n\n }\n\n Ok(self.rules)\n\n }\n\n}\n", "file_path": "pf-rs/src/parser.rs", "rank": 19, "score": 25395.59694086361 }, { "content": " None => panic!(\"{}\", msg),\n\n }\n\n }\n\n\n\n fn read_arg(&mut self) -> Option<String> {\n\n match self.tokens.next()? 
{\n\n Token::Val(s) => Some(s),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn parse_statement(&mut self) -> Result<()> {\n\n let mut builder = Builder::new();\n\n\n\n if self.peek_then_read(|t| matches!(t, Token::Pass)).is_some() {\n\n builder = builder.pass();\n\n } else if self.peek_then_read(|t| matches!(t, Token::Block)).is_some() {\n\n builder = builder.block();\n\n } else {\n\n bail!(\"expected `pass` or `block` token\");\n", "file_path": "pf-rs/src/parser.rs", "rank": 20, "score": 25395.26989513119 }, { "content": " }\n\n\n\n fn peek_then_read<P>(&mut self, p: P) -> Option<Token>\n\n where\n\n P: FnOnce(&Token) -> bool,\n\n {\n\n if let Some(token) = self.tokens.peek() {\n\n if p(token) {\n\n return Some(self.tokens.next().unwrap());\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn read_or_die<P>(&mut self, p: P, msg: &str) -> Token\n\n where\n\n P: FnOnce(&Token) -> bool,\n\n {\n\n match self.peek_then_read(p) {\n\n Some(t) => t,\n", "file_path": "pf-rs/src/parser.rs", "rank": 21, "score": 25395.170633138525 }, { "content": " }\n\n\n\n // self.read_or_die(|t| matches!(t, Token::Proto), \"expected token `proto`\");\n\n // builder = builder.proto(self.read_arg().expect(\"expected protocol after `proto`\"));\n\n\n\n self.read_or_die(|t| matches!(t, Token::From), \"expected token `from`\");\n\n builder = builder.from_addr(\n\n self.read_arg()\n\n .expect(\"expected src IP after `to`\")\n\n .as_str(),\n\n );\n\n\n\n if self.peek_then_read(|t| matches!(t, Token::Port)).is_some() {\n\n let port = self.read_arg().expect(\"missing src port after `port`\");\n\n builder = builder.from_port(port.parse::<u16>()?);\n\n }\n\n\n\n self.read_or_die(|t| matches!(t, Token::To), \"expected token `to`\");\n\n builder = builder.to_addr(\n\n self.read_arg()\n", "file_path": "pf-rs/src/parser.rs", "rank": 22, "score": 25390.708357659074 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub struct Builder {\n\n inner: Result<Parts>,\n\n}\n\n\n\nimpl Builder {\n\n pub fn new() -> Self {\n\n Builder {\n\n 
inner: Ok(Parts::default()),\n\n }\n\n }\n\n\n\n pub fn pass(self) -> Builder {\n\n self.and_then(move |mut parts| {\n\n parts.action = Action::Pass;\n\n Ok(parts)\n\n })\n\n }\n", "file_path": "libpf-rs/src/rule.rs", "rank": 32, "score": 24631.136471979513 }, { "content": " Ok(Rule {\n\n inner: InnerRule::DefaultRule(Action::Pass),\n\n })\n\n })\n\n }\n\n\n\n pub fn block_all(self) -> Result<Rule> {\n\n self.inner.and_then(|_| {\n\n Ok(Rule {\n\n inner: InnerRule::DefaultRule(Action::Block),\n\n })\n\n })\n\n }\n\n\n\n pub fn build(self) -> Result<Rule> {\n\n self.inner.and_then(|parts| {\n\n let mut raw_rule = RawRule::default();\n\n\n\n let is_ipv6: bool;\n\n\n", "file_path": "libpf-rs/src/rule.rs", "rank": 33, "score": 24627.4889979045 }, { "content": "\n\n pub fn block(self) -> Builder {\n\n self.and_then(move |mut parts| {\n\n parts.action = Action::Block;\n\n Ok(parts)\n\n })\n\n }\n\n\n\n pub fn quick(self) -> Builder {\n\n self.and_then(|mut parts| {\n\n parts.quick = true;\n\n Ok(parts)\n\n })\n\n }\n\n\n\n pub fn proto<T: AsRef<str>>(self, proto: T) -> Builder {\n\n self.and_then(|mut parts| {\n\n parts.proto = match proto.as_ref().to_lowercase().as_str() {\n\n \"udp\" => Proto::UDP,\n\n \"tcp\" => Proto::TCP,\n", "file_path": "libpf-rs/src/rule.rs", "rank": 34, "score": 24626.63614966669 }, { "content": " match parts.action {\n\n Action::Block => raw_rule.action = 1,\n\n Action::Pass => raw_rule.action = 2,\n\n }\n\n\n\n raw_rule.quick = match parts.quick {\n\n false => 0,\n\n true => 1,\n\n };\n\n\n\n raw_rule.proto = match &parts.proto {\n\n Proto::TCP => 6,\n\n Proto::UDP => 17,\n\n Proto::Any => 0,\n\n };\n\n\n\n let inner_rule = if is_ipv6 {\n\n InnerRule::IPv6Rule(raw_rule)\n\n } else {\n\n InnerRule::IPv4Rule(raw_rule)\n", "file_path": "libpf-rs/src/rule.rs", "rank": 35, "score": 24626.635516460563 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Rule {\n\n inner: InnerRule,\n\n}\n\n\n\nimpl Rule {\n\n // TODO: need at least rust 1.18\n\n 
pub(crate) fn get_rule(self) -> InnerRule {\n\n self.inner\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "libpf-rs/src/rule.rs", "rank": 36, "score": 24625.580315961208 }, { "content": "\n\n#[derive(Clone, Copy, Serialize, Deserialize, PartialEq, Debug, Default)]\n\npub(crate) struct RawRule {\n\n action: u32,\n\n quick: u32,\n\n proto: u32,\n\n sport: u16,\n\n dport: u16,\n\n saddr4: u32,\n\n daddr4: u32,\n\n saddr6: u128,\n\n daddr6: u128,\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum InnerRule {\n\n DefaultRule(Action),\n\n IPv4Rule(RawRule),\n\n IPv6Rule(RawRule),\n\n}\n", "file_path": "libpf-rs/src/rule.rs", "rank": 37, "score": 24624.989607240583 }, { "content": " };\n\n\n\n Ok(Rule { inner: inner_rule })\n\n })\n\n }\n\n\n\n fn and_then<F>(self, op: F) -> Self\n\n where\n\n F: FnOnce(Parts) -> Result<Parts>,\n\n {\n\n Builder {\n\n inner: self.inner.and_then(op),\n\n }\n\n }\n\n}\n", "file_path": "libpf-rs/src/rule.rs", "rank": 38, "score": 24624.904091323373 }, { "content": "use std::net::SocketAddr;\n\n\n\nuse anyhow::{bail, Result};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::error::Error;\n\nuse crate::ip::{get_zero_addr, ToSockAddr};\n\n\n\n#[derive(Debug)]\n\npub(crate) enum Proto {\n\n UDP,\n\n TCP,\n\n Any,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum Action {\n\n Block = 1,\n\n Pass = 2,\n\n}\n", "file_path": "libpf-rs/src/rule.rs", "rank": 39, "score": 24624.64060798267 }, { "content": " }\n\n\n\n pub fn to_port(self, port: u16) -> Builder {\n\n self.and_then(move |mut parts| {\n\n parts.daddr = parts\n\n .daddr\n\n .or_else(|| {\n\n let addr = get_zero_addr(parts.is_ipv6);\n\n Some(addr)\n\n })\n\n .and_then(|mut addr| {\n\n addr.set_port(port);\n\n Some(addr)\n\n });\n\n Ok(parts)\n\n })\n\n }\n\n\n\n pub fn pass_all(self) -> Result<Rule> {\n\n self.inner.and_then(|_| {\n", "file_path": "libpf-rs/src/rule.rs", "rank": 40, "score": 24623.933488104987 }, { "content": " _ => {\n\n bail!(Error::InvalidInput(\n\n 
\"invalid protocol must be `tcp` or `udp`\".to_string(),\n\n ));\n\n }\n\n };\n\n Ok(parts)\n\n })\n\n }\n\n\n\n pub fn set_ipv4(self) -> Builder {\n\n self.and_then(|mut parts| {\n\n parts.is_ipv6 = false;\n\n Ok(parts)\n\n })\n\n }\n\n\n\n pub fn set_ipv6(self) -> Builder {\n\n self.and_then(|mut parts| {\n\n parts.is_ipv6 = true;\n", "file_path": "libpf-rs/src/rule.rs", "rank": 41, "score": 24622.15688741983 }, { "content": " Ok(parts)\n\n })\n\n }\n\n\n\n pub fn from_addr<T: ToSockAddr>(self, src: T) -> Builder {\n\n self.and_then(move |mut parts| {\n\n let addr = src\n\n .to_sock_addr()\n\n .map_err(|e| Error::InvalidInput(e.to_string()))?;\n\n parts.is_ipv6 = addr.is_ipv6();\n\n parts.saddr = Some(addr);\n\n Ok(parts)\n\n })\n\n }\n\n\n\n pub fn from_port(self, port: u16) -> Builder {\n\n self.and_then(move |mut parts| {\n\n parts.saddr = parts\n\n .saddr\n\n .or_else(|| {\n", "file_path": "libpf-rs/src/rule.rs", "rank": 42, "score": 24621.654297039226 }, { "content": " let addr = get_zero_addr(parts.is_ipv6);\n\n Some(addr)\n\n })\n\n .and_then(|mut addr| {\n\n addr.set_port(port);\n\n Some(addr)\n\n });\n\n Ok(parts)\n\n })\n\n }\n\n\n\n pub fn to_addr<T: ToSockAddr>(self, dst: T) -> Builder {\n\n self.and_then(move |mut parts| {\n\n let addr = dst\n\n .to_sock_addr()\n\n .map_err(|e| Error::InvalidInput(e.to_string()))?;\n\n parts.is_ipv6 = addr.is_ipv6();\n\n parts.daddr = Some(addr);\n\n Ok(parts)\n\n })\n", "file_path": "libpf-rs/src/rule.rs", "rank": 43, "score": 24620.92626315489 }, { "content": " let addr: u32 = (*a.ip()).into();\n\n raw_rule.saddr4 = addr.to_be();\n\n raw_rule.sport = a.port().to_be();\n\n }\n\n if let Some(SocketAddr::V4(a)) = parts.daddr {\n\n let addr: u32 = (*a.ip()).into();\n\n raw_rule.daddr4 = addr.to_be();\n\n raw_rule.dport = a.port().to_be();\n\n }\n\n if let Some(SocketAddr::V6(a)) = parts.saddr {\n\n let addr: u128 = (*a.ip()).into();\n\n raw_rule.saddr6 = addr.to_be();\n\n raw_rule.sport = a.port().to_be();\n\n }\n\n if 
let Some(SocketAddr::V6(a)) = parts.daddr {\n\n let addr: u128 = (*a.ip()).into();\n\n raw_rule.daddr6 = addr.to_be();\n\n raw_rule.dport = a.port().to_be();\n\n }\n\n\n", "file_path": "libpf-rs/src/rule.rs", "rank": 44, "score": 24620.249323576827 }, { "content": " match (&parts.saddr, &parts.daddr) {\n\n (Some(s), Some(d)) => {\n\n // if we have src and dst then they should be of the same ip version\n\n if s.is_ipv6() != d.is_ipv6() {\n\n bail!(Error::Build(\n\n \"src & dst IP versions do not match\".to_string(),\n\n ));\n\n }\n\n is_ipv6 = s.is_ipv6();\n\n }\n\n (Some(s), None) => is_ipv6 = s.is_ipv6(),\n\n (None, Some(d)) => is_ipv6 = d.is_ipv6(),\n\n (None, None) => is_ipv6 = parts.is_ipv6,\n\n }\n\n\n\n if is_ipv6 != parts.is_ipv6 {\n\n bail!(Error::Build(\"error: IP version mismatch\".to_string()));\n\n }\n\n\n\n if let Some(SocketAddr::V4(a)) = parts.saddr {\n", "file_path": "libpf-rs/src/rule.rs", "rank": 45, "score": 24619.34195275381 }, { "content": "fn cartesian_product(set: Vec<Vec<Token>>) -> VecDeque<VecDeque<Token>> {\n\n let res_len = set.iter().fold(1, |l, e| l * e.len());\n\n let mut res: VecDeque<VecDeque<Token>> = VecDeque::new();\n\n res.resize_with(res_len, || VecDeque::new());\n\n\n\n for tokens in set.into_iter() {\n\n let mut i = 0;\n\n let mut tmp = tokens.iter();\n\n while i < res.len() {\n\n match tmp.next() {\n\n Some(val) => {\n\n res[i].push_back(val.clone());\n\n i += 1;\n\n }\n\n None => tmp = tokens.iter(),\n\n }\n\n }\n\n }\n\n res\n\n}\n", "file_path": "pf-rs/src/preproc.rs", "rank": 46, "score": 24290.903568952344 }, { "content": "### Example\n\n\n\nGiven the code below, `libpf-rs` will create an eBPF program \n\nthat filters (blocks) incoming packets based on the given addresses \n\nand loads it on the device with index 4.\n\n\n\n```Rust\n\nuse libpf_rs::filter::Filter;\n\nuse libpf_rs::rule::Builder;\n\n\n\nfn main() {\n\n let ifindex: i32 = 4;\n\n let addrs = [\n\n (\"10.11.4.2\", \"10.11.3.2\"),\n\n (\"10.11.6.2\", 
\"10.11.3.2\"),\n\n (\"10.11.5.2\", \"10.11.2.2\"),\n\n (\"10.11.127.2\", \"100.11.2.2\"),\n\n (\"0:0:0:0:0:FFFF:204.152.189.116\", \"1:0:0:0:0:0:0:8\"),\n\n (\"0:0:0:0:0:FFFF:204.152.189.116\", \"1:0:0:0:0:0:0:8\"),\n\n ];\n\n\n\n let mut filter = Filter::new();\n\n\n\n for (src, dst) in addrs.into_iter() {\n\n filter.add_rule(\n\n Builder::new()\n\n .block()\n\n .from_addr(src)\n\n .to_addr(dst)\n\n .build()\n\n .unwrap(),\n\n );\n\n }\n\n\n\n let _link = filter.load_on(ifindex);\n\n \n\n // load_on() returns bpf_link\n\n // eBPF program will be detached once link is dropped\n\n // please see libbpf's doc for more info\n\n loop {}\n\n}\n\n```\n\n\n\n# Feature Checklist\n\n\n\nSome of these are WIP.\n\n\n\n### `pf-rs`\n\n- [x] supports macros\n\n- [X] supports lists\n\n- [ ] supports default actions (`pass all` and `block all`)\n\n- [ ] support nested lists\n\n- [ ] support macro in lists\n\n\n\n### `libpf-rs`\n\n- [x] supports IPv4 and IPv6\n\n- [x] supports default actions (`pass all` and `block all`)\n\n- [x] supports `quick` option (stops rule processing and performs action on first match)\n\n- [x] supports UDP\n\n- [x] supports stateless TCP (only port information) \n\n- [ ] supports stateful inspections\n\n- [ ] supports HTTP\n\n- [ ] supports SSH\n", "file_path": "README.md", "rank": 47, "score": 19.391062394657347 }, { "content": "\n\n Ok(obj)\n\n }\n\n\n\n pub fn update_map<T: AsRef<str>>(\n\n &mut self,\n\n name: T,\n\n key: &[u8],\n\n value: &[u8],\n\n flags: u64,\n\n ) -> Result<()> {\n\n match self.maps.get_mut(name.as_ref()) {\n\n Some(m) => m.update_map(key, value, flags),\n\n _ => bail!(\"unknown map\"),\n\n }\n\n }\n\n\n\n pub fn attach_prog(&mut self, ifindex: i32) -> Result<BPFLink> {\n\n // for now we only support one program\n\n match self.progs.get_mut(0) {\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 48, "score": 12.769423625700192 }, { "content": " if (get_ipv6_rule(i, &rule) < 0) {\n\n bpf_printk(\"Error: failed to get rule [index 
%d]\", i);\n\n return -1;\n\n }\n\n if (eval_ipv6_rule(rule, packet)) {\n\n action = rule->action;\n\n }\n\n }\n\n }\n\n\n\n // if action != 1 then it must have matched a rule\n\n if (rule && rule->quick && action != -1)\n\n return action;\n\n\n\n return action;\n\n}\"#;\n\n\n\npub static EVAL_ONLY_IP6: &str = r#\"\n\nstatic int eval_rules(int ip_version, struct rule *packet)\n\n{\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 49, "score": 12.335382630290432 }, { "content": "\n\npub static EVAL_ONLY_IP4: &str = r#\"\n\nstatic int eval_rules(int ip_version, struct rule *packet)\n\n{\n\n struct rule *rule = NULL;\n\n int action = -1;\n\n if (ip_version == bpf_htons(ETH_P_IP)) {\n\n for (int i=0; i < IPV4_RULE_COUNT; i++) {\n\n if (get_ipv4_rule(i, &rule) < 0) {\n\n bpf_printk(\"Error: failed to get rule [index %d]\", i);\n\n return -1;\n\n }\n\n if (eval_ipv4_rule(rule, packet)) {\n\n action = rule->action;\n\n }\n\n }\n\n }\n\n\n\n // if action != 1 then it must have matched a rule\n\n if (rule && rule->quick && action != -1)\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 50, "score": 11.349853963733494 }, { "content": "use std::collections::{HashMap, VecDeque};\n\nuse std::iter::Peekable;\n\nuse std::vec::IntoIter;\n\n\n\nuse anyhow::Result;\n\n\n\nuse crate::token::Token;\n\nuse crate::Lexer;\n\n\n\npub struct PreProc {\n\n tokens: Vec<Token>,\n\n buf: Peekable<IntoIter<Token>>,\n\n idents: HashMap<String, Token>,\n\n}\n\n\n\nimpl PreProc {\n\n pub fn new(lex: Lexer) -> Self {\n\n PreProc {\n\n tokens: Vec::new(),\n\n buf: lex.collect::<Vec<_>>().into_iter().peekable(),\n", "file_path": "pf-rs/src/preproc.rs", "rank": 51, "score": 10.778082738999592 }, { "content": " struct rule *rule = NULL;\n\n int action = -1;\n\n if (ip_version == bpf_htons(ETH_P_IPV6)) {\n\n for (int i=0; i < IPV6_RULE_COUNT; i++) {\n\n if (get_ipv6_rule(i, &rule) < 0) {\n\n bpf_printk(\"Error: failed to get rule [index %d]\", i);\n\n return -1;\n\n }\n\n if (eval_ipv6_rule(rule, 
packet)) {\n\n action = rule->action;\n\n }\n\n }\n\n }\n\n\n\n // if action != 1 then it must have matched a rule\n\n if (rule && rule->quick && action != -1)\n\n return action;\n\n\n\n return action;\n\n}\"#;\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 52, "score": 10.67378545384245 }, { "content": "pub use bpf::BPFLink;\n\n\n\nmod bpf;\n\nmod bpfcode;\n\nmod compile;\n\npub mod error;\n\npub mod filter;\n\nmod ip;\n\npub mod rule;\n", "file_path": "libpf-rs/src/lib.rs", "rank": 53, "score": 10.19117467584007 }, { "content": " }\n\n }\n\n\n\n pub fn from_file(file_path: &str) -> Result<Self> {\n\n Ok(Self::from_str(\n\n fs::read_to_string(file_path)\n\n .expect(\"could not read file\")\n\n .trim_start_matches(|c: char| c.is_ascii_whitespace())\n\n .to_string(),\n\n ))\n\n }\n\n\n\n fn read_ident(&mut self) -> Token {\n\n self.consume_whitespace();\n\n let ident = self.read_next().expect(\"invalid token `$`\");\n\n Token::Ident(ident)\n\n }\n\n\n\n fn read_list_items(&mut self) -> Token {\n\n let mut items: Vec<Token> = Vec::new();\n", "file_path": "pf-rs/src/lexer.rs", "rank": 54, "score": 9.890171148517322 }, { "content": "use std::path::PathBuf;\n\nuse std::str::FromStr;\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse std::sync::Arc;\n\nuse std::{thread, time};\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse clap::Parser as ClapParser;\n\n\n\nuse lexer::Lexer;\n\nuse libpf_rs::filter::Filter;\n\nuse libpf_rs::rule::Rule;\n\nuse libpf_rs::BPFLink;\n\n\n\nuse crate::parser::Parser;\n\nuse crate::preproc::PreProc;\n\n\n\nmod lexer;\n\nmod parser;\n\nmod preproc;\n\nmod token;\n\n\n\n#[derive(ClapParser)]\n\n#[clap(name = \"pf\")]\n\n#[clap(author = \"Fausto Miguel Guarniz <mi9uel9@gmail.com>\")]\n\n#[clap(version = \"0.1.0\")]\n\n#[clap(about = \"eBPF-based packet filter for Rust\", long_about = None)]\n", "file_path": "pf-rs/src/main.rs", "rank": 55, "score": 9.603675350147807 }, { "content": " return action;\n\n\n\n return 
action;\n\n}\"#;\n\n\n\npub static EVAL_NOOP: &str = r#\"\n\nstatic int eval_rules(int ip_version, struct rule *packet)\n\n{\n\n return -1;\n\n}\"#;\n\n\n\npub const PROGRAM: &str = r##\"\n\nstatic void print_rule(struct rule *rule)\n\n{\n\n bpf_printk(\"action [ %u ] (DROP: 1) (PASS: 2)\", rule->action);\n\n bpf_printk(\"proto [ %u ]\", rule->proto);\n\n bpf_printk(\"ports [ src %u ] [ dst %u ]\", bpf_ntohs(rule->sport), bpf_ntohs(rule->dport));\n\n bpf_printk(\"ipv4 [ src %pI4 ] [ dst %pI4 ]\", &(rule->ip4_addr.saddr), &(rule->ip4_addr.daddr));\n\n bpf_printk(\"ipv6 [ src %pI6 ]\", &rule->ip6_addr.saddr);\n\n bpf_printk(\"ipv6 [ dst %pI6 ]\", &rule->ip6_addr.daddr);\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 56, "score": 9.184979256313758 }, { "content": "}\n\n\"##;\n\n\n\npub static EVAL_BOTH_IPVER: &str = r#\"\n\nstatic int eval_rules(int ip_version, struct rule *packet)\n\n{\n\n struct rule *rule = NULL;\n\n int action = -1;\n\n if (ip_version == bpf_htons(ETH_P_IP)) {\n\n for (int i=0; i < IPV4_RULE_COUNT; i++) {\n\n if (get_ipv4_rule(i, &rule) < 0) {\n\n bpf_printk(\"Error: failed to get rule [index %d]\", i);\n\n return -1;\n\n }\n\n if (eval_ipv4_rule(rule, packet)) {\n\n action = rule->action;\n\n }\n\n }\n\n } else if (ip_version == bpf_htons(ETH_P_IPV6)) {\n\n for (int i=0; i < IPV6_RULE_COUNT; i++) {\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 57, "score": 9.13154896776306 }, { "content": "\n\n let res = load_filter(rules, cli.ifindex);\n\n match res {\n\n Ok(_) => println!(\"pf-rs: filter is attached\"),\n\n Err(e) => panic!(\"{}\", e.to_string()),\n\n }\n\n\n\n // /* keep it alive */\n\n let running = Arc::new(AtomicBool::new(true));\n\n let r = running.clone();\n\n\n\n ctrlc::set_handler(move || {\n\n r.store(false, Ordering::SeqCst);\n\n })\n\n .unwrap();\n\n\n\n while running.load(Ordering::SeqCst) {\n\n eprint!(\".\");\n\n thread::sleep(time::Duration::from_secs(1));\n\n }\n\n}\n\n\n", "file_path": "pf-rs/src/main.rs", "rank": 
58, "score": 9.06303058718138 }, { "content": "use std::collections::HashMap;\n\nuse std::ffi::{c_void, CStr, CString};\n\nuse std::path::Path;\n\nuse std::ptr;\n\n\n\nuse anyhow::{anyhow, bail, Result};\n\nuse libbpf_sys;\n\n\n\n#[allow(dead_code)]\n\npub struct BPFLink {\n\n ptr: *mut libbpf_sys::bpf_link,\n\n}\n\n\n\npub struct BPFObj {\n\n ptr: *mut libbpf_sys::bpf_object,\n\n progs: Vec<BPFProg>,\n\n maps: HashMap<String, BPFMap>,\n\n}\n\n\n\nimpl BPFObj {\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 59, "score": 8.97627883527656 }, { "content": "pub static VMLINUX: &'static str = include_str!(\"../.headers/vmlinux.h\");\n\n\n\npub const INCLUDE_HEADERS: &str = r##\"\n\n// SPDX-License-Identifier: BSD-3-Clause\n\n/* Copyright (c) 2022 Miguel Guarniz */\n\n#include \"vmlinux.h\"\n\n#include <bpf/bpf_helpers.h>\n\n#include <bpf/bpf_endian.h>\n\n\"##;\n\n\n\npub const DEFINES: &str = \"\\\n\n#define ETH_P_IP 0x0800\\n\\\n\n#define ETH_P_IPV6 0x86DD\\n\\\n\n#define IPPROTO_UDP 17\\n\\\n\n#define IPPROTO_TCP 6\\n\\\n\n#define IPV6_ADDR_LEN 16\\n\\\n\n#define NOOP 0\\n\";\n\n\n\npub const STRUCTS: &str = \"\\\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 60, "score": 8.676485179801386 }, { "content": " }\n\n\n\n fn read_next(&mut self) -> Option<String> {\n\n self.read_while(|c| !c.is_ascii_whitespace())\n\n }\n\n\n\n fn read_newline(&mut self) -> Token {\n\n self.consume_whitespace();\n\n Token::Nl\n\n }\n\n}\n\n\n\nimpl Iterator for Lexer {\n\n type Item = Token;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n // skip whitespace except new line char\n\n self.read_while(|c| c.is_ascii_whitespace() && c != NL);\n\n\n\n if self.peek_then_read(|c| c == ASSIGN).is_some() {\n", "file_path": "pf-rs/src/lexer.rs", "rank": 61, "score": 8.503965821991708 }, { "content": "Warning: This project is currently in development. 
\n\n\n\n# pf-rs\n\n\n\n`pf-rs` allows you to easily create and maintain packet filters.\n\nYou can specify the criteria for your filter via filter rules, \n\nthat use Layer 3 & 4 header information, and `pf-rs` will create \n\nan eBPF program and attach it to the XDP hook for fast packet filtering.\n\n\n\nThe syntax to specify the rules was inspired by OpenBSD's pf.\n\n\n\n### Example\n\n\n\nGiven the rule below, `pf-rs` will create a filter to block any IPv4 \n\npacket with source address 10.11.4.2 or 10.11.5.2 to the destination \n\naddress 10.11.3.2.\n\n\n\n```\n\nblocklist = { 10.11.4.2 10.11.5.2 }\n\n\n\nblock from $blocklist to 10.11.3.2\n\n```\n\n\n\n# libpf-rs\n\n\n\nA Rust library for implementing eBPF-based packet filters. \n\nIt provides an API for creating filter rules via a `Builder` \n\nand building and attaching a packet filter via a `Filter`.\n\n\n\nThis library uses [libbpf](https://github.com/libbpf/libbpf) and the rust bindings for it [libbpf-sys](https://github.com/libbpf/libbpf-sys). 
\n\nIntegration with [libbpf-rs](https://github.com/libbpf/libbpf-rs) is planned once the API stabilizes.\n\n\n\n\n", "file_path": "README.md", "rank": 62, "score": 8.186238373350225 }, { "content": " let mut s = String::new();\n\n while let Some(c) = self.buf.peek() {\n\n if p(*c) {\n\n s.push(*c);\n\n self.buf.next();\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n if s.is_empty() {\n\n return None;\n\n }\n\n\n\n Some(s)\n\n }\n\n\n\n // only use if you're absolutely sure that there should not be any ws including a \\n\n\n fn consume_whitespace(&mut self) {\n\n self.read_while(|c| c.is_ascii_whitespace());\n", "file_path": "pf-rs/src/lexer.rs", "rank": 63, "score": 7.345450284040084 }, { "content": "use std::fs;\n\nuse std::iter::Peekable;\n\nuse std::vec::IntoIter;\n\n\n\nuse anyhow::Result;\n\n\n\nuse crate::token::Token;\n\nuse crate::token::{\n\n ALL, ASSIGN, BLOCK, CLOSE_CBRACK, FROM, NL, ON, OPEN_CBRACK, PASS, PORT, PROTO, REPLACE_PREFIX,\n\n TO,\n\n};\n\n\n\npub struct Lexer {\n\n buf: Peekable<IntoIter<char>>,\n\n}\n\n\n\nimpl Lexer {\n\n pub fn from_str(str: String) -> Lexer {\n\n Lexer {\n\n buf: str.chars().collect::<Vec<_>>().into_iter().peekable(),\n", "file_path": "pf-rs/src/lexer.rs", "rank": 64, "score": 7.200346132225727 }, { "content": " pub fn load_from_file<T: AsRef<Path>>(src: T) -> Result<Self> {\n\n let obj_ptr = BPFObj::open_file(src.as_ref())?;\n\n\n\n let res = unsafe { libbpf_sys::bpf_object__load(obj_ptr) };\n\n if res != 0 {\n\n bail!(\"error {}: failed to load bpf object\", -res);\n\n }\n\n\n\n let mut obj = BPFObj {\n\n ptr: obj_ptr,\n\n progs: Vec::new(),\n\n maps: HashMap::new(),\n\n };\n\n\n\n let mut prev_map: *mut libbpf_sys::bpf_map = std::ptr::null_mut();\n\n loop {\n\n let next_ptr = unsafe { libbpf_sys::bpf_object__next_map(obj.ptr, prev_map) };\n\n if next_ptr.is_null() {\n\n break;\n\n }\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 65, "score": 7.111579408242436 }, { "content": " Some(p) => p.attach_xdp(ifindex),\n\n _ => 
bail!(\"failed to retrieve prog\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for BPFObj {\n\n fn drop(&mut self) {\n\n unsafe {\n\n libbpf_sys::bpf_object__close(self.ptr);\n\n }\n\n }\n\n}\n\n\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 66, "score": 7.08894375820198 }, { "content": " Token::Val(word)\n\n }\n\n\n\n fn peek_then_read<P>(&mut self, p: P) -> Option<char>\n\n where\n\n P: FnOnce(char) -> bool,\n\n {\n\n if let Some(&c) = self.buf.peek() {\n\n if p(c) {\n\n return Some(self.buf.next().unwrap());\n\n }\n\n }\n\n None\n\n }\n\n\n\n // this one peeks and does not consume if there is no match unlike iter.map_while\n\n fn read_while<P>(&mut self, p: P) -> Option<String>\n\n where\n\n P: Fn(char) -> bool,\n\n {\n", "file_path": "pf-rs/src/lexer.rs", "rank": 67, "score": 7.077496275197241 }, { "content": " let token = tokens.next().expect(msg.as_str());\n\n self.idents.insert(name, token);\n\n } else {\n\n res.push(t);\n\n }\n\n }\n\n\n\n Ok(res)\n\n }\n\n\n\n pub fn preprocess(mut self) -> Result<Vec<Token>> {\n\n // skip initial new lines if any\n\n while let Some(Token::Nl) = self.buf.peek() {\n\n self.buf.next();\n\n }\n\n\n\n let mut buf: Vec<Token> = Vec::new();\n\n loop {\n\n let token = self.buf.next();\n\n\n", "file_path": "pf-rs/src/preproc.rs", "rank": 68, "score": 6.734581646814276 }, { "content": " }\n\n\n\n // we're only expecting to handle one program but this will make it easier to extend\n\n let mut prev_prog: *mut libbpf_sys::bpf_program = std::ptr::null_mut();\n\n loop {\n\n let next_ptr = unsafe { libbpf_sys::bpf_object__next_program(obj.ptr, prev_prog) };\n\n if next_ptr.is_null() {\n\n break;\n\n }\n\n\n\n obj.progs.push(BPFProg::new(next_ptr));\n\n prev_prog = next_ptr;\n\n }\n\n\n\n Ok(obj)\n\n }\n\n\n\n fn open_file(path: &Path) -> Result<*mut libbpf_sys::bpf_object> {\n\n let filename = path.file_name().ok_or(anyhow!(\"invalid path\"))?;\n\n let name = filename\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 69, "score": 
6.566025958523286 }, { "content": " if (proto == IPPROTO_UDP) {\n\n if (parse_udphdr(&nh, data_end, &udphdr) == -1)\n\n goto out;\n\n sport = udphdr->source;\n\n dport = udphdr->dest;\n\n } else if (proto == IPPROTO_TCP) {\n\n if (parse_tcphdr(&nh, data_end, &tcphdr) == -1)\n\n goto out;\n\n sport = tcphdr->source;\n\n dport = tcphdr->dest;\n\n }\n\n\n\n // eval packet against rules\n\n struct rule packet = { NOOP, NOOP, proto, sport, dport, ip4, ip6 };\n\n if ((action = eval_rules(ip_version ,&packet)) >= 0) {\n\n // (struct rule) packet has info about (net) packet except action\n\n // so we add action only for logging purposes\n\n packet.action = action;\n\n print_rule(&packet);\n\n return action;\n\n }\n\n out:\n\n // default action\n\n return DEFAULT_ACTION;\n\n}\n\n\n\nchar __license[] SEC(\"license\") = \"GPL\";\n\n\"##;\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 70, "score": 6.118426134452811 }, { "content": " Ok(())\n\n }\n\n\n\n fn process_macros(&mut self, line: Vec<Token>) -> Result<Vec<Token>> {\n\n let mut res = Vec::new();\n\n\n\n let mut tokens = line.into_iter().peekable();\n\n\n\n while let Some(t) = tokens.next() {\n\n if let Token::Ident(name) = t {\n\n let msg = format!(\"unknown identifier {}\", name.as_str());\n\n let val = self.idents.get(name.as_str()).expect(msg.as_str());\n\n res.push(val.clone());\n\n } else if let Token::Def(name) = t {\n\n tokens\n\n .next()\n\n .filter(|t| matches!(t, Token::Assign))\n\n .expect(\"expected `=` in macro declaration\"); // this will never panic\n\n\n\n let msg = format!(\"invalid `{} = [no value]`\", name.as_str());\n", "file_path": "pf-rs/src/preproc.rs", "rank": 71, "score": 5.357314290318163 }, { "content": " nh->pos += hdrsize;\n\n *tcphdr = tcph;\n\n return 0;\n\n}\"##;\n\n\n\npub const IP4_EVAL_FUNCS: &str = r##\"\n\nstatic int get_ipv4_rule(int i, struct rule **rule)\n\n{\n\n struct rule *res = bpf_map_lookup_elem(&ipv4_rules, &i);\n\n if (!res)\n\n return -1;\n\n *rule = res;\n\n return 
0;\n\n}\n\n\n\nstatic int eval_ipv4_rule(struct rule *rule, struct rule *pack)\n\n{\n\n return (rule->proto == 0 || rule->proto == pack->proto) &&\n\n (rule->sport == 0 || rule->sport == pack->sport) &&\n\n (rule->dport == 0 || rule->dport == pack->dport) &&\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 72, "score": 5.124111114442295 }, { "content": " };\n\n ($name:ident, $input:expr) => {\n\n #[test]\n\n #[should_panic]\n\n fn $name() {\n\n let rule = String::from($input);\n\n let mut lex = Lexer::from_str(rule.clone());\n\n lex.read_list_items();\n\n }\n\n };\n\n }\n\n\n\n macro_rules! test_next {\n\n ($name:ident, $input:expr, $expect:expr) => {\n\n #[test]\n\n fn $name() {\n\n let input = String::from($input);\n\n let mut lex = Lexer::from_str(input.clone());\n\n assert_eq!(lex.next(), Some($expect), \"input was `{}`\", input);\n\n }\n", "file_path": "pf-rs/src/lexer.rs", "rank": 73, "score": 4.985189929003544 }, { "content": " let rule = String::from($input);\n\n let lex = Lexer::from_str(rule.clone());\n\n assert_eq!(\n\n lex.into_iter().collect::<Vec<_>>(),\n\n $expect,\n\n \"input was `{}`\",\n\n rule\n\n )\n\n }\n\n };\n\n }\n\n\n\n macro_rules! 
test_list {\n\n ($name:ident, $input:expr, $expect:expr) => {\n\n #[test]\n\n fn $name() {\n\n let rule = String::from($input);\n\n let mut lex = Lexer::from_str(rule.clone());\n\n assert_eq!(lex.read_list_items(), $expect, \"input was `{}`\", rule)\n\n }\n", "file_path": "pf-rs/src/lexer.rs", "rank": 74, "score": 4.9539955149799235 }, { "content": " (rule->ip4_addr.saddr == 0 || rule->ip4_addr.saddr == pack->ip4_addr.saddr) &&\n\n (rule->ip4_addr.daddr == 0 || rule->ip4_addr.daddr == pack->ip4_addr.daddr);\n\n\n\n}\"##;\n\n\n\npub const IP6_EVAL_FUNCS: &str = r##\"\n\nstatic int get_ipv6_rule(int i, struct rule **rule)\n\n{\n\n struct rule *res = bpf_map_lookup_elem(&ipv6_rules, &i);\n\n if (!res)\n\n return -1;\n\n *rule = res;\n\n return 0;\n\n}\n\n\n\nstatic int is_zero(const __u8 a[IPV6_ADDR_LEN])\n\n{\n\n for (int i = 0; i < IPV6_ADDR_LEN ; i++) {\n\n if (a[i] != 0)\n\n return 0;\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 75, "score": 4.858762707061822 }, { "content": " if let Some(Token::Nl) = token {\n\n self.process_line(buf)?;\n\n buf = Vec::new();\n\n continue;\n\n }\n\n\n\n match token {\n\n Some(t) => buf.push(t),\n\n None => break,\n\n }\n\n }\n\n\n\n // each line ends in a nl so\n\n // this covers the case when the last line\n\n // does not end in a new line\n\n if !buf.is_empty() {\n\n self.process_line(buf)?;\n\n }\n\n\n\n Ok(self.tokens)\n\n }\n\n}\n\n\n", "file_path": "pf-rs/src/preproc.rs", "rank": 76, "score": 4.715767220679634 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n fn process_line(&mut self, raw_line: Vec<Token>) -> Result<()> {\n\n let mut buf: Vec<Vec<Token>> = Vec::new();\n\n\n\n let mut line = self.process_macros(raw_line)?;\n\n\n\n for token in line.iter() {\n\n if let Token::List(token_vec) = token {\n\n buf.push(token_vec.clone())\n\n }\n\n }\n\n\n\n if !buf.is_empty() {\n\n self.process_list(line, buf)?;\n\n } else {\n\n self.tokens.append(&mut line);\n\n }\n", "file_path": "pf-rs/src/preproc.rs", "rank": 77, "score": 
4.714605690463398 }, { "content": "use thiserror::Error;\n\n\n\n#[derive(Debug, Error)]\n\npub enum Error {\n\n #[error(\"error when building: {0}\")]\n\n Build(String),\n\n #[error(\"internal error: {0}\")]\n\n Internal(String),\n\n #[error(\"invalid input: {0}\")]\n\n InvalidInput(String),\n\n}\n", "file_path": "libpf-rs/src/error.rs", "rank": 78, "score": 4.363549006039364 }, { "content": " BLOCK => Some(Token::Block),\n\n ON => Some(Token::On),\n\n PROTO => Some(Token::Proto),\n\n PORT => Some(Token::Port),\n\n FROM => Some(Token::From),\n\n TO => Some(Token::To),\n\n _ => Some(self.interpret(s)),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Lexer;\n\n use super::Token::{Assign, Block, Def, From, Ident, List, Nl, Pass, Proto, To, Val};\n\n\n\n macro_rules! test_lexer {\n\n ($name:ident, $input:expr, $expect:expr) => {\n\n #[test]\n\n fn $name() {\n", "file_path": "pf-rs/src/lexer.rs", "rank": 79, "score": 4.182197121893866 }, { "content": " idents: HashMap::new(),\n\n }\n\n }\n\n\n\n fn process_list(&mut self, line: Vec<Token>, tokens: Vec<Vec<Token>>) -> Result<()> {\n\n let mut buf: VecDeque<VecDeque<Token>> = cartesian_product(tokens);\n\n\n\n while let Some(mut token_vec) = buf.pop_front() {\n\n for token in line.iter() {\n\n if let Token::List(_) = token {\n\n // replace List token with the next token from cartesian product result set\n\n self.tokens.push(\n\n token_vec\n\n .pop_front()\n\n .expect(\"error: failed to process list token\"),\n\n )\n\n } else {\n\n self.tokens.push(token.clone());\n\n }\n\n }\n", "file_path": "pf-rs/src/preproc.rs", "rank": 80, "score": 3.965164995367961 }, { "content": "pub const ALL: &str = \"all\";\n\npub const PASS: &str = \"pass\";\n\npub const BLOCK: &str = \"block\";\n\npub const PROTO: &str = \"proto\";\n\npub const ON: &str = \"on\";\n\npub const FROM: &str = \"from\";\n\npub const TO: &str = \"to\";\n\npub const PORT: &str = \"port\";\n\npub const NL: char = '\\n';\n\npub const ASSIGN: char = 
'=';\n\npub const REPLACE_PREFIX: char = '$';\n\npub const OPEN_CBRACK: char = '{';\n\npub const CLOSE_CBRACK: char = '}';\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Token {\n\n All,\n\n Assign,\n\n Block,\n\n From,\n", "file_path": "pf-rs/src/token.rs", "rank": 81, "score": 3.6264461260817047 }, { "content": " }\n\n return 1;\n\n}\n\n\n\nstatic int equals(__u8 a[IPV6_ADDR_LEN], __u8 b[IPV6_ADDR_LEN])\n\n{\n\n for (int i = 0; i < IPV6_ADDR_LEN; i++) {\n\n if (a[i] != b[i])\n\n return 0;\n\n }\n\n return 1;\n\n}\n\n\n\nstatic int eval_ipv6_rule(struct rule *rule, struct rule *pack)\n\n{\n\n return (rule->proto == 0 || rule->proto == pack->proto) &&\n\n (rule->sport == 0 || rule->sport == pack->sport) &&\n\n (rule->dport == 0 || rule->dport == pack->dport) &&\n\n (is_zero(rule->ip6_addr.saddr) || equals(rule->ip6_addr.saddr, pack->ip6_addr.saddr)) &&\n\n (is_zero(rule->ip6_addr.daddr) || equals(rule->ip6_addr.daddr, pack->ip6_addr.daddr));\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 82, "score": 3.5165544756569243 }, { "content": "use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};\n\nuse std::str::FromStr;\n\n\n", "file_path": "libpf-rs/src/ip.rs", "rank": 83, "score": 3.1265298710138563 }, { "content": " return Some(Token::Assign);\n\n }\n\n if self.peek_then_read(|c| c == NL).is_some() {\n\n return Some(self.read_newline());\n\n }\n\n if self.peek_then_read(|c| c == OPEN_CBRACK).is_some() {\n\n return Some(self.read_list_items());\n\n }\n\n if self.peek_then_read(|c| c == REPLACE_PREFIX).is_some() {\n\n return Some(self.read_ident());\n\n }\n\n\n\n let s = match self.read_next() {\n\n Some(w) => w,\n\n None => return None,\n\n };\n\n\n\n match &s[..] 
{\n\n ALL => Some(Token::All),\n\n PASS => Some(Token::Pass),\n", "file_path": "pf-rs/src/lexer.rs", "rank": 84, "score": 2.7678029176031487 }, { "content": " Nl,\n\n Pass,\n\n Proto,\n\n On,\n\n To,\n\n Port,\n\n Val(String),\n\n List(Vec<Self>),\n\n Def(String),\n\n Ident(String),\n\n}\n", "file_path": "pf-rs/src/token.rs", "rank": 85, "score": 2.69019049196097 }, { "content": " To,\n\n Val(\"dip\".to_string())\n\n ]\n\n );\n\n\n\n test_lexer!(\n\n lex_rule_with_list,\n\n \"block from { a b } to ip\",\n\n vec![\n\n Block,\n\n From,\n\n List(vec![Val(\"a\".to_string()), Val(\"b\".to_string())]),\n\n To,\n\n Val(\"ip\".to_string())\n\n ]\n\n );\n\n\n\n test_lexer!(\n\n lex_rule_with_ident,\n\n \"block proto $var1 from $var2 to $var3\",\n", "file_path": "pf-rs/src/lexer.rs", "rank": 86, "score": 2.6787142346146267 }, { "content": " .to_str()\n\n .ok_or(anyhow!(\"filename contains invalid Unicode\"))?;\n\n\n\n if !name.ends_with(\".o\") {\n\n bail!(\"filename does not have .o extension\");\n\n }\n\n\n\n let str_path = path.to_str().ok_or(anyhow!(\"invalid unicode in path\"))?;\n\n let c_name = CString::new(str_path)?;\n\n let obj_opts = libbpf_sys::bpf_object_open_opts {\n\n sz: std::mem::size_of::<libbpf_sys::bpf_object_open_opts>() as libbpf_sys::size_t,\n\n object_name: c_name.as_ptr(),\n\n ..Default::default()\n\n };\n\n\n\n let obj = unsafe { libbpf_sys::bpf_object__open_file(c_name.as_ptr(), &obj_opts) };\n\n let err = unsafe { libbpf_sys::libbpf_get_error(obj as *const _) };\n\n if err != 0 {\n\n bail!(\"bpf_object__open_file failed with err {}\", err as i32);\n\n }\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 87, "score": 2.5294387040620623 }, { "content": " vec![\n\n Block,\n\n Proto,\n\n Ident(\"var1\".to_string()),\n\n From,\n\n Ident(\"var2\".to_string()),\n\n To,\n\n Ident(\"var3\".to_string())\n\n ]\n\n );\n\n\n\n test_lexer!(\n\n lex_with_multiple_new_lines,\n\n \"\\n\\n block proto a from b to c \\n\\n\\n block proto d from e to f \\n\\n\\n\",\n\n 
vec![\n\n Nl,\n\n Block,\n\n Proto,\n\n Val(\"a\".to_string()),\n\n From,\n", "file_path": "pf-rs/src/lexer.rs", "rank": 88, "score": 2.1806492835540765 }, { "content": " bail!(\"invalid key size for map\");\n\n };\n\n\n\n if value.len() != self.val_size as usize {\n\n bail!(\"invalid value size for map\");\n\n };\n\n\n\n let res = unsafe {\n\n libbpf_sys::bpf_map_update_elem(\n\n self.fd as i32,\n\n key.as_ptr() as *const c_void,\n\n value.as_ptr() as *const c_void,\n\n flags as libbpf_sys::__u64,\n\n )\n\n };\n\n\n\n if res < 0 {\n\n bail!(\"failed to update the map {}\", res);\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 89, "score": 1.8684681053717114 }, { "content": " };\n\n ($name:ident, $input:expr) => {\n\n #[test]\n\n #[should_panic]\n\n fn $name() {\n\n let input = String::from($input);\n\n let mut lex = Lexer::from_str(input.clone());\n\n lex.next();\n\n }\n\n };\n\n }\n\n\n\n test_list!(\n\n read_list_items_one_elem1,\n\n \"{ a }\",\n\n List(vec![Val(\"a\".to_string())])\n\n );\n\n test_list!(\n\n read_list_items_one_elem2,\n\n \"{b}\",\n", "file_path": "pf-rs/src/lexer.rs", "rank": 90, "score": 1.8333558272373556 }, { "content": " let item = self.read_while(|c| !c.is_ascii_whitespace() && c != CLOSE_CBRACK);\n\n if let Some(i) = item {\n\n items.push(Token::Val(i));\n\n }\n\n }\n\n\n\n if items.is_empty() {\n\n panic!(\"error: no tokens inside list\")\n\n }\n\n\n\n Token::List(items)\n\n }\n\n\n\n fn interpret(&mut self, word: String) -> Token {\n\n // there could be nl after this, we don't know what token word is\n\n self.read_while(|c| c.is_ascii_whitespace() && c != NL);\n\n\n\n if self.buf.peek().filter(|&&c| c == ASSIGN).is_some() {\n\n return Token::Def(word);\n\n }\n", "file_path": "pf-rs/src/lexer.rs", "rank": 91, "score": 1.733321902793743 }, { "content": "}\n\n\n\nSEC(\"xdp\")\n\nint xdp_pf(struct xdp_md *ctx)\n\n{\n\n void *data = (void *)(long)ctx->data;\n\n void *data_end = (void 
*)(long)ctx->data_end;\n\n\n\n // L2, L3 & L4 structures\n\n struct ethhdr *ethhdr;\n\n struct iphdr *iphdr;\n\n struct ipv6hdr *ipv6hdr;\n\n struct udphdr *udphdr;\n\n struct tcphdr *tcphdr;\n\n\n\n int action;\n\n int proto;\n\n __be16 sport = 0;\n\n __be16 dport = 0;\n\n struct ip4_addr ip4 = {0};\n", "file_path": "libpf-rs/src/bpfcode.rs", "rank": 92, "score": 1.711797454512383 }, { "content": "\n\n // bpf_map__name does not return null unless we pass null\n\n let str_ptr = unsafe { libbpf_sys::bpf_map__name(next_ptr) };\n\n\n\n let c_str = unsafe { CStr::from_ptr(str_ptr) };\n\n let name = c_str.to_str()?.to_string();\n\n\n\n let fd = unsafe { libbpf_sys::bpf_map__fd(next_ptr) };\n\n if fd < 0 {\n\n bail!(\"error {}: failed to get file descriptor\", -fd);\n\n }\n\n\n\n // bpf_map__def does not return null unless we pass null\n\n let map_def = unsafe { ptr::read(libbpf_sys::bpf_map__def(next_ptr)) };\n\n\n\n obj.maps.insert(\n\n name,\n\n BPFMap::new(next_ptr, fd, map_def.key_size, map_def.value_size),\n\n );\n\n prev_map = next_ptr;\n", "file_path": "libpf-rs/src/bpf.rs", "rank": 93, "score": 1.1897074854488485 } ]
Rust
src/bin/wasmtime.rs
erights/wasmtime
a5823896b70aab5f7675c5ff7651e37324c88262
#![deny( missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features )] #![warn(unused_import_braces)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../clippy.toml")))] #![cfg_attr( feature = "cargo-clippy", allow(clippy::new_without_default, clippy::new_without_default_derive) )] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::option_map_unwrap_or, clippy::option_map_unwrap_or_else, clippy::unicode_not_nfc, clippy::use_self ) )] use anyhow::{bail, Context as _, Result}; use docopt::Docopt; use serde::Deserialize; use std::path::{Component, Path}; use std::{collections::HashMap, ffi::OsStr, fs::File, process::exit}; use wasi_common::preopen_dir; use wasmtime::{Config, Engine, HostRef, Instance, Module, Store}; use wasmtime_cli::pick_compilation_strategy; use wasmtime_environ::{cache_create_new_config, cache_init}; use wasmtime_environ::{settings, settings::Configurable}; use wasmtime_interface_types::ModuleData; use wasmtime_jit::Features; use wasmtime_wasi::create_wasi_instance; use wasmtime_wasi::old::snapshot_0::create_wasi_instance as create_wasi_instance_snapshot_0; #[cfg(feature = "wasi-c")] use wasmtime_wasi_c::instantiate_wasi_c; const USAGE: &str = " Wasm runner. Takes a binary (wasm) or text (wat) WebAssembly module and instantiates it, including calling the start function if one is present. Additional functions given with --invoke are then called. Usage: wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | \ --cache-config=<cache_config_file>] [--preload=<wasm>...] [--env=<env>...] [--dir=<dir>...] \ [--mapdir=<mapping>...] [--lightbeam | --cranelift] <file> [<arg>...] wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | \ --cache-config=<cache_config_file>] [--env=<env>...] [--dir=<dir>...] \ [--mapdir=<mapping>...] --invoke=<fn> [--lightbeam | --cranelift] <file> [<arg>...] 
wasmtime --create-cache-config [--cache-config=<cache_config_file>] wasmtime --help | --version Options: --invoke=<fn> name of function to run -o, --optimize runs optimization passes on the translated functions --disable-cache disables cache system --cache-config=<cache_config_file> use specified cache configuration; can be used with --create-cache-config to specify custom file --create-cache-config creates default configuration and writes it to the disk, use with --cache-config to specify custom config file instead of default one -g generate debug information -d, --debug enable debug output on stderr/stdout --lightbeam use Lightbeam for all compilation --cranelift use Cranelift for all compilation --enable-simd enable proposed SIMD instructions --wasi-c enable the wasi-c implementation of `wasi_unstable` --preload=<wasm> load an additional wasm module before loading the main module --env=<env> pass an environment variable (\"key=value\") to the program --dir=<dir> grant access to the given host directory --mapdir=<mapping> where <mapping> has the form <wasmdir>::<hostdir>, grant access to the given host directory with the given wasm directory name -h, --help print this help message --version print the Cranelift version "; #[derive(Deserialize, Debug, Clone)] struct Args { arg_file: String, arg_arg: Vec<String>, flag_optimize: bool, flag_disable_cache: bool, flag_cache_config: Option<String>, flag_create_cache_config: bool, flag_debug: bool, flag_g: bool, flag_enable_simd: bool, flag_lightbeam: bool, flag_cranelift: bool, flag_invoke: Option<String>, flag_preload: Vec<String>, flag_env: Vec<String>, flag_dir: Vec<String>, flag_mapdir: Vec<String>, flag_wasi_c: bool, } fn compute_preopen_dirs(flag_dir: &[String], flag_mapdir: &[String]) -> Vec<(String, File)> { let mut preopen_dirs = Vec::new(); for dir in flag_dir { let preopen_dir = preopen_dir(dir).unwrap_or_else(|err| { println!("error while pre-opening directory {}: {}", dir, err); exit(1); }); 
preopen_dirs.push((dir.clone(), preopen_dir)); } for mapdir in flag_mapdir { let parts: Vec<&str> = mapdir.split("::").collect(); if parts.len() != 2 { println!( "--mapdir argument must contain exactly one double colon ('::'), separating a \ guest directory name and a host directory name" ); exit(1); } let (key, value) = (parts[0], parts[1]); let preopen_dir = preopen_dir(value).unwrap_or_else(|err| { println!("error while pre-opening directory {}: {}", value, err); exit(1); }); preopen_dirs.push((key.to_string(), preopen_dir)); } preopen_dirs } fn compute_argv(argv0: &str, arg_arg: &[String]) -> Vec<String> { let mut result = Vec::new(); result.push( Path::new(argv0) .components() .next_back() .map(Component::as_os_str) .and_then(OsStr::to_str) .unwrap_or("") .to_owned(), ); for arg in arg_arg { result.push(arg.to_owned()); } result } fn compute_environ(flag_env: &[String]) -> Vec<(String, String)> { let mut result = Vec::new(); for env in flag_env { let split = env.splitn(2, '=').collect::<Vec<_>>(); if split.len() != 2 { println!( "environment variables must be of the form \"key=value\"; got \"{}\"", env ); } result.push((split[0].to_owned(), split[1].to_owned())); } result } fn main() -> Result<()> { let version = env!("CARGO_PKG_VERSION"); let args: Args = Docopt::new(USAGE) .and_then(|d| { d.help(true) .version(Some(String::from(version))) .deserialize() }) .unwrap_or_else(|e| e.exit()); let log_config = if args.flag_debug { pretty_env_logger::init(); None } else { let prefix = "wasmtime.dbg."; wasmtime_cli::init_file_per_thread_logger(prefix); Some(prefix) }; if args.flag_create_cache_config { match cache_create_new_config(args.flag_cache_config) { Ok(path) => { println!( "Successfully created new configuation file at {}", path.display() ); return Ok(()); } Err(err) => { eprintln!("Error: {}", err); exit(1); } } } let errors = cache_init( !args.flag_disable_cache, args.flag_cache_config.as_ref(), log_config, ); if !errors.is_empty() { eprintln!("Cache 
initialization failed. Errors:"); for e in errors { eprintln!("-> {}", e); } exit(1); } let mut flag_builder = settings::builder(); let mut features: Features = Default::default(); flag_builder.enable("avoid_div_traps")?; let debug_info = args.flag_g; if cfg!(debug_assertions) { flag_builder.enable("enable_verifier")?; } if args.flag_enable_simd { flag_builder.enable("enable_simd")?; features.simd = true; } if args.flag_optimize { flag_builder.set("opt_level", "speed")?; } let strategy = pick_compilation_strategy(args.flag_cranelift, args.flag_lightbeam); let mut config = Config::new(); config .features(features) .flags(settings::Flags::new(flag_builder)) .debug_info(debug_info) .strategy(strategy); let engine = HostRef::new(Engine::new(&config)); let store = HostRef::new(Store::new(&engine)); let mut module_registry = HashMap::new(); let preopen_dirs = compute_preopen_dirs(&args.flag_dir, &args.flag_mapdir); let argv = compute_argv(&args.arg_file, &args.arg_arg); let environ = compute_environ(&args.flag_env); let wasi_unstable = HostRef::new(if args.flag_wasi_c { #[cfg(feature = "wasi-c")] { let global_exports = store.borrow().global_exports().clone(); let handle = instantiate_wasi_c(global_exports, &preopen_dirs, &argv, &environ)?; Instance::from_handle(&store, handle) } #[cfg(not(feature = "wasi-c"))] { bail!("wasi-c feature not enabled at build time") } } else { create_wasi_instance_snapshot_0(&store, &preopen_dirs, &argv, &environ)? 
}); let wasi_snapshot_preview1 = HostRef::new(create_wasi_instance( &store, &preopen_dirs, &argv, &environ, )?); module_registry.insert("wasi_unstable".to_owned(), wasi_unstable); module_registry.insert("wasi_snapshot_preview1".to_owned(), wasi_snapshot_preview1); for filename in &args.flag_preload { let path = Path::new(&filename); instantiate_module(&store, &module_registry, path) .with_context(|| format!("failed to process preload at `{}`", path.display()))?; } let path = Path::new(&args.arg_file); handle_module(&store, &module_registry, &args, path) .with_context(|| format!("failed to process main module `{}`", path.display()))?; Ok(()) } fn instantiate_module( store: &HostRef<Store>, module_registry: &HashMap<String, HostRef<Instance>>, path: &Path, ) -> Result<(HostRef<Instance>, HostRef<Module>, Vec<u8>)> { let data = wat::parse_file(path.to_path_buf())?; let module = HostRef::new(Module::new(store, &data)?); let imports = module .borrow() .imports() .iter() .map(|i| { let module_name = i.module(); if let Some(instance) = module_registry.get(module_name) { let field_name = i.name(); if let Some(export) = instance.borrow().find_export_by_name(field_name) { Ok(export.clone()) } else { bail!( "Import {} was not found in module {}", field_name, module_name ) } } else { bail!("Import module {} was not found", module_name) } }) .collect::<Result<Vec<_>, _>>()?; let instance = HostRef::new(Instance::new(store, &module, &imports)?); Ok((instance, module, data)) } fn handle_module( store: &HostRef<Store>, module_registry: &HashMap<String, HostRef<Instance>>, args: &Args, path: &Path, ) -> Result<()> { let (instance, module, data) = instantiate_module(store, module_registry, path)?; if let Some(f) = &args.flag_invoke { let data = ModuleData::new(&data)?; invoke_export(instance, &data, f, args)?; } else if module .borrow() .exports() .iter() .find(|export| export.name().is_empty()) .is_some() { let data = ModuleData::new(&data)?; invoke_export(instance, &data, "", 
args)?; } else { let data = ModuleData::new(&data)?; invoke_export(instance, &data, "_start", args)?; } Ok(()) } fn invoke_export( instance: HostRef<Instance>, data: &ModuleData, name: &str, args: &Args, ) -> Result<()> { use wasm_webidl_bindings::ast; use wasmtime_interface_types::Value; let mut handle = instance.borrow().handle().clone(); let binding = data.binding_for_export(&mut handle, name)?; if binding.param_types()?.len() > 0 { eprintln!( "warning: using `--invoke` with a function that takes arguments \ is experimental and may break in the future" ); } let mut values = Vec::new(); let mut args = args.arg_arg.iter(); for ty in binding.param_types()? { let val = match args.next() { Some(s) => s, None => bail!("not enough arguments for `{}`", name), }; values.push(match ty { ast::WebidlScalarType::Long => Value::I32(val.parse()?), ast::WebidlScalarType::LongLong => Value::I64(val.parse()?), ast::WebidlScalarType::UnsignedLong => Value::U32(val.parse()?), ast::WebidlScalarType::UnsignedLongLong => Value::U64(val.parse()?), ast::WebidlScalarType::Float | ast::WebidlScalarType::UnrestrictedFloat => { Value::F32(val.parse()?) } ast::WebidlScalarType::Double | ast::WebidlScalarType::UnrestrictedDouble => { Value::F64(val.parse()?) } ast::WebidlScalarType::DomString => Value::String(val.to_string()), t => bail!("unsupported argument type {:?}", t), }); } let results = data .invoke_export(&instance, name, &values) .with_context(|| format!("failed to invoke `{}`", name))?; if results.len() > 0 { eprintln!( "warning: using `--invoke` with a function that returns values \ is experimental and may break in the future" ); } for result in results { println!("{}", result); } Ok(()) }
#![deny( missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features )] #![warn(unused_import_braces)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../clippy.toml")))] #![cfg_attr( feature = "cargo-clippy", allow(clippy::new_without_default, clippy::new_without_default_derive) )] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::option_map_unwrap_or, clippy::option_map_unwrap_or_else, clippy::unicode_not_nfc, clippy::use_self ) )] use anyhow::{bail, Context as _, Result}; use docopt::Docopt; use serde::Deserialize; use std::path::{Component, Path}; use std::{collections::HashMap, ffi::OsStr, fs::File, process::exit}; use wasi_common::preopen_dir; use wasmtime::{Config, Engine, HostRef, Instance, Module, Store}; use wasmtime_cli::pick_compilation_strategy; use wasmtime_environ::{cache_create_new_config, cache_init}; use wasmtime_environ::{settings, settings::Configurable}; use wasmtime_interface_types::ModuleData; use wasmtime_jit::Features; use wasmtime_wasi::create_wasi_instance; use wasmtime_wasi::old::snapshot_0::create_wasi_instance as create_wasi_instance_snapshot_0; #[cfg(feature = "wasi-c")] use wasmtime_wasi_c::instantiate_wasi_c; const USAGE: &str = " Wasm runner. Takes a binary (wasm) or text (wat) WebAssembly module and instantiates it, including calling the start function if one is present. Additional functions given with --invoke are then called. Usage: wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | \ --cache-config=<cache_config_file>] [--preload=<wasm>...] [--env=<env>...] [--dir=<dir>...] \ [--mapdir=<mapping>...] [--lightbeam | --cranelift] <file> [<arg>...] wasmtime [-odg] [--enable-simd] [--wasi-c] [--disable-cache | \ --cache-config=<cache_config_file>] [--env=<env>...] [--dir=<dir>...] \ [--mapdir=<mapping>...] --invoke=<fn> [--lightbeam | --cranelift] <file> [<arg>...] 
wasmtime --create-cache-config [--cache-config=<cache_config_file>] wasmtime --help | --version Options: --invoke=<fn> name of function to run -o, --optimize runs optimization passes on the translated functions --disable-cache disables cache system --cache-config=<cache_config_file> use specified cache configuration; can be used with --create-cache-config to specify custom file --create-cache-config creates default configuration and writes it to the disk, use with --cache-config to specify custom config file instead of default one -g generate debug information -d, --debug enable debug output on stderr/stdout --lightbeam use Lightbeam for all compilation --cranelift use Cranelift for all compilation --enable-simd enable proposed SIMD instructions --wasi-c enable the wasi-c implementation of `wasi_unstable` --preload=<wasm> load an additional wasm module before loading the main module --env=<env> pass an environment variable (\"key=value\") to the program --dir=<dir> grant access to the given host directory --mapdir=<mapping> where <mapping> has the form <wasmdir>::<hostdir>, grant access to the given host directory with the given wasm directory name -h, --help print this help message --version print the Cranelift version "; #[derive(Deserialize, Debug, Clone)] struct Args { arg_file: String, arg_arg: Vec<String>, flag_optimize: bool, flag_disable_cache: bool, flag_cache_config: Option<String>, flag_create_cache_config: bool, flag_debug: bool, flag_g: bool, flag_enable_simd: bool, flag_lightbeam: bool, flag_cranelift: bool, flag_invoke: Option<String>, flag_preload: Vec<String>, flag_env: Vec<String>, flag_dir: Vec<String>, flag_mapdir: Vec<String>, flag_wasi_c: bool, } fn compute_preopen_dirs(flag_dir: &[String], flag_mapdir: &[String]) -> Vec<(String, File)> { let mut preopen_dirs = Vec::new(); for dir in flag_dir { let preopen_dir = preopen_dir(dir).unwrap_or_else(|err| { println!("error while pre-opening directory {}: {}", dir, err); exit(1); }); 
preopen_dirs.push((dir.clone(), preopen_dir)); } for mapdir in flag_mapdir { let parts: Vec<&str> = mapdir.split("::").collect(); if parts.len() != 2 { println!( "--mapdir argument must contain exactly one double colon ('::'), separating a \ guest directory name and a host directory name" ); exit(1); } let (key, value) = (parts[0], parts[1]); let preopen_dir = preopen_dir(value).unwrap_or_else(|err| { println!("error while pre-opening directory {}: {}", value, err); exit(1); }); preopen_dirs.push((key.to_string(), preopen_dir)); } preopen_dirs } fn compute_argv(argv0: &str, arg_arg: &[String]) -> Vec<String> { let mut result = Vec::new(); result.push( Path::new(argv0) .components() .next_back() .map(Component::as_os_str) .and_then(OsStr::to_str) .unwrap_or("") .to_owned(), ); for arg in arg_arg { result.push(arg.to_owned()); } result } fn compute_environ(flag_env: &[String]) -> Vec<(String, String)> { let mut result = Vec::new(); for env in flag_env { let split = env.splitn(2, '=').collect::<Vec<_>>(); if split.len() != 2 { println!( "environment variables must be of the form \"key=value\"; got \"{}\"", env ); } result.push((split[0].to_owned(), split[1].to_owned())); } result } fn main() -> Result<()> { let version = env!("CARGO_PKG_VERSION"); let args: Args = Docopt::new(USAGE) .and_then(|d| { d.help(true) .version(Some(String::from(version))) .deserialize() }) .unwrap_or_else(|e| e.exit()); let log_config = if args.flag_debug { pretty_env_logger::init(); None } else { let prefix = "wasmtime.dbg."; wasmtime_cli::init_file_per_thread_logger(prefix); Some(prefix) }; if args.flag_create_cache_config { match cache_create_new_config(args.flag_cache_config) { Ok(path) => { println!( "Successfully created new configuation file at {}", path.display() ); return Ok(()); } Err(err) => { eprintln!("Error: {}", err); exit(1); } } } let errors = cache_init( !args.flag_disable_cache, args.flag_cache_config.as_ref(), log_config, );
let mut flag_builder = settings::builder(); let mut features: Features = Default::default(); flag_builder.enable("avoid_div_traps")?; let debug_info = args.flag_g; if cfg!(debug_assertions) { flag_builder.enable("enable_verifier")?; } if args.flag_enable_simd { flag_builder.enable("enable_simd")?; features.simd = true; } if args.flag_optimize { flag_builder.set("opt_level", "speed")?; } let strategy = pick_compilation_strategy(args.flag_cranelift, args.flag_lightbeam); let mut config = Config::new(); config .features(features) .flags(settings::Flags::new(flag_builder)) .debug_info(debug_info) .strategy(strategy); let engine = HostRef::new(Engine::new(&config)); let store = HostRef::new(Store::new(&engine)); let mut module_registry = HashMap::new(); let preopen_dirs = compute_preopen_dirs(&args.flag_dir, &args.flag_mapdir); let argv = compute_argv(&args.arg_file, &args.arg_arg); let environ = compute_environ(&args.flag_env); let wasi_unstable = HostRef::new(if args.flag_wasi_c { #[cfg(feature = "wasi-c")] { let global_exports = store.borrow().global_exports().clone(); let handle = instantiate_wasi_c(global_exports, &preopen_dirs, &argv, &environ)?; Instance::from_handle(&store, handle) } #[cfg(not(feature = "wasi-c"))] { bail!("wasi-c feature not enabled at build time") } } else { create_wasi_instance_snapshot_0(&store, &preopen_dirs, &argv, &environ)? 
}); let wasi_snapshot_preview1 = HostRef::new(create_wasi_instance( &store, &preopen_dirs, &argv, &environ, )?); module_registry.insert("wasi_unstable".to_owned(), wasi_unstable); module_registry.insert("wasi_snapshot_preview1".to_owned(), wasi_snapshot_preview1); for filename in &args.flag_preload { let path = Path::new(&filename); instantiate_module(&store, &module_registry, path) .with_context(|| format!("failed to process preload at `{}`", path.display()))?; } let path = Path::new(&args.arg_file); handle_module(&store, &module_registry, &args, path) .with_context(|| format!("failed to process main module `{}`", path.display()))?; Ok(()) } fn instantiate_module( store: &HostRef<Store>, module_registry: &HashMap<String, HostRef<Instance>>, path: &Path, ) -> Result<(HostRef<Instance>, HostRef<Module>, Vec<u8>)> { let data = wat::parse_file(path.to_path_buf())?; let module = HostRef::new(Module::new(store, &data)?); let imports = module .borrow() .imports() .iter() .map(|i| { let module_name = i.module(); if let Some(instance) = module_registry.get(module_name) { let field_name = i.name(); if let Some(export) = instance.borrow().find_export_by_name(field_name) { Ok(export.clone()) } else { bail!( "Import {} was not found in module {}", field_name, module_name ) } } else { bail!("Import module {} was not found", module_name) } }) .collect::<Result<Vec<_>, _>>()?; let instance = HostRef::new(Instance::new(store, &module, &imports)?); Ok((instance, module, data)) } fn handle_module( store: &HostRef<Store>, module_registry: &HashMap<String, HostRef<Instance>>, args: &Args, path: &Path, ) -> Result<()> { let (instance, module, data) = instantiate_module(store, module_registry, path)?; if let Some(f) = &args.flag_invoke { let data = ModuleData::new(&data)?; invoke_export(instance, &data, f, args)?; } else if module .borrow() .exports() .iter() .find(|export| export.name().is_empty()) .is_some() { let data = ModuleData::new(&data)?; invoke_export(instance, &data, "", 
args)?; } else { let data = ModuleData::new(&data)?; invoke_export(instance, &data, "_start", args)?; } Ok(()) } fn invoke_export( instance: HostRef<Instance>, data: &ModuleData, name: &str, args: &Args, ) -> Result<()> { use wasm_webidl_bindings::ast; use wasmtime_interface_types::Value; let mut handle = instance.borrow().handle().clone(); let binding = data.binding_for_export(&mut handle, name)?; if binding.param_types()?.len() > 0 { eprintln!( "warning: using `--invoke` with a function that takes arguments \ is experimental and may break in the future" ); } let mut values = Vec::new(); let mut args = args.arg_arg.iter(); for ty in binding.param_types()? { let val = match args.next() { Some(s) => s, None => bail!("not enough arguments for `{}`", name), }; values.push(match ty { ast::WebidlScalarType::Long => Value::I32(val.parse()?), ast::WebidlScalarType::LongLong => Value::I64(val.parse()?), ast::WebidlScalarType::UnsignedLong => Value::U32(val.parse()?), ast::WebidlScalarType::UnsignedLongLong => Value::U64(val.parse()?), ast::WebidlScalarType::Float | ast::WebidlScalarType::UnrestrictedFloat => { Value::F32(val.parse()?) } ast::WebidlScalarType::Double | ast::WebidlScalarType::UnrestrictedDouble => { Value::F64(val.parse()?) } ast::WebidlScalarType::DomString => Value::String(val.to_string()), t => bail!("unsupported argument type {:?}", t), }); } let results = data .invoke_export(&instance, name, &values) .with_context(|| format!("failed to invoke `{}`", name))?; if results.len() > 0 { eprintln!( "warning: using `--invoke` with a function that returns values \ is experimental and may break in the future" ); } for result in results { println!("{}", result); } Ok(()) }
if !errors.is_empty() { eprintln!("Cache initialization failed. Errors:"); for e in errors { eprintln!("-> {}", e); } exit(1); }
if_condition
[ { "content": "fn write_stats_file(path: &Path, stats: &ModuleCacheStatistics) -> bool {\n\n toml::to_string_pretty(&stats)\n\n .map_err(|err| {\n\n warn!(\n\n \"Failed to serialize stats file, path: {}, err: {}\",\n\n path.display(),\n\n err\n\n )\n\n })\n\n .and_then(|serialized| {\n\n if fs_write_atomic(path, \"stats\", serialized.as_bytes()) {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n })\n\n .is_ok()\n\n}\n\n\n", "file_path": "crates/environ/src/cache/worker.rs", "rank": 0, "score": 453620.54576932255 }, { "content": "pub fn instantiate(data: &[u8], bin_name: &str, workspace: Option<&Path>) -> anyhow::Result<()> {\n\n // Prepare runtime\n\n let mut flag_builder = settings::builder();\n\n\n\n // Enable proper trap for division\n\n flag_builder\n\n .enable(\"avoid_div_traps\")\n\n .context(\"error while enabling proper division trap\")?;\n\n\n\n let mut config = Config::new();\n\n config.flags(settings::Flags::new(flag_builder));\n\n let engine = HostRef::new(Engine::new(&config));\n\n let store = HostRef::new(Store::new(&engine));\n\n\n\n let global_exports = store.borrow().global_exports().clone();\n\n let get_preopens = |workspace: Option<&Path>| -> anyhow::Result<Vec<_>> {\n\n if let Some(workspace) = workspace {\n\n let preopen_dir = wasi_common::preopen_dir(workspace)\n\n .context(format!(\"error while preopening {:?}\", workspace))?;\n\n\n", "file_path": "crates/test-programs/tests/wasm_tests/runtime.rs", "rank": 1, "score": 453610.8249368341 }, { "content": "fn start_test_module(out: &mut String, testsuite: &str) -> anyhow::Result<()> {\n\n writeln!(out, \"mod {} {{\", testsuite)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 2, "score": 447833.84980145714 }, { "content": "/// Creates a new configuration file at specified path, or default path if None is passed.\n\n/// Fails if file already exists.\n\npub fn create_new_config<P: AsRef<Path> + Debug>(\n\n config_file: Option<P>,\n\n) -> Result<PathBuf, String> {\n\n trace!(\"Creating new 
config file, path: {:?}\", config_file);\n\n\n\n let config_file = config_file.as_ref().map_or_else(\n\n || DEFAULT_CONFIG_PATH.as_ref().map(|p| p.as_ref()),\n\n |p| Ok(p.as_ref()),\n\n )?;\n\n\n\n if config_file.exists() {\n\n Err(format!(\n\n \"Specified config file already exists! Path: {}\",\n\n config_file.display()\n\n ))?;\n\n }\n\n\n\n let parent_dir = config_file\n\n .parent()\n\n .ok_or_else(|| format!(\"Invalid cache config path: {}\", config_file.display()))?;\n", "file_path": "crates/environ/src/cache/config.rs", "rank": 3, "score": 442507.7773556998 }, { "content": "/// Same as `open_scratch_directory` above, except uses `wasi_snapshot_preview1`\n\n/// APIs instead of `wasi_unstable` ones.\n\n///\n\n/// This is intended to replace `open_scratch_directory` once all the tests are\n\n/// updated.\n\npub fn open_scratch_directory_new(path: &str) -> Result<wasi::Fd, String> {\n\n unsafe {\n\n for i in 3.. {\n\n let stat = match wasi::fd_prestat_get(i) {\n\n Ok(s) => s,\n\n Err(_) => break,\n\n };\n\n if stat.pr_type != wasi::PREOPENTYPE_DIR {\n\n continue;\n\n }\n\n let mut dst = Vec::with_capacity(stat.u.dir.pr_name_len);\n\n if wasi::fd_prestat_dir_name(i, dst.as_mut_ptr(), dst.capacity()).is_err() {\n\n continue;\n\n }\n\n dst.set_len(stat.u.dir.pr_name_len);\n\n if dst == path.as_bytes() {\n\n return Ok(wasi::path_open(i, 0, \".\", wasi::OFLAGS_DIRECTORY, 0, 0, 0)\n\n .expect(\"failed to open dir\"));\n\n }\n\n }\n\n\n\n Err(format!(\"failed to find scratch dir\"))\n\n }\n\n}\n", "file_path": "crates/test-programs/wasi-tests/src/lib.rs", "rank": 4, "score": 438471.3368527488 }, { "content": "fn read_stats_file(path: &Path) -> Option<ModuleCacheStatistics> {\n\n fs::read(path)\n\n .map_err(|err| {\n\n trace!(\n\n \"Failed to read stats file, path: {}, err: {}\",\n\n path.display(),\n\n err\n\n )\n\n })\n\n .and_then(|bytes| {\n\n toml::from_slice::<ModuleCacheStatistics>(&bytes[..]).map_err(|err| {\n\n trace!(\n\n \"Failed to parse stats file, path: {}, 
err: {}\",\n\n path.display(),\n\n err,\n\n )\n\n })\n\n })\n\n .ok()\n\n}\n\n\n", "file_path": "crates/environ/src/cache/worker.rs", "rank": 5, "score": 422890.5927175912 }, { "content": "// Assumption: path inside cache directory.\n\n// Then, we don't have to use sound OS-specific exclusive file access.\n\n// Note: there's no need to remove temporary file here - cleanup task will do it later.\n\nfn fs_write_atomic(path: &Path, reason: &str, contents: &[u8]) -> bool {\n\n let lock_path = path.with_extension(format!(\"wip-atomic-write-{}\", reason));\n\n fs::OpenOptions::new()\n\n .create_new(true) // atomic file creation (assumption: no one will open it without this flag)\n\n .write(true)\n\n .open(&lock_path)\n\n .and_then(|mut file| file.write_all(contents))\n\n // file should go out of scope and be closed at this point\n\n .and_then(|()| fs::rename(&lock_path, &path)) // atomic file rename\n\n .map_err(|err| {\n\n warn!(\n\n \"Failed to write file with rename, lock path: {}, target path: {}, err: {}\",\n\n lock_path.display(),\n\n path.display(),\n\n err\n\n )\n\n })\n\n .is_ok()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "crates/environ/src/cache.rs", "rank": 6, "score": 420565.1269410146 }, { "content": "pub fn extract_exec_name_from_path(path: &Path) -> anyhow::Result<String> {\n\n path.file_stem()\n\n .and_then(|s| s.to_str())\n\n .map(String::from)\n\n .ok_or_else(|| {\n\n anyhow::anyhow!(\n\n \"couldn't extract the file stem from path {}\",\n\n path.display()\n\n )\n\n })\n\n}\n", "file_path": "crates/test-programs/tests/wasm_tests/utils.rs", "rank": 7, "score": 416696.6292800589 }, { "content": "/// Opens a fresh file descriptor for `path` where `path` should be a preopened\n\n/// directory. This is intended to be used with `wasi_unstable`, not with\n\n/// `wasi_snapshot_preview1`. 
This is getting phased out and will likely be\n\n/// deleted soon.\n\npub fn open_scratch_directory(path: &str) -> Result<wasi_unstable::Fd, String> {\n\n // Open the scratch directory.\n\n let dir_fd: wasi_unstable::Fd = unsafe {\n\n let cstr = CString::new(path.as_bytes()).unwrap();\n\n libc::open(cstr.as_ptr(), libc::O_RDONLY | libc::O_DIRECTORY)\n\n } as wasi_unstable::Fd;\n\n\n\n if (dir_fd as std::os::raw::c_int) < 0 {\n\n Err(format!(\n\n \"error opening scratch directory '{}': {}\",\n\n path,\n\n io::Error::last_os_error()\n\n ))\n\n } else {\n\n Ok(dir_fd)\n\n }\n\n}\n\n\n", "file_path": "crates/test-programs/wasi-tests/src/lib.rs", "rank": 8, "score": 398951.0891619626 }, { "content": "fn translate_wat(wat: &str) -> ExecutableModule {\n\n let wasm = wat::parse_str(wat).unwrap();\n\n let compiled = translate(&wasm).unwrap();\n\n compiled\n\n}\n\n\n", "file_path": "crates/lightbeam/tests/wrongs.rs", "rank": 9, "score": 383960.69898568874 }, { "content": "fn translate_wat(wat: &str) -> ExecutableModule {\n\n let wasm = wat::parse_str(wat).unwrap();\n\n let compiled = translate(&wasm).unwrap();\n\n compiled\n\n}\n\n\n\nmod op32 {\n\n use super::{lazy_static, quickcheck, translate_wat, ExecutableModule};\n\n\n\n macro_rules! binop_test {\n\n ($op:ident, $func:expr) => {\n\n mod $op {\n\n use super::{lazy_static, quickcheck, translate_wat, ExecutableModule};\n\n use std::sync::Once;\n\n\n\n const OP: &str = stringify!($op);\n\n\n\n lazy_static! 
{\n\n static ref AS_PARAMS: ExecutableModule = translate_wat(&format!(\n\n \"(module (func (param i32) (param i32) (result i32)\n", "file_path": "crates/lightbeam/tests/quickchecks.rs", "rank": 10, "score": 383960.69898568874 }, { "content": "pub fn preopen_dir<P: AsRef<Path>>(path: P) -> Result<File> {\n\n File::open(path).map_err(Into::into)\n\n}\n", "file_path": "crates/wasi-common/src/sys/unix/mod.rs", "rank": 11, "score": 382718.5097557956 }, { "content": "pub fn preopen_dir<P: AsRef<Path>>(path: P) -> Result<File> {\n\n use std::fs::OpenOptions;\n\n use std::os::windows::fs::OpenOptionsExt;\n\n use winapi::um::winbase::FILE_FLAG_BACKUP_SEMANTICS;\n\n\n\n // To open a directory using CreateFile, specify the\n\n // FILE_FLAG_BACKUP_SEMANTICS flag as part of dwFileFlags...\n\n // cf. https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/nf-fileapi-createfile2\n\n OpenOptions::new()\n\n .create(false)\n\n .write(true)\n\n .read(true)\n\n .attributes(FILE_FLAG_BACKUP_SEMANTICS)\n\n .open(path)\n\n .map_err(Into::into)\n\n}\n", "file_path": "crates/wasi-common/src/sys/windows/mod.rs", "rank": 12, "score": 382718.5097557956 }, { "content": "/// Parses the Start section of the wasm module.\n\npub fn start(_index: u32) -> Result<(), Error> {\n\n // TODO\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 13, "score": 381401.7777912843 }, { "content": "fn create_file_with_mtime(filename: &Path, contents: &str, offset_sign: &str, offset: &Duration) {\n\n fs::write(filename, contents).expect(\"Failed to create a file\");\n\n let mtime = match offset_sign {\n\n \"past\" => system_time_stub::NOW\n\n .checked_sub(*offset)\n\n .expect(\"Failed to calculate new mtime\"),\n\n \"future\" => system_time_stub::NOW\n\n .checked_add(*offset)\n\n .expect(\"Failed to calculate new mtime\"),\n\n _ => unreachable!(),\n\n };\n\n filetime::set_file_mtime(filename, mtime.into()).expect(\"Failed to set mtime\");\n\n}\n", "file_path": 
"crates/environ/src/cache/worker/tests.rs", "rank": 14, "score": 368948.1487485683 }, { "content": "/// Translate from a slice of bytes holding a wasm module.\n\npub fn translate_only(data: &[u8]) -> Result<TranslatedModule, Error> {\n\n let mut reader = ModuleReader::new(data)?;\n\n let mut output = TranslatedModule::default();\n\n\n\n reader.skip_custom_sections()?;\n\n if reader.eof() {\n\n return Ok(output);\n\n }\n\n let mut section = reader.read()?;\n\n\n\n if let SectionCode::Type = section.code {\n\n let types_reader = section.get_type_section_reader()?;\n\n output.ctx.types = translate_sections::type_(types_reader)?;\n\n\n\n reader.skip_custom_sections()?;\n\n if reader.eof() {\n\n return Ok(output);\n\n }\n\n section = reader.read()?;\n\n }\n", "file_path": "crates/lightbeam/src/module.rs", "rank": 15, "score": 368523.677455846 }, { "content": "/// Parses the Function section of the wasm module.\n\npub fn function(functions: FunctionSectionReader) -> Result<Vec<u32>, Error> {\n\n functions\n\n .into_iter()\n\n .map(|r| r.map_err(Into::into))\n\n .collect()\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 16, "score": 367696.65637631796 }, { "content": "/// Initialize the table memory from the provided initializers.\n\nfn initialize_tables(instance: &mut Instance) -> Result<(), InstantiationError> {\n\n let vmctx: *mut VMContext = instance.vmctx_mut();\n\n let module = Rc::clone(&instance.module);\n\n for init in &module.table_elements {\n\n let start = get_table_init_start(init, instance);\n\n let slice = get_table_slice(\n\n init,\n\n &instance.module,\n\n &mut instance.tables,\n\n &instance.vmctx,\n\n &instance.offsets,\n\n );\n\n\n\n let subslice = &mut slice[start..start + init.elements.len()];\n\n for (i, func_idx) in init.elements.iter().enumerate() {\n\n let callee_sig = instance.module.functions[*func_idx];\n\n let (callee_ptr, callee_vmctx) =\n\n if let Some(index) = instance.module.defined_func_index(*func_idx) 
{\n\n (instance.finished_functions[index], vmctx)\n\n } else {\n", "file_path": "crates/runtime/src/instance.rs", "rank": 17, "score": 367188.72144783975 }, { "content": "/// Return an instance implementing the \"spectest\" interface used in the\n\n/// spec testsuite.\n\npub fn instantiate_spectest(store: &HostRef<Store>) -> HashMap<&'static str, Extern> {\n\n let mut ret = HashMap::new();\n\n\n\n let ty = FuncType::new(Box::new([]), Box::new([]));\n\n let func = wrap(store, ty, |_params, _results| Ok(()));\n\n ret.insert(\"print\", Extern::Func(HostRef::new(func)));\n\n\n\n let ty = FuncType::new(Box::new([ValType::I32]), Box::new([]));\n\n let func = wrap(store, ty, |params, _results| {\n\n println!(\"{}: i32\", params[0].unwrap_i32());\n\n Ok(())\n\n });\n\n ret.insert(\"print_i32\", Extern::Func(HostRef::new(func)));\n\n\n\n let ty = FuncType::new(Box::new([ValType::I64]), Box::new([]));\n\n let func = wrap(store, ty, |params, _results| {\n\n println!(\"{}: i64\", params[0].unwrap_i64());\n\n Ok(())\n\n });\n\n ret.insert(\"print_i64\", Extern::Func(HostRef::new(func)));\n", "file_path": "crates/wast/src/spectest.rs", "rank": 18, "score": 363718.3859435647 }, { "content": "pub fn pick_compilation_strategy(cranelift: bool, lightbeam: bool) -> CompilationStrategy {\n\n // Decide how to compile.\n\n match (lightbeam, cranelift) {\n\n #[cfg(feature = \"lightbeam\")]\n\n (true, false) => CompilationStrategy::Lightbeam,\n\n #[cfg(not(feature = \"lightbeam\"))]\n\n (true, false) => panic!(\"--lightbeam given, but Lightbeam support is not enabled\"),\n\n (false, true) => CompilationStrategy::Cranelift,\n\n (false, false) => CompilationStrategy::Auto,\n\n (true, true) => panic!(\"Can't enable --cranelift and --lightbeam at the same time\"),\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 363382.88221473986 }, { "content": "/// Read a global in the given instance identified by an export name.\n\npub fn get(instance: &InstanceHandle, global_name: &str) 
-> Result<RuntimeValue, ActionError> {\n\n let (definition, global) = match unsafe { instance.lookup_immutable(global_name) } {\n\n Some(Export::Global {\n\n definition,\n\n vmctx: _,\n\n global,\n\n }) => (definition, global),\n\n Some(_) => {\n\n return Err(ActionError::Kind(format!(\n\n \"exported item \\\"{}\\\" is not a global variable\",\n\n global_name\n\n )));\n\n }\n\n None => {\n\n return Err(ActionError::Field(format!(\n\n \"no export named \\\"{}\\\"\",\n\n global_name\n\n )));\n\n }\n\n };\n", "file_path": "crates/jit/src/action.rs", "rank": 20, "score": 363226.251682078 }, { "content": "pub fn prepare_workspace(exe_name: &str) -> anyhow::Result<TempDir> {\n\n let prefix = format!(\"wasi_common_{}\", exe_name);\n\n let tempdir = Builder::new().prefix(&prefix).tempdir()?;\n\n Ok(tempdir)\n\n}\n\n\n", "file_path": "crates/test-programs/tests/wasm_tests/utils.rs", "rank": 21, "score": 357380.25069286174 }, { "content": "fn check_table_init_bounds(instance: &mut Instance) -> Result<(), InstantiationError> {\n\n let module = Rc::clone(&instance.module);\n\n for init in &module.table_elements {\n\n let start = get_table_init_start(init, instance);\n\n let slice = get_table_slice(\n\n init,\n\n &instance.module,\n\n &mut instance.tables,\n\n &instance.vmctx,\n\n &instance.offsets,\n\n );\n\n\n\n if slice.get_mut(start..start + init.elements.len()).is_none() {\n\n return Err(InstantiationError::Link(LinkError(\n\n \"elements segment does not fit\".to_owned(),\n\n )));\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance.rs", "rank": 22, "score": 356823.86883161316 }, { "content": "pub fn translate(data: &[u8]) -> Result<ExecutableModule, Error> {\n\n translate_only(data).map(|m| m.instantiate())\n\n}\n\n\n", "file_path": "crates/lightbeam/src/module.rs", "rank": 23, "score": 355206.41086275934 }, { "content": "#[test]\n\nfn test_cache_write_default_config() {\n\n let dir = tempfile::tempdir().expect(\"Can't create temporary 
directory\");\n\n let config_path = dir.path().join(\"cache-config.toml\");\n\n\n\n let result = cache_create_new_config(Some(&config_path));\n\n assert!(result.is_ok());\n\n assert!(config_path.exists());\n\n assert_eq!(config_path, result.unwrap());\n\n}\n", "file_path": "crates/environ/tests/cache_write_default_config.rs", "rank": 24, "score": 354466.9364206166 }, { "content": "/// Initializes the cache system. Should be called exactly once,\n\n/// and before using the cache system. Otherwise it can panic.\n\n/// Returns list of errors. If empty, initialization succeeded.\n\npub fn init<P: AsRef<Path> + Debug>(\n\n enabled: bool,\n\n config_file: Option<P>,\n\n init_file_per_thread_logger: Option<&'static str>,\n\n) -> &'static Vec<String> {\n\n INIT_CALLED\n\n .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)\n\n .expect(\"Cache system init must be called at most once\");\n\n assert!(\n\n CONFIG.r#try().is_none(),\n\n \"Cache system init must be called before using the system.\"\n\n );\n\n let conf_file_str = format!(\"{:?}\", config_file);\n\n let conf = CONFIG.call_once(|| CacheConfig::from_file(enabled, config_file));\n\n if conf.errors.is_empty() {\n\n if conf.enabled() {\n\n worker::init(init_file_per_thread_logger);\n\n }\n\n debug!(\"Cache init(\\\"{}\\\"): {:#?}\", conf_file_str, conf)\n\n } else {\n\n error!(\n\n \"Cache init(\\\"{}\\\"): errors: {:#?}\",\n\n conf_file_str, conf.errors,\n\n )\n\n }\n\n &conf.errors\n\n}\n\n\n", "file_path": "crates/environ/src/cache/config.rs", "rank": 25, "score": 353087.91246137396 }, { "content": "fn finish_test_module(out: &mut String) -> anyhow::Result<()> {\n\n out.push_str(\"}\\n\");\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 26, "score": 352346.7982902356 }, { "content": "pub fn is_wasi_module(name: &str) -> bool {\n\n // FIXME: this should be more conservative, but while WASI is in flux and\n\n // we're figuring out how to support multiple revisions, this should do the\n\n // 
trick.\n\n name.starts_with(\"wasi\")\n\n}\n", "file_path": "crates/wasi/src/lib.rs", "rank": 27, "score": 349623.25140849606 }, { "content": "pub fn is_wasi_module(name: &str) -> bool {\n\n // FIXME: this should be more conservative, but while WASI is in flux and\n\n // we're figuring out how to support multiple revisions, this should do the\n\n // trick.\n\n name.starts_with(\"wasi\")\n\n}\n", "file_path": "crates/wasi/src/old/snapshot_0/mod.rs", "rank": 28, "score": 339734.8430273473 }, { "content": "struct InstanceTranslateContext(pub wasmtime::HostRef<wasmtime::Instance>);\n\n\n\nimpl TranslateContext for InstanceTranslateContext {\n\n fn invoke_alloc(&mut self, alloc_func_name: &str, len: i32) -> Result<i32> {\n\n let alloc = self\n\n .0\n\n .borrow()\n\n .find_export_by_name(alloc_func_name)\n\n .ok_or_else(|| format_err!(\"failed to find alloc function `{}`\", alloc_func_name))?\n\n .func()\n\n .ok_or_else(|| format_err!(\"`{}` is not a (alloc) function\", alloc_func_name))?\n\n .clone();\n\n let alloc_args = vec![wasmtime::Val::I32(len)];\n\n let results = match alloc.borrow().call(&alloc_args) {\n\n Ok(values) => values,\n\n Err(trap) => bail!(\"trapped: {:?}\", trap),\n\n };\n\n if results.len() != 1 {\n\n bail!(\"allocator function wrong number of results\");\n\n }\n", "file_path": "crates/interface-types/src/lib.rs", "rank": 29, "score": 339404.4051320654 }, { "content": "fn main() -> Result<(), Error> {\n\n // Initialize.\n\n println!(\"Initializing...\");\n\n let engine = HostRef::new(Engine::default());\n\n let store = HostRef::new(Store::new(&engine));\n\n\n\n // Load binary.\n\n println!(\"Loading binary...\");\n\n let binary = wat::parse_str(\n\n r#\"\n\n (module\n\n (memory (export \"memory\") 2 3)\n\n\n\n (func (export \"size\") (result i32) (memory.size))\n\n (func (export \"load\") (param i32) (result i32)\n\n (i32.load8_s (local.get 0))\n\n )\n\n (func (export \"store\") (param i32 i32)\n\n (i32.store8 (local.get 0) (local.get 1))\n\n )\n", 
"file_path": "crates/api/examples/memory.rs", "rank": 30, "score": 333762.5611144785 }, { "content": "fn new_module_cache_data(rng: &mut impl Rng) -> ModuleCacheData {\n\n let funcs = (0..rng.gen_range(0, 10))\n\n .map(|i| {\n\n let mut sm = SecondaryMap::new(); // doesn't implement from iterator\n\n sm.resize(i as usize * 2);\n\n sm.values_mut().enumerate().for_each(|(j, v)| {\n\n if rng.gen_bool(0.33) {\n\n *v = (j as u32) * 3 / 4\n\n }\n\n });\n\n CompiledFunction {\n\n body: (0..(i * 3 / 2)).collect(),\n\n jt_offsets: sm,\n\n unwind_info: (0..(i * 3 / 2)).collect(),\n\n }\n\n })\n\n .collect();\n\n\n\n let relocs = (0..rng.gen_range(1, 0x10))\n\n .map(|i| {\n", "file_path": "crates/environ/src/cache/tests.rs", "rank": 31, "score": 331917.43840297754 }, { "content": "fn new_module(rng: &mut impl Rng) -> Module {\n\n // There are way too many fields. Just fill in some of them.\n\n let mut m = Module::new();\n\n\n\n if rng.gen_bool(0.5) {\n\n m.signatures.push(ir::Signature {\n\n params: vec![],\n\n returns: vec![],\n\n call_conv: isa::CallConv::Fast,\n\n });\n\n }\n\n\n\n for i in 0..rng.gen_range(1, 0x8) {\n\n m.functions.push(SignatureIndex::new(i));\n\n }\n\n\n\n if rng.gen_bool(0.8) {\n\n m.memory_plans.push(MemoryPlan {\n\n memory: Memory {\n\n minimum: rng.gen(),\n", "file_path": "crates/environ/src/cache/tests.rs", "rank": 32, "score": 331869.83369098674 }, { "content": "fn new_isa(name: &str) -> Box<dyn isa::TargetIsa> {\n\n let shared_builder = settings::builder();\n\n let shared_flags = settings::Flags::new(shared_builder);\n\n isa::lookup(triple!(name))\n\n .expect(\"can't find specified isa\")\n\n .finish(shared_flags)\n\n}\n\n\n", "file_path": "crates/environ/src/cache/tests.rs", "rank": 33, "score": 331815.9989302904 }, { "content": "fn str_for_trace<'str>(ptr: *const i8, len: usize) -> Result<&'str str, str::Utf8Error> {\n\n str::from_utf8(unsafe { slice::from_raw_parts(ptr as *const u8, len) })\n\n}\n\n\n", "file_path": 
"crates/wasi-c/src/syscalls.rs", "rank": 34, "score": 326959.8246072349 }, { "content": "pub fn get_file_path(file: &File) -> Result<OsString> {\n\n use winapi::um::fileapi::GetFinalPathNameByHandleW;\n\n\n\n let mut raw_path: Vec<u16> = vec![0; WIDE_MAX_PATH as usize];\n\n\n\n let handle = file.as_raw_handle();\n\n let read_len =\n\n unsafe { GetFinalPathNameByHandleW(handle, raw_path.as_mut_ptr(), WIDE_MAX_PATH, 0) };\n\n\n\n if read_len == 0 {\n\n // failed to read\n\n return Err(winerror::WinError::last());\n\n }\n\n\n\n // obtain a slice containing the written bytes, and check for it being too long\n\n // (practically probably impossible)\n\n let written_bytes = raw_path\n\n .get(..read_len as usize)\n\n .ok_or(winerror::WinError::ERROR_BUFFER_OVERFLOW)?;\n\n\n\n Ok(OsString::from_wide(written_bytes))\n\n}\n\n\n", "file_path": "crates/wasi-common/winx/src/file.rs", "rank": 35, "score": 325758.8551453783 }, { "content": "struct ModuleCacheEntryInner<'config, 'worker> {\n\n mod_cache_path: PathBuf,\n\n cache_config: &'config CacheConfig,\n\n worker: &'worker Worker,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]\n\npub struct ModuleCacheData {\n\n compilation: Compilation,\n\n relocations: Relocations,\n\n address_transforms: ModuleAddressMap,\n\n value_ranges: ValueLabelsRanges,\n\n stack_slots: PrimaryMap<DefinedFuncIndex, ir::StackSlots>,\n\n traps: Traps,\n\n}\n\n\n", "file_path": "crates/environ/src/cache.rs", "rank": 36, "score": 314507.53573277144 }, { "content": "fn const_value(val: LabelValue) -> impl FnMut(&mut Assembler) {\n\n move |asm| match val {\n\n LabelValue::I32(val) => dynasm!(asm\n\n ; .dword val\n\n ),\n\n LabelValue::I64(val) => dynasm!(asm\n\n ; .qword val\n\n ),\n\n }\n\n}\n\n\n", "file_path": "crates/lightbeam/src/backend.rs", "rank": 37, "score": 302717.43591114227 }, { "content": "fn translate_loc(loc: ValueLoc, frame_info: Option<&FunctionFrameInfo>) -> Option<Vec<u8>> {\n\n match loc {\n\n ValueLoc::Reg(reg) => 
{\n\n let machine_reg = map_reg(reg).0 as u8;\n\n assert_lt!(machine_reg, 32); // FIXME\n\n Some(vec![gimli::constants::DW_OP_reg0.0 + machine_reg])\n\n }\n\n ValueLoc::Stack(ss) => {\n\n if let Some(frame_info) = frame_info {\n\n if let Some(ss_offset) = frame_info.stack_slots[ss].offset {\n\n use gimli::write::Writer;\n\n let endian = gimli::RunTimeEndian::Little;\n\n let mut writer = write::EndianVec::new(endian);\n\n writer\n\n .write_u8(gimli::constants::DW_OP_breg0.0 + X86_64::RBP.0 as u8)\n\n .expect(\"bp wr\");\n\n writer.write_sleb128(ss_offset as i64 + 16).expect(\"ss wr\");\n\n writer\n\n .write_u8(gimli::constants::DW_OP_deref.0 as u8)\n\n .expect(\"bp wr\");\n\n let buf = writer.into_vec();\n\n return Some(buf);\n\n }\n\n }\n\n None\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "crates/debug/src/transform/expression.rs", "rank": 38, "score": 301681.69681224023 }, { "content": "fn const_values(a: LabelValue, b: LabelValue) -> impl FnMut(&mut Assembler) {\n\n move |asm| {\n\n match a {\n\n LabelValue::I32(val) => dynasm!(asm\n\n ; .dword val\n\n ),\n\n LabelValue::I64(val) => dynasm!(asm\n\n ; .qword val\n\n ),\n\n }\n\n\n\n match b {\n\n LabelValue::I32(val) => dynasm!(asm\n\n ; .dword val\n\n ),\n\n LabelValue::I64(val) => dynasm!(asm\n\n ; .qword val\n\n ),\n\n }\n\n }\n", "file_path": "crates/lightbeam/src/backend.rs", "rank": 39, "score": 300329.87301540683 }, { "content": "/// Extract a valid Rust identifier from the stem of a path.\n\nfn extract_name(path: impl AsRef<Path>) -> String {\n\n path.as_ref()\n\n .file_stem()\n\n .expect(\"filename should have a stem\")\n\n .to_str()\n\n .expect(\"filename should be representable as a string\")\n\n .replace(\"-\", \"_\")\n\n .replace(\"/\", \"_\")\n\n}\n\n\n", "file_path": "build.rs", "rank": 40, "score": 299881.2265804759 }, { "content": "// Each of the tests included from `wast_testsuite_tests` will call this\n\n// function which actually executes the `wast` test suite given the 
`strategy`\n\n// to compile it.\n\nfn run_wast(wast: &str, strategy: CompilationStrategy) -> anyhow::Result<()> {\n\n let wast = Path::new(wast);\n\n let features = Features {\n\n simd: wast.iter().any(|s| s == \"simd\"),\n\n multi_value: wast.iter().any(|s| s == \"multi-value\"),\n\n ..Default::default()\n\n };\n\n\n\n let mut flag_builder = settings::builder();\n\n flag_builder.enable(\"enable_verifier\").unwrap();\n\n flag_builder.enable(\"avoid_div_traps\").unwrap();\n\n flag_builder.enable(\"enable_simd\").unwrap();\n\n\n\n let mut cfg = Config::new();\n\n cfg.strategy(strategy)\n\n .flags(settings::Flags::new(flag_builder))\n\n .features(features);\n\n let store = HostRef::new(Store::new(&HostRef::new(Engine::new(&cfg))));\n\n let mut wast_context = WastContext::new(store);\n\n wast_context.register_spectest()?;\n\n wast_context.run_file(wast)?;\n\n Ok(())\n\n}\n", "file_path": "tests/wast_testsuites.rs", "rank": 41, "score": 298447.9887380852 }, { "content": "fn write_at(mut file: &File, buf: &[u8], offset: u64) -> io::Result<usize> {\n\n // get current cursor position\n\n let cur_pos = file.seek(SeekFrom::Current(0))?;\n\n // perform a seek write by a specified offset\n\n let nwritten = file.seek_write(buf, offset)?;\n\n // rewind the cursor back to the original position\n\n file.seek(SeekFrom::Start(cur_pos))?;\n\n Ok(nwritten)\n\n}\n\n\n\n// TODO refactor common code with unix\n\npub(crate) fn fd_pread(\n\n file: &File,\n\n buf: &mut [u8],\n\n offset: wasi::__wasi_filesize_t,\n\n) -> Result<usize> {\n\n read_at(file, buf, offset).map_err(Into::into)\n\n}\n\n\n\n// TODO refactor common code with unix\n", "file_path": "crates/wasi-common/src/sys/windows/hostcalls_impl/fs.rs", "rank": 42, "score": 298296.4078308011 }, { "content": "#[derive(Deserialize, Debug)]\n\n#[serde(deny_unknown_fields)]\n\nstruct Config {\n\n cache: CacheConfig,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\n#[serde(deny_unknown_fields)]\n\npub struct CacheConfig {\n\n 
#[serde(skip)]\n\n errors: Vec<String>,\n\n\n\n enabled: bool,\n\n directory: Option<PathBuf>,\n\n #[serde(\n\n default,\n\n rename = \"worker-event-queue-size\",\n\n deserialize_with = \"deserialize_si_prefix\"\n\n )]\n\n worker_event_queue_size: Option<u64>,\n\n #[serde(rename = \"baseline-compression-level\")]\n\n baseline_compression_level: Option<i32>,\n", "file_path": "crates/environ/src/cache/config.rs", "rank": 43, "score": 298163.3770900572 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let data = wat::parse_str(WAT)?;\n\n let translated = translate(&data)?;\n\n let result: u32 = translated.execute_func(0, (5u32, 3u32))?;\n\n println!(\"f(5, 3) = {}\", result);\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/lightbeam/examples/test.rs", "rank": 44, "score": 297715.0101305719 }, { "content": "/// Parses the Global section of the wasm module.\n\npub fn global(globals: GlobalSectionReader) -> Result<(), Error> {\n\n for entry in globals {\n\n entry?; // TODO\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 45, "score": 297685.5821702922 }, { "content": "/// Parses the Import section of the wasm module.\n\npub fn import(imports: ImportSectionReader) -> Result<(), Error> {\n\n for entry in imports {\n\n entry?; // TODO\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 46, "score": 297685.5821702922 }, { "content": "/// Parses the Data section of the wasm module.\n\npub fn data(data: DataSectionReader) -> Result<(), Error> {\n\n for entry in data {\n\n entry?; // TODO\n\n }\n\n Ok(())\n\n}\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 47, "score": 297685.58217029215 }, { "content": "/// Parses the Export section of the wasm module.\n\npub fn export(exports: ExportSectionReader) -> Result<(), Error> {\n\n for entry in exports {\n\n entry?; // TODO\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", 
"rank": 48, "score": 297685.5821702922 }, { "content": "/// Parses the Element section of the wasm module.\n\npub fn element(elements: ElementSectionReader) -> Result<(), Error> {\n\n for entry in elements {\n\n entry?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 49, "score": 297685.5821702922 }, { "content": "// Used by `add_wrappers_to_module` defined in the macro above\n\nfn get_memory(vmctx: &mut VMContext) -> Result<&mut [u8], wasi::__wasi_errno_t> {\n\n unsafe {\n\n match vmctx.lookup_global_export(\"memory\") {\n\n Some(wasmtime_runtime::Export::Memory {\n\n definition,\n\n vmctx: _,\n\n memory: _,\n\n }) => Ok(std::slice::from_raw_parts_mut(\n\n (*definition).base,\n\n (*definition).current_length,\n\n )),\n\n x => {\n\n log::error!(\n\n \"no export named \\\"memory\\\", or the export isn't a mem: {:?}\",\n\n x\n\n );\n\n Err(wasi::__WASI_ERRNO_INVAL)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crates/wasi/src/instantiate.rs", "rank": 50, "score": 297562.90842146886 }, { "content": "#[test]\n\nfn test_cache_default_config_in_memory() {\n\n let errors = cache_init::<&str>(true, None, None);\n\n assert!(\n\n errors.is_empty(),\n\n \"This test loads config from the default location, if there's one. 
Make sure it's correct!\"\n\n );\n\n}\n", "file_path": "crates/environ/tests/cache_default_config_in_memory.rs", "rank": 51, "score": 296574.3891550724 }, { "content": "fn write_at(mut file: &File, buf: &[u8], offset: u64) -> io::Result<usize> {\n\n // get current cursor position\n\n let cur_pos = file.seek(SeekFrom::Current(0))?;\n\n // perform a seek write by a specified offset\n\n let nwritten = file.seek_write(buf, offset)?;\n\n // rewind the cursor back to the original position\n\n file.seek(SeekFrom::Start(cur_pos))?;\n\n Ok(nwritten)\n\n}\n\n\n\n// TODO refactor common code with unix\n\npub(crate) fn fd_pread(\n\n file: &File,\n\n buf: &mut [u8],\n\n offset: wasi::__wasi_filesize_t,\n\n) -> Result<usize> {\n\n read_at(file, buf, offset).map_err(Into::into)\n\n}\n\n\n\n// TODO refactor common code with unix\n", "file_path": "crates/wasi-common/src/old/snapshot_0/sys/windows/hostcalls_impl/fs.rs", "rank": 52, "score": 291702.47172710655 }, { "content": "// Used by `add_wrappers_to_module` defined in the macro above\n\nfn get_wasi_ctx(vmctx: &mut VMContext) -> Result<&mut WasiCtx, wasi::__wasi_errno_t> {\n\n unsafe {\n\n vmctx\n\n .host_state()\n\n .downcast_mut::<WasiCtx>()\n\n .ok_or_else(|| panic!(\"no host state named WasiCtx available\"))\n\n }\n\n}\n\n\n", "file_path": "crates/wasi/src/instantiate.rs", "rank": 53, "score": 289990.4032816506 }, { "content": "#[test]\n\nfn test_trap_return() -> Result<(), String> {\n\n struct HelloCallback;\n\n\n\n impl Callable for HelloCallback {\n\n fn call(&self, _params: &[Val], _results: &mut [Val]) -> Result<(), HostRef<Trap>> {\n\n Err(HostRef::new(Trap::new(\"test 123\")))\n\n }\n\n }\n\n\n\n let engine = HostRef::new(Engine::default());\n\n let store = HostRef::new(Store::new(&engine));\n\n let binary = parse_str(\n\n r#\"\n\n (module\n\n (func $hello (import \"\" \"hello\"))\n\n (func (export \"run\") (call $hello))\n\n )\n\n \"#,\n\n )\n\n .map_err(|e| format!(\"failed to parse WebAssembly text source: {}\", 
e))?;\n", "file_path": "crates/api/tests/traps.rs", "rank": 54, "score": 289660.22860376805 }, { "content": "// note: config loading during validation creates cache directory to canonicalize its path,\n\n// that's why these function and macro always use custom cache directory\n\n// note: tempdir removes directory when being dropped, so we need to return it to the caller,\n\n// so the paths are valid\n\npub fn test_prolog() -> (TempDir, PathBuf, PathBuf) {\n\n let _ = pretty_env_logger::try_init();\n\n let temp_dir = tempfile::tempdir().expect(\"Can't create temporary directory\");\n\n let cache_dir = temp_dir.path().join(\"cache-dir\");\n\n let config_path = temp_dir.path().join(\"cache-config.toml\");\n\n (temp_dir, cache_dir, config_path)\n\n}\n\n\n\nmacro_rules! load_config {\n\n ($config_path:ident, $content_fmt:expr, $cache_dir:ident) => {{\n\n let config_path = &$config_path;\n\n let content = format!(\n\n $content_fmt,\n\n cache_dir = toml::to_string_pretty(&format!(\"{}\", $cache_dir.display())).unwrap()\n\n );\n\n fs::write(config_path, content).expect(\"Failed to write test config file\");\n\n CacheConfig::from_file(true, Some(config_path))\n\n }};\n\n}\n\n\n\n// test without macros to test being disabled\n", "file_path": "crates/environ/src/cache/config/tests.rs", "rank": 55, "score": 289558.2205290567 }, { "content": "#[test]\n\nfn test_cache_fail_invalid_path_to_config() {\n\n let dir = tempfile::tempdir().expect(\"Can't create temporary directory\");\n\n let config_path = dir.path().join(\"cache-config.toml\"); // doesn't exist\n\n let errors = cache_init(true, Some(&config_path), None);\n\n assert!(!errors.is_empty());\n\n}\n", "file_path": "crates/environ/tests/cache_fail_invalid_path_to_config.rs", "rank": 56, "score": 288281.7610109788 }, { "content": "fn get_reloc_target_special_import_name(target: RelocationTarget) -> Option<&'static str> {\n\n Some(match target {\n\n RelocationTarget::Memory32Grow => &\"wasmtime_memory32_grow\",\n\n 
RelocationTarget::ImportedMemory32Grow => &\"wasmtime_memory32_grow\",\n\n RelocationTarget::Memory32Size => &\"wasmtime_memory32_size\",\n\n RelocationTarget::ImportedMemory32Size => &\"wasmtime_imported_memory32_size\",\n\n _ => return None,\n\n })\n\n}\n\n\n", "file_path": "crates/obj/src/function.rs", "rank": 57, "score": 286299.1005357257 }, { "content": "/// Construct a dummy global for the given global type.\n\npub fn dummy_global(store: &HostRef<Store>, ty: GlobalType) -> Result<Global, HostRef<Trap>> {\n\n let val = dummy_value(ty.content())?;\n\n Ok(Global::new(store, ty, val))\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 58, "score": 285437.00603370526 }, { "content": "/// Construct a dummy table for the given table type.\n\npub fn dummy_table(store: &HostRef<Store>, ty: TableType) -> Result<Table, HostRef<Trap>> {\n\n let init_val = dummy_value(&ty.element())?;\n\n Ok(Table::new(store, ty, init_val))\n\n}\n\n\n", "file_path": "crates/fuzzing/src/oracles/dummy.rs", "rank": 59, "score": 285437.00603370526 }, { "content": "fn run_example(name: &'static str) {\n\n let cargo = env::var(\"CARGO\").unwrap_or(\"cargo\".to_string());\n\n let pkg_dir = env!(\"CARGO_MANIFEST_DIR\");\n\n assert!(\n\n Command::new(cargo)\n\n .current_dir(pkg_dir)\n\n .stdout(Stdio::null())\n\n .args(&[\"run\", \"-q\", \"--example\", name])\n\n .status()\n\n .expect(\"success\")\n\n .success(),\n\n \"failed to execute the example '{}'\",\n\n name,\n\n );\n\n}\n\n\n", "file_path": "crates/api/tests/examples.rs", "rank": 60, "score": 283176.2203350194 }, { "content": "#[bench]\n\nfn bench_fibonacci_compile_run(b: &mut test::Bencher) {\n\n let wasm = wat::parse_str(FIBONACCI).unwrap();\n\n\n\n b.iter(|| translate(&wasm).unwrap().execute_func::<_, u32>(0, (20,)));\n\n}\n\n\n", "file_path": "crates/lightbeam/src/benches.rs", "rank": 61, "score": 282678.56048090383 }, { "content": "fn witx_path(phase: &str, id: &str) -> String {\n\n let root = 
env!(\"CARGO_MANIFEST_DIR\");\n\n format!(\"{}/../WASI/phases/{}/witx/{}.witx\", root, phase, id)\n\n}\n\n\n", "file_path": "crates/wasi-common/wig/src/utils.rs", "rank": 62, "score": 280364.77362698916 }, { "content": "/// Returns cache configuration.\n\n///\n\n/// If system has not been initialized, it disables it.\n\n/// You mustn't call init() after it.\n\npub fn cache_config() -> &'static CacheConfig {\n\n CONFIG.call_once(CacheConfig::new_cache_disabled)\n\n}\n\n\n", "file_path": "crates/environ/src/cache/config.rs", "rank": 63, "score": 279852.92788261815 }, { "content": "fn read_at(mut file: &File, buf: &mut [u8], offset: u64) -> io::Result<usize> {\n\n // get current cursor position\n\n let cur_pos = file.seek(SeekFrom::Current(0))?;\n\n // perform a seek read by a specified offset\n\n let nread = file.seek_read(buf, offset)?;\n\n // rewind the cursor back to the original position\n\n file.seek(SeekFrom::Start(cur_pos))?;\n\n Ok(nread)\n\n}\n\n\n", "file_path": "crates/wasi-common/src/sys/windows/hostcalls_impl/fs.rs", "rank": 64, "score": 278485.8089031065 }, { "content": "fn arbitrary_choice<'a, T, U>(input: &mut U, choices: &'a [T]) -> Result<Option<&'a T>, U::Error>\n\nwhere\n\n U: Unstructured + ?Sized,\n\n{\n\n if choices.is_empty() {\n\n Ok(None)\n\n } else {\n\n let i = usize::arbitrary(input)? 
% choices.len();\n\n Ok(Some(&choices[i]))\n\n }\n\n}\n\n\n", "file_path": "crates/fuzzing/src/generators/api.rs", "rank": 65, "score": 278337.3995585015 }, { "content": "fn create_returns_from_wasm_type(\n\n ty: wasmparser::TypeOrFuncType,\n\n) -> Result<Vec<SignlessType>, BinaryReaderError> {\n\n match ty {\n\n wasmparser::TypeOrFuncType::Type(ty) => Ok(Vec::from_iter(Type::from_wasm(ty))),\n\n wasmparser::TypeOrFuncType::FuncType(_) => Err(BinaryReaderError {\n\n message: \"Unsupported func type\",\n\n offset: -1isize as usize,\n\n }),\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct BrTable<L> {\n\n pub targets: Vec<BrTargetDrop<L>>,\n\n pub default: BrTargetDrop<L>,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]\n\npub enum NameTag {\n", "file_path": "crates/lightbeam/src/microwasm.rs", "rank": 66, "score": 277527.97160747304 }, { "content": "#[test]\n\nfn test_disabled() {\n\n let dir = tempfile::tempdir().expect(\"Can't create temporary directory\");\n\n let config_path = dir.path().join(\"cache-config.toml\");\n\n let config_content = \"[cache]\\n\\\n\n enabled = true\\n\";\n\n fs::write(&config_path, config_content).expect(\"Failed to write test config file\");\n\n let conf = CacheConfig::from_file(false, Some(&config_path));\n\n assert!(!conf.enabled());\n\n assert!(conf.errors.is_empty());\n\n\n\n let config_content = \"[cache]\\n\\\n\n enabled = false\\n\";\n\n fs::write(&config_path, config_content).expect(\"Failed to write test config file\");\n\n let conf = CacheConfig::from_file(true, Some(&config_path));\n\n assert!(!conf.enabled());\n\n assert!(conf.errors.is_empty());\n\n}\n\n\n", "file_path": "crates/environ/src/cache/config/tests.rs", "rank": 67, "score": 277395.73606264865 }, { "content": "/// Fetches a symbol by `name` and stores it in `cache`.\n\nfn fetch(cache: &AtomicUsize, name: &CStr) -> Option<usize> {\n\n match cache.load(SeqCst) {\n\n 0 => {}\n\n 1 => return None,\n\n n => return Some(n),\n\n }\n\n let sym = 
unsafe { libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr() as *const _) };\n\n let (val, ret) = if sym.is_null() {\n\n (1, None)\n\n } else {\n\n (sym as usize, Some(sym as usize))\n\n };\n\n cache.store(val, SeqCst);\n\n return ret;\n\n}\n", "file_path": "crates/wasi-common/src/sys/unix/bsd/filetime.rs", "rank": 68, "score": 277358.4664969376 }, { "content": "/// Parses the Memory section of the wasm module.\n\npub fn memory(memories: MemorySectionReader) -> Result<Vec<MemoryType>, Error> {\n\n memories\n\n .into_iter()\n\n .map(|r| r.map_err(Into::into))\n\n .collect()\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 69, "score": 275913.55617302307 }, { "content": "/// Parses the Table section of the wasm module.\n\npub fn table(tables: TableSectionReader) -> Result<Vec<TableType>, Error> {\n\n tables.into_iter().map(|r| r.map_err(Into::into)).collect()\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 70, "score": 275913.55617302307 }, { "content": "fn read_at(mut file: &File, buf: &mut [u8], offset: u64) -> io::Result<usize> {\n\n // get current cursor position\n\n let cur_pos = file.seek(SeekFrom::Current(0))?;\n\n // perform a seek read by a specified offset\n\n let nread = file.seek_read(buf, offset)?;\n\n // rewind the cursor back to the original position\n\n file.seek(SeekFrom::Start(cur_pos))?;\n\n Ok(nread)\n\n}\n\n\n", "file_path": "crates/wasi-common/src/old/snapshot_0/sys/windows/hostcalls_impl/fs.rs", "rank": 71, "score": 273453.9970919305 }, { "content": "fn convert_faerie_elf_to_loadable_file(bytes: &mut Vec<u8>, code_ptr: *const u8) {\n\n use std::ffi::CStr;\n\n use std::os::raw::c_char;\n\n\n\n assert!(\n\n bytes[0x4] == 2 && bytes[0x5] == 1,\n\n \"bits and endianess in .ELF\"\n\n );\n\n let e_phoff = unsafe { *(bytes.as_ptr().offset(0x20) as *const u64) };\n\n let e_phnum = unsafe { *(bytes.as_ptr().offset(0x38) as *const u16) };\n\n assert!(\n\n e_phoff == 0 && e_phnum == 0,\n\n \"program 
header table is empty\"\n\n );\n\n let e_phentsize = unsafe { *(bytes.as_ptr().offset(0x36) as *const u16) };\n\n assert_eq!(e_phentsize, 0x38, \"size of ph\");\n\n let e_shentsize = unsafe { *(bytes.as_ptr().offset(0x3A) as *const u16) };\n\n assert_eq!(e_shentsize, 0x40, \"size of sh\");\n\n\n\n let e_shoff = unsafe { *(bytes.as_ptr().offset(0x28) as *const u64) };\n", "file_path": "crates/debug/src/lib.rs", "rank": 72, "score": 273135.89701705397 }, { "content": "/// Parses the Type section of the wasm module.\n\npub fn type_(types_reader: TypeSectionReader) -> Result<Vec<FuncType>, Error> {\n\n types_reader\n\n .into_iter()\n\n .map(|r| r.map_err(Into::into))\n\n .collect()\n\n}\n\n\n", "file_path": "crates/lightbeam/src/translate_sections.rs", "rank": 73, "score": 271989.1311319282 }, { "content": "pub fn query_access_information(handle: RawHandle) -> Result<AccessMode> {\n\n let mut io_status_block = IO_STATUS_BLOCK::default();\n\n let mut info = FILE_ACCESS_INFORMATION::default();\n\n\n\n unsafe {\n\n let status = NtQueryInformationFile(\n\n handle,\n\n &mut io_status_block,\n\n &mut info as *mut _ as *mut c_void,\n\n std::mem::size_of::<FILE_ACCESS_INFORMATION>() as u32,\n\n FILE_INFORMATION_CLASS::FileAccessInformation,\n\n );\n\n\n\n if status != ntstatus::STATUS_SUCCESS {\n\n return Err(winerror::WinError::from_u32(RtlNtStatusToDosError(status)));\n\n }\n\n }\n\n\n\n Ok(AccessMode::from_bits_truncate(info.AccessFlags))\n\n}\n\n\n", "file_path": "crates/wasi-common/winx/src/file.rs", "rank": 74, "score": 271005.54927242594 }, { "content": "/// Fetches a symbol by `name` and stores it in `cache`.\n\nfn fetch(cache: &AtomicUsize, name: &CStr) -> Option<usize> {\n\n match cache.load(SeqCst) {\n\n 0 => {}\n\n 1 => return None,\n\n n => return Some(n),\n\n }\n\n let sym = unsafe { libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr() as *const _) };\n\n let (val, ret) = if sym.is_null() {\n\n (1, None)\n\n } else {\n\n (sym as usize, Some(sym as usize))\n\n };\n\n 
cache.store(val, SeqCst);\n\n return ret;\n\n}\n", "file_path": "crates/wasi-common/src/old/snapshot_0/sys/unix/bsd/filetime.rs", "rank": 75, "score": 269927.9008168342 }, { "content": "fn get_function_address_map<'data>(\n\n context: &Context,\n\n data: &FunctionBodyData<'data>,\n\n body_len: usize,\n\n isa: &dyn isa::TargetIsa,\n\n) -> FunctionAddressMap {\n\n let mut instructions = Vec::new();\n\n\n\n let func = &context.func;\n\n let mut ebbs = func.layout.ebbs().collect::<Vec<_>>();\n\n ebbs.sort_by_key(|ebb| func.offsets[*ebb]); // Ensure inst offsets always increase\n\n\n\n let encinfo = isa.encoding_info();\n\n for ebb in ebbs {\n\n for (offset, inst, size) in func.inst_offsets(ebb, &encinfo) {\n\n let srcloc = func.srclocs[inst];\n\n instructions.push(InstructionAddressMap {\n\n srcloc,\n\n code_offset: offset as usize,\n\n code_len: size as usize,\n", "file_path": "crates/environ/src/cranelift.rs", "rank": 76, "score": 269405.47274772765 }, { "content": "pub fn get_fileinfo(file: &File) -> io::Result<fileapi::BY_HANDLE_FILE_INFORMATION> {\n\n use fileapi::{GetFileInformationByHandle, BY_HANDLE_FILE_INFORMATION};\n\n use std::mem;\n\n\n\n let handle = file.as_raw_handle();\n\n let info = unsafe {\n\n let mut info: BY_HANDLE_FILE_INFORMATION = mem::zeroed();\n\n cvt(GetFileInformationByHandle(handle, &mut info))?;\n\n info\n\n };\n\n\n\n Ok(info)\n\n}\n\n\n", "file_path": "crates/wasi-common/winx/src/file.rs", "rank": 77, "score": 269004.0412796581 }, { "content": "pub fn init_file_per_thread_logger(prefix: &'static str) {\n\n file_per_thread_logger::initialize(prefix);\n\n\n\n // Extending behavior of default spawner:\n\n // https://docs.rs/rayon/1.1.0/rayon/struct.ThreadPoolBuilder.html#method.spawn_handler\n\n // Source code says DefaultSpawner is implementation detail and\n\n // shouldn't be used directly.\n\n rayon::ThreadPoolBuilder::new()\n\n .spawn_handler(move |thread| {\n\n let mut b = std::thread::Builder::new();\n\n if let Some(name) = 
thread.name() {\n\n b = b.name(name.to_owned());\n\n }\n\n if let Some(stack_size) = thread.stack_size() {\n\n b = b.stack_size(stack_size);\n\n }\n\n b.spawn(move || {\n\n file_per_thread_logger::initialize(prefix);\n\n thread.run()\n\n })?;\n\n Ok(())\n\n })\n\n .build_global()\n\n .unwrap();\n\n}\n", "file_path": "src/lib.rs", "rank": 78, "score": 268666.659244691 }, { "content": "#[test]\n\nfn test_disk_space_settings() {\n\n let (_td, cd, cp) = test_prolog();\n\n let conf = load_config!(\n\n cp,\n\n \"[cache]\\n\\\n\n enabled = true\\n\\\n\n directory = {cache_dir}\\n\\\n\n files-total-size-soft-limit = '76'\",\n\n cd\n\n );\n\n assert!(conf.enabled());\n\n assert!(conf.errors.is_empty());\n\n assert_eq!(conf.files_total_size_soft_limit(), 76);\n\n\n\n let conf = load_config!(\n\n cp,\n\n \"[cache]\\n\\\n\n enabled = true\\n\\\n\n directory = {cache_dir}\\n\\\n\n files-total-size-soft-limit = '42 Mi'\",\n", "file_path": "crates/environ/src/cache/config/tests.rs", "rank": 79, "score": 266631.7692710004 }, { "content": "#[test]\n\nfn test_si_prefix_settings() {\n\n let (_td, cd, cp) = test_prolog();\n\n let conf = load_config!(\n\n cp,\n\n \"[cache]\\n\\\n\n enabled = true\\n\\\n\n directory = {cache_dir}\\n\\\n\n worker-event-queue-size = '42'\\n\\\n\n optimized-compression-usage-counter-threshold = '4K'\\n\\\n\n file-count-soft-limit = '3M'\",\n\n cd\n\n );\n\n assert!(conf.enabled());\n\n assert!(conf.errors.is_empty());\n\n assert_eq!(conf.worker_event_queue_size(), 42);\n\n assert_eq!(conf.optimized_compression_usage_counter_threshold(), 4_000);\n\n assert_eq!(conf.file_count_soft_limit(), 3_000_000);\n\n\n\n let conf = load_config!(\n\n cp,\n", "file_path": "crates/environ/src/cache/config/tests.rs", "rank": 80, "score": 266630.12286116224 }, { "content": "pub fn translate_wasm<M>(\n\n session: &mut CodeGenSession<M>,\n\n reloc_sink: &mut dyn binemit::RelocSink,\n\n func_idx: u32,\n\n body: &wasmparser::FunctionBody,\n\n) -> Result<(), Error>\n\nwhere\n\n 
M: ModuleContext,\n\n for<'any> &'any M::Signature: Into<OpSig>,\n\n{\n\n let ty = session.module_context.defined_func_type(func_idx);\n\n\n\n if DISASSEMBLE {\n\n let microwasm_conv = MicrowasmConv::new(\n\n session.module_context,\n\n ty.params().iter().map(SigType::to_microwasm_type),\n\n ty.returns().iter().map(SigType::to_microwasm_type),\n\n body,\n\n )?;\n\n\n", "file_path": "crates/lightbeam/src/function_body.rs", "rank": 81, "score": 266253.429871131 }, { "content": "fn new_function_body_inputs<'data>(\n\n rng: &mut impl Rng,\n\n code_container: &'data Vec<u8>,\n\n) -> PrimaryMap<DefinedFuncIndex, FunctionBodyData<'data>> {\n\n let len = code_container.len();\n\n let mut pos = rng.gen_range(0, code_container.len());\n\n (2..rng.gen_range(4, 14))\n\n .map(|j| {\n\n let (old_pos, end) = (pos, min(pos + rng.gen_range(0x10, 0x200), len));\n\n pos = end % len;\n\n FunctionBodyData {\n\n data: &code_container[old_pos..end],\n\n module_offset: (rng.next_u64() + j) as usize,\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "crates/environ/src/cache/tests.rs", "rank": 82, "score": 263573.5713971657 }, { "content": "fn main() {\n\n let mut expected = Box::new(2);\n\n mut_args(expected.as_mut());\n\n let given = unsafe {\n\n let given = Box::new(2);\n\n let raw = Box::into_raw(given);\n\n wasi_common_mut_args(raw);\n\n Box::from_raw(raw)\n\n };\n\n assert_eq!(*given, *expected);\n\n}\n", "file_path": "crates/wasi-common/wasi-common-cbindgen/tests/mut_args.rs", "rank": 83, "score": 262762.686281233 }, { "content": "/// Declares data segment symbol\n\npub fn declare_table(obj: &mut Artifact, index: usize) -> Result<(), String> {\n\n let name = format!(\"_table_{}\", index);\n\n obj.declare(name, Decl::data())\n\n .map_err(|err| format!(\"{}\", err))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/obj/src/table.rs", "rank": 84, "score": 262677.35235550976 }, { "content": "/// Emit segment data and initialization location\n\npub fn emit_table(obj: &mut Artifact, 
index: usize) -> Result<(), String> {\n\n let name = format!(\"_table_{}\", index);\n\n // FIXME: We need to initialize table using function symbols\n\n obj.define(name, Vec::new())\n\n .map_err(|err| format!(\"{}\", err))?;\n\n Ok(())\n\n}\n", "file_path": "crates/obj/src/table.rs", "rank": 85, "score": 262677.35235550976 }, { "content": "#[test]\n\nfn test_on_get_create_stats_file() {\n\n let (_tempdir, cache_dir, config_path) = test_prolog();\n\n let cache_config = load_config!(\n\n config_path,\n\n \"[cache]\\n\\\n\n enabled = true\\n\\\n\n directory = {cache_dir}\",\n\n cache_dir\n\n );\n\n assert!(cache_config.enabled());\n\n let worker = Worker::start_new(&cache_config, None);\n\n\n\n let mod_file = cache_dir.join(\"some-mod\");\n\n worker.on_cache_get_async(mod_file);\n\n worker.wait_for_all_events_handled();\n\n assert_eq!(worker.events_dropped(), 0);\n\n\n\n let stats_file = cache_dir.join(\"some-mod.stats\");\n\n let stats = read_stats_file(&stats_file).expect(\"Failed to read stats file\");\n\n assert_eq!(stats.usages, 1);\n\n assert_eq!(\n\n stats.compression_level,\n\n cache_config.baseline_compression_level()\n\n );\n\n}\n\n\n", "file_path": "crates/environ/src/cache/worker/tests.rs", "rank": 86, "score": 261575.39227671546 }, { "content": "pub fn take_api_trap() -> Option<HostRef<Trap>> {\n\n RECORDED_API_TRAP.with(|data| data.take())\n\n}\n\n\n\npub(crate) struct TrapSink {\n\n pub traps: Vec<TrapInformation>,\n\n}\n\n\n\nimpl TrapSink {\n\n pub fn new() -> Self {\n\n Self { traps: Vec::new() }\n\n }\n\n}\n\n\n\nimpl binemit::TrapSink for TrapSink {\n\n fn trap(\n\n &mut self,\n\n code_offset: binemit::CodeOffset,\n\n source_loc: SourceLoc,\n\n trap_code: TrapCode,\n\n ) {\n\n self.traps.push(TrapInformation {\n\n code_offset,\n\n source_loc,\n\n trap_code,\n\n });\n\n }\n\n}\n", "file_path": "crates/api/src/trampoline/trap.rs", "rank": 87, "score": 261106.96753161604 }, { "content": "fn main() -> Result<()> {\n\n // Initialize.\n\n 
println!(\"Initializing...\");\n\n let mut cfg = Config::new();\n\n cfg.features(wasmtime_jit::Features {\n\n multi_value: true,\n\n ..Default::default()\n\n });\n\n let engine = HostRef::new(Engine::new(&cfg));\n\n let store = HostRef::new(Store::new(&engine));\n\n\n\n // Load binary.\n\n println!(\"Loading binary...\");\n\n let binary = wat::parse_str(WAT)?;\n\n\n\n // Compile.\n\n println!(\"Compiling module...\");\n\n let module = HostRef::new(Module::new(&store, &binary).context(\"Error compiling module!\")?);\n\n\n\n // Create external print functions.\n", "file_path": "crates/api/examples/multi.rs", "rank": 88, "score": 260724.64790452702 }, { "content": "fn main() -> Result<()> {\n\n let wasm = wat::parse_str(WAT)?;\n\n\n\n // Instantiate engine and store.\n\n let engine = HostRef::new(Engine::default());\n\n let store = HostRef::new(Store::new(&engine));\n\n\n\n // Load a module.\n\n let module = HostRef::new(Module::new(&store, &wasm)?);\n\n\n\n // Find index of the `gcd` export.\n\n let gcd_index = module\n\n .borrow()\n\n .exports()\n\n .iter()\n\n .enumerate()\n\n .find(|(_, export)| export.name().to_string() == \"gcd\")\n\n .unwrap()\n\n .0;\n\n\n", "file_path": "crates/api/examples/gcd.rs", "rank": 89, "score": 260724.64790452702 }, { "content": "fn main() -> Result<()> {\n\n // Configure the initial compilation environment, creating more global\n\n // structures such as an `Engine` and a `Store`.\n\n println!(\"Initializing...\");\n\n let engine = HostRef::new(Engine::default());\n\n let store = HostRef::new(Store::new(&engine));\n\n\n\n // Next upload the `*.wasm` binary file, which in this case we're going to\n\n // be parsing an inline text format into a binary.\n\n println!(\"Loading binary...\");\n\n let binary = wat::parse_str(\n\n r#\"\n\n (module\n\n (func $hello (import \"\" \"hello\"))\n\n (func (export \"run\") (call $hello))\n\n )\n\n \"#,\n\n )?;\n\n\n\n // Compiler the `*.wasm` binary into an in-memory instance of a `Module`.\n", 
"file_path": "crates/api/examples/hello.rs", "rank": 90, "score": 260724.64790452697 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let out_dir = PathBuf::from(\n\n env::var_os(\"OUT_DIR\").expect(\"The OUT_DIR environment variable must be set\"),\n\n );\n\n let mut out = String::new();\n\n\n\n for strategy in &[\n\n \"Cranelift\",\n\n #[cfg(feature = \"lightbeam\")]\n\n \"Lightbeam\",\n\n ] {\n\n writeln!(out, \"#[cfg(test)]\")?;\n\n writeln!(out, \"#[allow(non_snake_case)]\")?;\n\n writeln!(out, \"mod {} {{\", strategy)?;\n\n\n\n test_directory(&mut out, \"tests/misc_testsuite\", strategy)?;\n\n let spec_tests = test_directory(&mut out, \"tests/spec_testsuite\", strategy)?;\n\n // Skip running spec_testsuite tests if the submodule isn't checked\n\n // out.\n\n if spec_tests > 0 {\n", "file_path": "build.rs", "rank": 91, "score": 260245.71753871918 }, { "content": "/// Compute the offset for a memory data initializer.\n\nfn get_memory_init_start(init: &DataInitializer<'_>, instance: &mut Instance) -> usize {\n\n let mut start = init.location.offset;\n\n\n\n if let Some(base) = init.location.base {\n\n let global = if let Some(def_index) = instance.module.defined_global_index(base) {\n\n instance.global_mut(def_index)\n\n } else {\n\n instance.imported_global(base).from\n\n };\n\n start += usize::try_from(*unsafe { (*global).as_u32() }).unwrap();\n\n }\n\n\n\n start\n\n}\n\n\n", "file_path": "crates/runtime/src/instance.rs", "rank": 92, "score": 259944.53635220253 }, { "content": "fn get_export_memory(exports: &[Extern], i: usize) -> Result<HostRef<Memory>, Error> {\n\n if exports.len() <= i {\n\n bail!(\"> Error accessing memory export {}!\", i);\n\n }\n\n Ok(exports[i]\n\n .memory()\n\n .with_context(|| format!(\"> Error accessing memory export {}!\", i))?\n\n .clone())\n\n}\n\n\n", "file_path": "crates/api/examples/memory.rs", "rank": 93, "score": 259187.75817339128 }, { "content": "fn get_export_func(exports: &[Extern], i: usize) -> 
Result<HostRef<Func>, Error> {\n\n if exports.len() <= i {\n\n bail!(\"> Error accessing function export {}!\", i);\n\n }\n\n Ok(exports[i]\n\n .func()\n\n .with_context(|| format!(\"> Error accessing function export {}!\", i))?\n\n .clone())\n\n}\n\n\n\nmacro_rules! check {\n\n ($actual:expr, $expected:expr) => {\n\n if $actual != $expected {\n\n bail!(\"> Error on result, expected {}, got {}\", $expected, $actual);\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! check_ok {\n\n ($func:expr, $($p:expr),*) => {\n", "file_path": "crates/api/examples/memory.rs", "rank": 94, "score": 259187.75817339134 }, { "content": "pub fn arg_locs(types: impl IntoIterator<Item = SignlessType>) -> Result<Vec<CCLoc>, Error> {\n\n let types = types.into_iter();\n\n let mut out = Vec::with_capacity(types.size_hint().0);\n\n // TODO: VmCtx is in the first register\n\n let mut int_gpr_iter = INTEGER_ARGS_IN_GPRS.iter();\n\n let mut float_gpr_iter = FLOAT_ARGS_IN_GPRS.iter();\n\n let mut stack_idx = 0;\n\n\n\n for ty in types {\n\n match ty {\n\n I32 | I64 => out.push(int_gpr_iter.next().map(|&r| CCLoc::Reg(r)).unwrap_or_else(\n\n || {\n\n let out = CCLoc::Stack(stack_idx);\n\n stack_idx += 1;\n\n out\n\n },\n\n )),\n\n F32 | F64 => match float_gpr_iter.next() {\n\n None => {\n\n return Err(Error::Microwasm(\n", "file_path": "crates/lightbeam/src/backend.rs", "rank": 95, "score": 257967.6739284864 }, { "content": "fn values_equal(v1: &Val, v2: &Val) -> Result<bool> {\n\n Ok(match (v1, v2) {\n\n (Val::I32(a), Val::I32(b)) => a == b,\n\n (Val::I64(a), Val::I64(b)) => a == b,\n\n // Note that these float comparisons are comparing bits, not float\n\n // values, so we're testing for bit-for-bit equivalence\n\n (Val::F32(a), Val::F32(b)) => a == b,\n\n (Val::F64(a), Val::F64(b)) => a == b,\n\n (Val::V128(a), Val::V128(b)) => a == b,\n\n _ => bail!(\"don't know how to compare {:?} and {:?} yet\", v1, v2),\n\n })\n\n}\n", "file_path": "crates/wast/src/wast.rs", "rank": 96, "score": 253937.83736798132 }, { 
"content": "#[test]\n\nfn test_cache_disabled() {\n\n let errors = cache_init::<&str>(false, None, None);\n\n assert!(errors.is_empty(), \"Failed to disable cache system\");\n\n}\n", "file_path": "crates/environ/tests/cache_disabled.rs", "rank": 97, "score": 253743.36440289242 }, { "content": "/// Ignore tests that aren't supported yet.\n\nfn ignore(testsuite: &str, testname: &str, strategy: &str) -> bool {\n\n match strategy {\n\n #[cfg(feature = \"lightbeam\")]\n\n \"Lightbeam\" => match (testsuite, testname) {\n\n (_, _) if testname.starts_with(\"simd\") => return true,\n\n (_, _) if testsuite.ends_with(\"multi_value\") => return true,\n\n _ => (),\n\n },\n\n \"Cranelift\" => match (testsuite, testname) {\n\n _ => {}\n\n },\n\n _ => panic!(\"unrecognized strategy\"),\n\n }\n\n\n\n false\n\n}\n", "file_path": "build.rs", "rank": 98, "score": 251098.54409716272 }, { "content": "/// Add environment-specific function parameters.\n\npub fn translate_signature(mut sig: ir::Signature, pointer_type: ir::Type) -> ir::Signature {\n\n // Prepend the vmctx argument.\n\n sig.params.insert(\n\n 0,\n\n AbiParam::special(pointer_type, ArgumentPurpose::VMContext),\n\n );\n\n sig\n\n}\n\n\n\n/// A memory index and offset within that memory where a data initialization\n\n/// should is to be performed.\n\n#[derive(Clone)]\n\npub struct DataInitializerLocation {\n\n /// The index of the memory to initialize.\n\n pub memory_index: MemoryIndex,\n\n\n\n /// Optionally a globalvar base to initialize at.\n\n pub base: Option<GlobalIndex>,\n\n\n\n /// A constant offset to initialize at.\n", "file_path": "crates/environ/src/module_environ.rs", "rank": 99, "score": 250526.6289223138 } ]
Rust
src/io/read/take.rs
mvucenovic/async-std
98c79f4ff90e92de0ebec103709c9c41badc3dbd
use std::cmp; use std::pin::Pin; use crate::io::{self, BufRead, Read}; use crate::task::{Context, Poll}; #[derive(Debug)] pub struct Take<T> { pub(crate) inner: T, pub(crate) limit: u64, } impl<T> Take<T> { pub fn limit(&self) -> u64 { self.limit } pub fn set_limit(&mut self, limit: u64) { self.limit = limit; } pub fn into_inner(self) -> T { self.inner } pub fn get_ref(&self) -> &T { &self.inner } pub fn get_mut(&mut self) -> &mut T { &mut self.inner } } impl<T: Read + Unpin> Read for Take<T> { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<io::Result<usize>> { let Self { inner, limit } = &mut *self; take_read_internal(Pin::new(inner), cx, buf, limit) } } pub fn take_read_internal<R: Read + ?Sized>( mut rd: Pin<&mut R>, cx: &mut Context<'_>, buf: &mut [u8], limit: &mut u64, ) -> Poll<io::Result<usize>> { if *limit == 0 { return Poll::Ready(Ok(0)); } let max = cmp::min(buf.len() as u64, *limit) as usize; match futures_core::ready!(rd.as_mut().poll_read(cx, &mut buf[..max])) { Ok(n) => { *limit -= n as u64; Poll::Ready(Ok(n)) } Err(e) => Poll::Ready(Err(e)), } } impl<T: BufRead + Unpin> BufRead for Take<T> { fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> { let Self { inner, limit } = unsafe { self.get_unchecked_mut() }; let inner = unsafe { Pin::new_unchecked(inner) }; if *limit == 0 { return Poll::Ready(Ok(&[])); } match futures_core::ready!(inner.poll_fill_buf(cx)) { Ok(buf) => { let cap = cmp::min(buf.len() as u64, *limit) as usize; Poll::Ready(Ok(&buf[..cap])) } Err(e) => Poll::Ready(Err(e)), } } fn consume(mut self: Pin<&mut Self>, amt: usize) { let amt = cmp::min(amt as u64, self.limit) as usize; self.limit -= amt as u64; let rd = Pin::new(&mut self.inner); rd.consume(amt); } } #[cfg(test)] mod tests { use crate::io; use crate::prelude::*; use crate::task; #[test] fn test_take_basics() -> std::io::Result<()> { let source: io::Cursor<Vec<u8>> = io::Cursor::new(vec![0, 1, 2, 3, 4, 
5, 6, 7, 8]); task::block_on(async move { let mut buffer = [0u8; 5]; let mut handle = source.take(5); handle.read(&mut buffer).await?; assert_eq!(buffer, [0, 1, 2, 3, 4]); assert_eq!(handle.read(&mut buffer).await.unwrap(), 0); Ok(()) }) } }
use std::cmp; use std::pin::Pin; use crate::io::{self, BufRead, Read}; use crate::task::{Context, Poll}; #[derive(Debug)] pub struct Take<T> { pub(crate) inner: T, pub(crate) limit: u64, } impl<T> Take<T> { pub fn limit(&self) -> u64 { self.limit } pub fn set_limit(&mut self, limit: u64) { self.limit = limit; } pub fn into_inner(self) -> T { self.inner } pub fn get_ref(&self) -> &T { &self.inner } pub fn get_mut(&mut self) -> &mut T { &mut self.inner } } impl<T: Read + Unpin> Read for Take<T> { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<io::Result<usize>> { let Self { inner, limit } = &mut *self; take_read_internal(Pin::new(inner), cx, buf, limit) } }
impl<T: BufRead + Unpin> BufRead for Take<T> { fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>> { let Self { inner, limit } = unsafe { self.get_unchecked_mut() }; let inner = unsafe { Pin::new_unchecked(inner) }; if *limit == 0 { return Poll::Ready(Ok(&[])); } match futures_core::ready!(inner.poll_fill_buf(cx)) { Ok(buf) => { let cap = cmp::min(buf.len() as u64, *limit) as usize; Poll::Ready(Ok(&buf[..cap])) } Err(e) => Poll::Ready(Err(e)), } } fn consume(mut self: Pin<&mut Self>, amt: usize) { let amt = cmp::min(amt as u64, self.limit) as usize; self.limit -= amt as u64; let rd = Pin::new(&mut self.inner); rd.consume(amt); } } #[cfg(test)] mod tests { use crate::io; use crate::prelude::*; use crate::task; #[test] fn test_take_basics() -> std::io::Result<()> { let source: io::Cursor<Vec<u8>> = io::Cursor::new(vec![0, 1, 2, 3, 4, 5, 6, 7, 8]); task::block_on(async move { let mut buffer = [0u8; 5]; let mut handle = source.take(5); handle.read(&mut buffer).await?; assert_eq!(buffer, [0, 1, 2, 3, 4]); assert_eq!(handle.read(&mut buffer).await.unwrap(), 0); Ok(()) }) } }
pub fn take_read_internal<R: Read + ?Sized>( mut rd: Pin<&mut R>, cx: &mut Context<'_>, buf: &mut [u8], limit: &mut u64, ) -> Poll<io::Result<usize>> { if *limit == 0 { return Poll::Ready(Ok(0)); } let max = cmp::min(buf.len() as u64, *limit) as usize; match futures_core::ready!(rd.as_mut().poll_read(cx, &mut buf[..max])) { Ok(n) => { *limit -= n as u64; Poll::Ready(Ok(n)) } Err(e) => Poll::Ready(Err(e)), } }
function_block-full_function
[ { "content": "pub fn read_line_internal<R: BufRead + ?Sized>(\n\n reader: Pin<&mut R>,\n\n cx: &mut Context<'_>,\n\n buf: &mut String,\n\n bytes: &mut Vec<u8>,\n\n read: &mut usize,\n\n) -> Poll<io::Result<usize>> {\n\n let ret = futures_core::ready!(read_until_internal(reader, cx, b'\\n', bytes, read));\n\n if str::from_utf8(&bytes).is_err() {\n\n Poll::Ready(ret.and_then(|_| {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"stream did not contain valid UTF-8\",\n\n ))\n\n }))\n\n } else {\n\n debug_assert!(buf.is_empty());\n\n debug_assert_eq!(*read, 0);\n\n // Safety: `bytes` is a valid UTF-8 because `str::from_utf8` returned `Ok`.\n\n mem::swap(unsafe { buf.as_mut_vec() }, bytes);\n\n Poll::Ready(ret)\n\n }\n\n}\n", "file_path": "src/io/buf_read/lines.rs", "rank": 0, "score": 234648.14622406798 }, { "content": "pub fn read_until_internal<R: BufReadExt + ?Sized>(\n\n mut reader: Pin<&mut R>,\n\n cx: &mut Context<'_>,\n\n byte: u8,\n\n buf: &mut Vec<u8>,\n\n read: &mut usize,\n\n) -> Poll<io::Result<usize>> {\n\n loop {\n\n let (done, used) = {\n\n let available = futures_core::ready!(reader.as_mut().poll_fill_buf(cx))?;\n\n if let Some(i) = memchr::memchr(byte, available) {\n\n buf.extend_from_slice(&available[..=i]);\n\n (true, i + 1)\n\n } else {\n\n buf.extend_from_slice(available);\n\n (false, available.len())\n\n }\n\n };\n\n reader.as_mut().consume(used);\n\n *read += used;\n\n if done || used == 0 {\n\n return Poll::Ready(Ok(mem::replace(read, 0)));\n\n }\n\n }\n\n}\n", "file_path": "src/io/buf_read/mod.rs", "rank": 1, "score": 213270.23010512057 }, { "content": "// This uses an adaptive system to extend the vector when it fills. We want to\n\n// avoid paying to allocate and zero a huge chunk of memory if the reader only\n\n// has 4 bytes while still making large reads if the reader does have a ton\n\n// of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every\n\n// time is 4,500 times (!) 
slower than this if the reader has a very small\n\n// amount of data to return.\n\n//\n\n// Because we're extending the buffer with uninitialized data for trusted\n\n// readers, we need to make sure to truncate that if any of this panics.\n\npub fn read_to_end_internal<R: Read + ?Sized>(\n\n mut rd: Pin<&mut R>,\n\n cx: &mut Context<'_>,\n\n buf: &mut Vec<u8>,\n\n start_len: usize,\n\n) -> Poll<io::Result<usize>> {\n\n struct Guard<'a> {\n\n buf: &'a mut Vec<u8>,\n\n len: usize,\n\n }\n\n\n\n impl Drop for Guard<'_> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n self.buf.set_len(self.len);\n\n }\n\n }\n\n }\n\n\n\n let mut g = Guard {\n", "file_path": "src/io/read/read_to_end.rs", "rank": 2, "score": 191262.11675954453 }, { "content": " pub trait BufRead [BufReadExt: futures_io::AsyncBufRead] {\n\n #[doc = r#\"\n\n Returns the contents of the internal buffer, filling it with more data from the\n\n inner reader if it is empty.\n\n\n\n This function is a lower-level call. It needs to be paired with the [`consume`]\n\n method to function properly. When calling this method, none of the contents will be\n\n \"read\" in the sense that later calling `read` may return the same contents. 
As\n\n such, [`consume`] must be called with the number of bytes that are consumed from\n\n this buffer to ensure that the bytes are never returned twice.\n\n\n\n [`consume`]: #tymethod.consume\n\n\n\n An empty buffer returned indicates that the stream has reached EOF.\n\n \"#]\n\n // TODO: write a proper doctest with `consume`\n\n fn poll_fill_buf(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<&[u8]>>;\n\n\n\n #[doc = r#\"\n\n Tells this buffer that `amt` bytes have been consumed from the buffer, so they\n", "file_path": "src/io/buf_read/mod.rs", "rank": 4, "score": 186770.02177843705 }, { "content": "/// Creates an instance of a reader that infinitely repeats one byte.\n\n///\n\n/// All reads from this reader will succeed by filling the specified buffer with the given byte.\n\n///\n\n/// ## Examples\n\n///\n\n/// ```rust\n\n/// # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::io;\n\n/// use async_std::prelude::*;\n\n///\n\n/// let mut buffer = [0; 3];\n\n/// io::repeat(0b101).read_exact(&mut buffer).await?;\n\n///\n\n/// assert_eq!(buffer, [0b101, 0b101, 0b101]);\n\n/// #\n\n/// # Ok(()) }) }\n\n/// ```\n\npub fn repeat(byte: u8) -> Repeat {\n\n Repeat { byte }\n\n}\n\n\n\n/// A reader which yields one byte over and over and over and over and over and...\n\n///\n\n/// This reader is constructed by the [`repeat`] function.\n\n///\n\n/// [`repeat`]: fn.repeat.html\n\npub struct Repeat {\n\n byte: u8,\n\n}\n\n\n\nimpl fmt::Debug for Repeat {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.pad(\"Empty { .. 
}\")\n\n }\n\n}\n\n\n\nimpl Read for Repeat {\n", "file_path": "src/io/repeat.rs", "rank": 5, "score": 183002.91550240977 }, { "content": "struct PollFn<F> {\n\n f: F,\n\n}\n\n\n\nimpl<F> Unpin for PollFn<F> {}\n\n\n\nimpl<T, F> Future for PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<T>,\n\n{\n\n type Output = T;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {\n\n (&mut self.f)(cx)\n\n }\n\n}\n", "file_path": "src/future/poll_fn.rs", "rank": 6, "score": 173355.24880669682 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n /// The blocking stdin handle.\n\n stdin: std::io::Stdin,\n\n\n\n /// The line buffer.\n\n line: String,\n\n\n\n /// The write buffer.\n\n buf: Vec<u8>,\n\n\n\n /// The result of the last asynchronous operation on the stdin.\n\n last_op: Option<Operation>,\n\n}\n\n\n\n/// Possible results of an asynchronous operation on the stdin.\n", "file_path": "src/io/stdin.rs", "rank": 7, "score": 149839.6639975609 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n /// The blocking stderr handle.\n\n stderr: std::io::Stderr,\n\n\n\n /// The write buffer.\n\n buf: Vec<u8>,\n\n\n\n /// The result of the last asynchronous operation on the stderr.\n\n last_op: Option<Operation>,\n\n}\n\n\n\n/// Possible results of an asynchronous operation on the stderr.\n", "file_path": "src/io/stderr.rs", "rank": 8, "score": 149839.6639975609 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n /// The blocking stdout handle.\n\n stdout: std::io::Stdout,\n\n\n\n /// The write buffer.\n\n buf: Vec<u8>,\n\n\n\n /// The result of the last asynchronous operation on the stdout.\n\n last_op: Option<Operation>,\n\n}\n\n\n\n/// Possible results of an asynchronous operation on the stdout.\n", "file_path": "src/io/stdout.rs", "rank": 9, "score": 149839.6639975609 }, { "content": " pub trait Read [ReadExt: futures_io::AsyncRead] {\n\n #[doc = r#\"\n\n Attempt to read from the `AsyncRead` into `buf`.\n\n \"#]\n\n fn poll_read(\n\n 
self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>>;\n\n\n\n #[doc = r#\"\n\n Attempt to read from the `AsyncRead` into `bufs` using vectored IO operations.\n\n \"#]\n\n fn poll_read_vectored(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n bufs: &mut [IoSliceMut<'_>],\n\n ) -> Poll<io::Result<usize>> {\n\n unreachable!(\"this impl only appears in the rendered docs\")\n\n }\n", "file_path": "src/io/read/mod.rs", "rank": 10, "score": 135862.45463836956 }, { "content": "/// Creates a reader that contains no data.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::io;\n\n/// use async_std::prelude::*;\n\n///\n\n/// let mut buf = Vec::new();\n\n/// let mut reader = io::empty();\n\n/// reader.read_to_end(&mut buf).await?;\n\n///\n\n/// assert!(buf.is_empty());\n\n/// #\n\n/// # Ok(()) }) }\n\n/// ```\n\npub fn empty() -> Empty {\n\n Empty { _private: () }\n\n}\n\n\n\n/// A reader that contains no data.\n\n///\n\n/// This reader is constructed by the [`sink`] function.\n\n///\n\n/// [`sink`]: fn.sink.html\n\npub struct Empty {\n\n _private: (),\n\n}\n\n\n\nimpl fmt::Debug for Empty {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.pad(\"Empty { .. 
}\")\n\n }\n\n}\n\n\n\nimpl Read for Empty {\n", "file_path": "src/io/empty.rs", "rank": 11, "score": 129720.4838174223 }, { "content": "/// Constructs a new handle to the standard input of the current process.\n\n///\n\n/// This function is an async version of [`std::io::stdin`].\n\n///\n\n/// [`std::io::stdin`]: https://doc.rust-lang.org/std/io/fn.stdin.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::io;\n\n///\n\n/// let stdin = io::stdin();\n\n/// let mut line = String::new();\n\n/// stdin.read_line(&mut line).await?;\n\n/// #\n\n/// # Ok(()) }) }\n\n/// ```\n\npub fn stdin() -> Stdin {\n\n Stdin(Mutex::new(State::Idle(Some(Inner {\n\n stdin: std::io::stdin(),\n\n line: String::new(),\n\n buf: Vec::new(),\n\n last_op: None,\n\n }))))\n\n}\n\n\n\n/// A handle to the standard input of the current process.\n\n///\n\n/// Created by the [`stdin`] function.\n\n///\n\n/// This type is an async version of [`std::io::Stdin`].\n\n///\n\n/// [`stdin`]: fn.stdin.html\n\n/// [`std::io::Stdin`]: https://doc.rust-lang.org/std/io/struct.Stdin.html\n\n#[derive(Debug)]\n\npub struct Stdin(Mutex<State>);\n\n\n\n/// The state of the asynchronous stdin.\n\n///\n\n/// The stdin can be either idle or busy performing an asynchronous operation.\n", "file_path": "src/io/stdin.rs", "rank": 12, "score": 129713.66301130582 }, { "content": "/// Creates a writer that consumes and drops all data.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::io;\n\n/// use async_std::prelude::*;\n\n///\n\n/// let mut writer = io::sink();\n\n/// writer.write(b\"hello world\").await?;\n\n/// #\n\n/// # Ok(()) }) }\n\n/// ```\n\npub fn sink() -> Sink {\n\n Sink { _private: () }\n\n}\n\n\n\n/// A writer that consumes and drops all data.\n\n///\n\n/// This writer is constructed by the [`sink`] 
function.\n\n///\n\n/// [`sink`]: fn.sink.html\n\npub struct Sink {\n\n _private: (),\n\n}\n\n\n\nimpl fmt::Debug for Sink {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.pad(\"Sink { .. }\")\n\n }\n\n}\n\n\n\nimpl Write for Sink {\n", "file_path": "src/io/sink.rs", "rank": 13, "score": 129709.37427689008 }, { "content": "/// Constructs a new handle to the standard error of the current process.\n\n///\n\n/// This function is an async version of [`std::io::stderr`].\n\n///\n\n/// [`std::io::stderr`]: https://doc.rust-lang.org/std/io/fn.stderr.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::io;\n\n/// use async_std::prelude::*;\n\n///\n\n/// let mut stderr = io::stderr();\n\n/// stderr.write_all(b\"Hello, world!\").await?;\n\n/// #\n\n/// # Ok(()) }) }\n\n/// ```\n\npub fn stderr() -> Stderr {\n\n Stderr(Mutex::new(State::Idle(Some(Inner {\n\n stderr: std::io::stderr(),\n\n buf: Vec::new(),\n\n last_op: None,\n\n }))))\n\n}\n\n\n\n/// A handle to the standard error of the current process.\n\n///\n\n/// Created by the [`stderr`] function.\n\n///\n\n/// This type is an async version of [`std::io::Stderr`].\n\n///\n\n/// [`stderr`]: fn.stderr.html\n\n/// [`std::io::Stderr`]: https://doc.rust-lang.org/std/io/struct.Stderr.html\n\n#[derive(Debug)]\n\npub struct Stderr(Mutex<State>);\n\n\n\n/// The state of the asynchronous stderr.\n\n///\n\n/// The stderr can be either idle or busy performing an asynchronous operation.\n", "file_path": "src/io/stderr.rs", "rank": 14, "score": 129708.945181324 }, { "content": "/// Constructs a new handle to the standard output of the current process.\n\n///\n\n/// This function is an async version of [`std::io::stdout`].\n\n///\n\n/// [`std::io::stdout`]: https://doc.rust-lang.org/std/io/fn.stdout.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # fn main() -> std::io::Result<()> { 
async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::io;\n\n/// use async_std::prelude::*;\n\n///\n\n/// let mut stdout = io::stdout();\n\n/// stdout.write_all(b\"Hello, world!\").await?;\n\n/// #\n\n/// # Ok(()) }) }\n\n/// ```\n\npub fn stdout() -> Stdout {\n\n Stdout(Mutex::new(State::Idle(Some(Inner {\n\n stdout: std::io::stdout(),\n\n buf: Vec::new(),\n\n last_op: None,\n\n }))))\n\n}\n\n\n\n/// A handle to the standard output of the current process.\n\n///\n\n/// Created by the [`stdout`] function.\n\n///\n\n/// This type is an async version of [`std::io::Stdout`].\n\n///\n\n/// [`stdout`]: fn.stdout.html\n\n/// [`std::io::Stdout`]: https://doc.rust-lang.org/std/io/struct.Stdout.html\n\n#[derive(Debug)]\n\npub struct Stdout(Mutex<State>);\n\n\n\n/// The state of the asynchronous stdout.\n\n///\n\n/// The stdout can be either idle or busy performing an asynchronous operation.\n", "file_path": "src/io/stdout.rs", "rank": 15, "score": 129708.945181324 }, { "content": "/// Returns a handle to the current task.\n\n///\n\n/// # Panics\n\n///\n\n/// This function will panic if not called within the context of a task created by [`block_on`],\n\n/// [`spawn`], or [`Builder::spawn`].\n\n///\n\n/// [`block_on`]: fn.block_on.html\n\n/// [`spawn`]: fn.spawn.html\n\n/// [`Builder::spawn`]: struct.Builder.html#method.spawn\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::task;\n\n///\n\n/// println!(\"The name of this task is {:?}\", task::current().name());\n\n/// #\n\n/// # }) }\n\n/// ```\n\npub fn current() -> Task {\n\n get_task(|task| task.clone()).expect(\"`task::current()` called outside the context of a task\")\n\n}\n\n\n\nthread_local! 
{\n\n static TAG: Cell<*const task::Tag> = Cell::new(ptr::null_mut());\n\n}\n\n\n\npub(crate) fn set_tag<F, R>(tag: *const task::Tag, f: F) -> R\n\nwhere\n\n F: FnOnce() -> R,\n\n{\n\n struct ResetTag<'a>(&'a Cell<*const task::Tag>);\n\n\n\n impl Drop for ResetTag<'_> {\n\n fn drop(&mut self) {\n\n self.0.set(ptr::null());\n\n }\n\n }\n\n\n", "file_path": "src/task/worker.rs", "rank": 16, "score": 129705.2890713141 }, { "content": "#[bench]\n\nfn get(b: &mut Bencher) {\n\n task_local! {\n\n static VAL: u64 = 1;\n\n }\n\n\n\n let mut sum = 0;\n\n task::block_on(async {\n\n b.iter(|| VAL.with(|v| sum += v));\n\n });\n\n black_box(sum);\n\n}\n", "file_path": "benches/task_local.rs", "rank": 17, "score": 128234.83425080808 }, { "content": "/// Generates a random number in `0..n`.\n\npub fn random(n: u32) -> u32 {\n\n thread_local! {\n\n static RNG: Cell<Wrapping<u32>> = Cell::new(Wrapping(1406868647));\n\n }\n\n\n\n RNG.with(|rng| {\n\n // This is the 32-bit variant of Xorshift.\n\n //\n\n // Source: https://en.wikipedia.org/wiki/Xorshift\n\n let mut x = rng.get();\n\n x ^= x << 13;\n\n x ^= x >> 17;\n\n x ^= x << 5;\n\n rng.set(x);\n\n\n\n // This is a fast alternative to `x % n`.\n\n //\n\n // Author: Daniel Lemire\n\n // Source: https://lemire.me/blog/2016/06/27/a-fast-alternative-to-the-modulo-reduction/\n\n ((x.0 as u64).wrapping_mul(n as u64) >> 32) as u32\n\n })\n\n}\n\n\n", "file_path": "tests/rwlock.rs", "rank": 18, "score": 123505.4812381426 }, { "content": "#[inline]\n\npub fn abort_on_panic<T>(f: impl FnOnce() -> T) -> T {\n\n struct Bomb;\n\n\n\n impl Drop for Bomb {\n\n fn drop(&mut self) {\n\n process::abort();\n\n }\n\n }\n\n\n\n let bomb = Bomb;\n\n let t = f();\n\n mem::forget(bomb);\n\n t\n\n}\n\n\n\n/// Defines an extension trait for a base trait from the `futures` crate.\n\n///\n\n/// In generated docs, the base trait will contain methods from the extension trait. 
In actual\n\n/// code, the base trait will be re-exported and the extension trait will be hidden. We then\n\n/// re-export the extension trait from the prelude.\n", "file_path": "src/utils.rs", "rank": 19, "score": 120394.00710401678 }, { "content": "/// Creates a stream that doesn't yield any items.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::prelude::*;\n\n/// use async_std::stream;\n\n///\n\n/// let mut s = stream::empty::<i32>();\n\n///\n\n/// assert_eq!(s.next().await, None);\n\n/// #\n\n/// # }) }\n\n/// ```\n\npub fn empty<T>() -> Empty<T> {\n\n Empty {\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n/// A stream that doesn't yield any items.\n\n///\n\n/// This stream is constructed by the [`empty`] function.\n\n///\n\n/// [`empty`]: fn.empty.html\n\n#[derive(Debug)]\n\npub struct Empty<T> {\n\n _marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T> Stream for Empty<T> {\n\n type Item = T;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n Poll::Ready(None)\n\n }\n\n}\n", "file_path": "src/stream/empty.rs", "rank": 20, "score": 119912.4193689878 }, { "content": "/// Creates a stream that yields a single item.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::prelude::*;\n\n/// use async_std::stream;\n\n///\n\n/// let mut s = stream::once(7);\n\n///\n\n/// assert_eq!(s.next().await, Some(7));\n\n/// assert_eq!(s.next().await, None);\n\n/// #\n\n/// # }) }\n\n/// ```\n\npub fn once<T>(t: T) -> Once<T> {\n\n Once { value: Some(t) }\n\n}\n\n\n\n/// A stream that yields a single item.\n\n///\n\n/// This stream is constructed by the [`once`] function.\n\n///\n\n/// [`once`]: fn.once.html\n\n#[derive(Debug)]\n\npub struct Once<T> {\n\n value: Option<T>,\n\n}\n\n\n\nimpl<T: Unpin> Stream for Once<T> {\n\n type Item = T;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, _: 
&mut Context<'_>) -> Poll<Option<T>> {\n\n Poll::Ready(self.value.take())\n\n }\n\n}\n", "file_path": "src/stream/once.rs", "rank": 21, "score": 118876.02652218826 }, { "content": "#[test]\n\nfn into_inner() {\n\n let m = Mutex::new(10);\n\n assert_eq!(m.into_inner(), 10);\n\n}\n\n\n", "file_path": "tests/mutex.rs", "rank": 22, "score": 116814.6470998152 }, { "content": "#[test]\n\nfn into_inner() {\n\n let lock = RwLock::new(10);\n\n assert_eq!(lock.into_inner(), 10);\n\n}\n\n\n", "file_path": "tests/rwlock.rs", "rank": 23, "score": 116814.6470998152 }, { "content": "#[test]\n\nfn get_mut() {\n\n let mut lock = RwLock::new(10);\n\n *lock.get_mut() = 20;\n\n assert_eq!(lock.into_inner(), 20);\n\n}\n\n\n", "file_path": "tests/rwlock.rs", "rank": 24, "score": 114722.16529607357 }, { "content": "#[test]\n\nfn get_mut() {\n\n let mut m = Mutex::new(10);\n\n *m.get_mut() = 20;\n\n assert_eq!(m.into_inner(), 20);\n\n}\n\n\n", "file_path": "tests/mutex.rs", "rank": 25, "score": 114722.16529607357 }, { "content": "#[test]\n\nfn into_inner_and_drop() {\n\n struct Counter(Arc<AtomicUsize>);\n\n\n\n impl Drop for Counter {\n\n fn drop(&mut self) {\n\n self.0.fetch_add(1, Ordering::SeqCst);\n\n }\n\n }\n\n\n\n let cnt = Arc::new(AtomicUsize::new(0));\n\n let lock = RwLock::new(Counter(cnt.clone()));\n\n assert_eq!(cnt.load(Ordering::SeqCst), 0);\n\n\n\n {\n\n let _inner = lock.into_inner();\n\n assert_eq!(cnt.load(Ordering::SeqCst), 0);\n\n }\n\n\n\n assert_eq!(cnt.load(Ordering::SeqCst), 1);\n\n}\n\n\n", "file_path": "tests/rwlock.rs", "rank": 26, "score": 114700.57782276602 }, { "content": "/// Creates a stream that yields the same item repeatedly.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::prelude::*;\n\n/// use async_std::stream;\n\n///\n\n/// let mut s = stream::repeat(7);\n\n///\n\n/// assert_eq!(s.next().await, Some(7));\n\n/// assert_eq!(s.next().await, Some(7));\n\n/// #\n\n/// # }) 
}\n\n/// ```\n\npub fn repeat<T>(item: T) -> Repeat<T>\n\nwhere\n\n T: Clone,\n\n{\n\n Repeat { item }\n\n}\n\n\n\n/// A stream that yields the same item repeatedly.\n\n///\n\n/// This stream is constructed by the [`repeat`] function.\n\n///\n\n/// [`repeat`]: fn.repeat.html\n\n#[derive(Debug)]\n\npub struct Repeat<T> {\n\n item: T,\n\n}\n\n\n\nimpl<T: Clone> Stream for Repeat<T> {\n\n type Item = T;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n Poll::Ready(Some(self.item.clone()))\n\n }\n\n}\n", "file_path": "src/stream/repeat.rs", "rank": 27, "score": 111945.40256313907 }, { "content": "/// Spawns a task and blocks the current thread on its result.\n\n///\n\n/// Calling this function is similar to [spawning] a thread and immediately [joining] it, except an\n\n/// asynchronous task will be spawned.\n\n///\n\n/// [spawning]: https://doc.rust-lang.org/std/thread/fn.spawn.html\n\n/// [joining]: https://doc.rust-lang.org/std/thread/struct.JoinHandle.html#method.join\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use async_std::task;\n\n///\n\n/// fn main() {\n\n/// task::block_on(async {\n\n/// println!(\"Hello, world!\");\n\n/// })\n\n/// }\n\n/// ```\n\npub fn block_on<F, T>(future: F) -> T\n\nwhere\n\n F: Future<Output = T>,\n\n{\n\n unsafe {\n\n // A place on the stack where the result will be stored.\n\n let out = &mut UnsafeCell::new(None);\n\n\n\n // Wrap the future into one that stores the result into `out`.\n\n let future = {\n\n let out = out.get();\n\n\n\n async move {\n\n let future = CatchUnwindFuture {\n\n future: AssertUnwindSafe(future),\n\n };\n\n *out = Some(future.await);\n\n }\n\n };\n\n\n", "file_path": "src/task/block_on.rs", "rank": 28, "score": 111940.26780734504 }, { "content": "/// Spawns a task.\n\n///\n\n/// This function is similar to [`std::thread::spawn`], except it spawns an asynchronous task.\n\n///\n\n/// [`std::thread`]: 
https://doc.rust-lang.org/std/thread/fn.spawn.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::task;\n\n///\n\n/// let handle = task::spawn(async {\n\n/// 1 + 2\n\n/// });\n\n///\n\n/// assert_eq!(handle.await, 3);\n\n/// #\n\n/// # }) }\n\n/// ```\n\npub fn spawn<F, T>(future: F) -> JoinHandle<T>\n\nwhere\n\n F: Future<Output = T> + Send + 'static,\n\n T: Send + 'static,\n\n{\n\n Builder::new().spawn(future).expect(\"cannot spawn future\")\n\n}\n\n\n\npub(crate) struct Pool {\n\n pub injector: Injector<task::Runnable>,\n\n pub stealers: Vec<Stealer<task::Runnable>>,\n\n pub sleepers: Sleepers,\n\n}\n\n\n\nimpl Pool {\n\n /// Spawn a future onto the pool.\n\n pub fn spawn<F, T>(&self, future: F, builder: Builder) -> JoinHandle<T>\n\n where\n\n F: Future<Output = T> + Send + 'static,\n\n T: Send + 'static,\n", "file_path": "src/task/pool.rs", "rank": 29, "score": 106962.48097320803 }, { "content": "/// Spawns a blocking task.\n\n///\n\n/// The task will be spawned onto a thread pool specifically dedicated to blocking tasks.\n\npub fn spawn<F, R>(future: F) -> JoinHandle<R>\n\nwhere\n\n F: Future<Output = R> + Send + 'static,\n\n R: Send + 'static,\n\n{\n\n let (task, handle) = async_task::spawn(future, schedule, ());\n\n task.schedule();\n\n JoinHandle(handle)\n\n}\n\n\n\n/// A handle to a blocking task.\n\npub struct JoinHandle<R>(async_task::JoinHandle<R, ()>);\n\n\n\nimpl<R> Unpin for JoinHandle<R> {}\n\n\n\nimpl<R> Future for JoinHandle<R> {\n\n type Output = R;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n Pin::new(&mut self.0).poll(cx).map(|out| out.unwrap())\n", "file_path": "src/task/blocking.rs", "rank": 30, "score": 106949.69791137299 }, { "content": "#[test]\n\nfn incoming_read() -> io::Result<()> {\n\n task::block_on(async {\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").await?;\n\n let addr = 
listener.local_addr()?;\n\n\n\n task::spawn(async move {\n\n let mut stream = TcpStream::connect(&addr).await?;\n\n stream.write_all(THE_WINTERS_TALE).await?;\n\n io::Result::Ok(())\n\n });\n\n\n\n let mut buf = vec![0; 1024];\n\n let mut incoming = listener.incoming();\n\n let mut stream = incoming.next().await.unwrap()?;\n\n\n\n let n = stream.read(&mut buf).await?;\n\n assert_eq!(&buf[..n], THE_WINTERS_TALE);\n\n\n\n Ok(())\n\n })\n\n}\n", "file_path": "tests/tcp.rs", "rank": 31, "score": 106052.33311385367 }, { "content": "use std::pin::Pin;\n\n\n\nuse super::read_until_internal;\n\nuse crate::future::Future;\n\nuse crate::io::{self, BufRead};\n\nuse crate::task::{Context, Poll};\n\n\n\n#[doc(hidden)]\n\n#[allow(missing_debug_implementations)]\n\npub struct ReadUntilFuture<'a, T: Unpin + ?Sized> {\n\n pub(crate) reader: &'a mut T,\n\n pub(crate) byte: u8,\n\n pub(crate) buf: &'a mut Vec<u8>,\n\n pub(crate) read: usize,\n\n}\n\n\n\nimpl<T: BufRead + Unpin + ?Sized> Future for ReadUntilFuture<'_, T> {\n\n type Output = io::Result<usize>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n", "file_path": "src/io/buf_read/read_until.rs", "rank": 32, "score": 97169.05832089839 }, { "content": " let Self {\n\n reader,\n\n byte,\n\n buf,\n\n read,\n\n } = &mut *self;\n\n read_until_internal(Pin::new(reader), cx, *byte, buf, read)\n\n }\n\n}\n", "file_path": "src/io/buf_read/read_until.rs", "rank": 33, "score": 97145.29327013162 }, { "content": "use std::mem;\n\nuse std::pin::Pin;\n\nuse std::str;\n\n\n\nuse super::read_until_internal;\n\nuse crate::future::Future;\n\nuse crate::io::{self, BufRead};\n\nuse crate::task::{Context, Poll};\n\n\n\n#[doc(hidden)]\n\n#[allow(missing_debug_implementations)]\n\npub struct ReadLineFuture<'a, T: Unpin + ?Sized> {\n\n pub(crate) reader: &'a mut T,\n\n pub(crate) buf: &'a mut String,\n\n pub(crate) bytes: Vec<u8>,\n\n pub(crate) read: usize,\n\n}\n\n\n\nimpl<T: BufRead + Unpin + ?Sized> Future 
for ReadLineFuture<'_, T> {\n\n type Output = io::Result<usize>;\n", "file_path": "src/io/buf_read/read_line.rs", "rank": 34, "score": 95484.84384357125 }, { "content": "\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let Self {\n\n reader,\n\n buf,\n\n bytes,\n\n read,\n\n } = &mut *self;\n\n let reader = Pin::new(reader);\n\n\n\n let ret = futures_core::ready!(read_until_internal(reader, cx, b'\\n', bytes, read));\n\n if str::from_utf8(&bytes).is_err() {\n\n Poll::Ready(ret.and_then(|_| {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"stream did not contain valid UTF-8\",\n\n ))\n\n }))\n\n } else {\n\n debug_assert!(buf.is_empty());\n\n debug_assert_eq!(*read, 0);\n\n // Safety: `bytes` is a valid UTF-8 because `str::from_utf8` returned `Ok`.\n\n mem::swap(unsafe { buf.as_mut_vec() }, bytes);\n\n Poll::Ready(ret)\n\n }\n\n }\n\n}\n", "file_path": "src/io/buf_read/read_line.rs", "rank": 35, "score": 95477.85693065014 }, { "content": "///\n\n/// assert_eq!(future::poll_fn(poll_greeting).await, \"hello world\");\n\n/// #\n\n/// # }) }\n\n/// ```\n\npub async fn poll_fn<F, T>(f: F) -> T\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<T>,\n\n{\n\n let fut = PollFn { f };\n\n fut.await\n\n}\n\n\n", "file_path": "src/future/poll_fn.rs", "rank": 36, "score": 90385.50628664129 }, { "content": "use std::pin::Pin;\n\n\n\nuse crate::future::Future;\n\nuse crate::task::{Context, Poll};\n\n\n\n/// Creates a new future wrapping around a function returning [`Poll`].\n\n///\n\n/// Polling the returned future delegates to the wrapped function.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() { async_std::task::block_on(async {\n\n/// #\n\n/// use async_std::future;\n\n/// use async_std::task::{Context, Poll};\n\n///\n\n/// fn poll_greeting(_: &mut Context<'_>) -> Poll<String> {\n\n/// Poll::Ready(\"hello world\".to_string())\n\n/// }\n", "file_path": "src/future/poll_fn.rs", "rank": 37, "score": 
90379.35648899608 }, { "content": " pub(crate) reader: R,\n\n pub(crate) buf: String,\n\n pub(crate) bytes: Vec<u8>,\n\n pub(crate) read: usize,\n\n}\n\n\n\nimpl<R: BufRead> Stream for Lines<R> {\n\n type Item = io::Result<String>;\n\n\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n let Self {\n\n reader,\n\n buf,\n\n bytes,\n\n read,\n\n } = unsafe { self.get_unchecked_mut() };\n\n let reader = unsafe { Pin::new_unchecked(reader) };\n\n let n = futures_core::ready!(read_line_internal(reader, cx, buf, bytes, read))?;\n\n if n == 0 && buf.is_empty() {\n\n return Poll::Ready(None);\n", "file_path": "src/io/buf_read/lines.rs", "rank": 38, "score": 87842.71210432926 }, { "content": " impl<T: BufRead + Unpin + ?Sized> BufRead for &mut T {\n\n fn poll_fill_buf(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> Poll<io::Result<&[u8]>> {\n\n unreachable!(\"this impl only appears in the rendered docs\")\n\n }\n\n\n\n fn consume(self: Pin<&mut Self>, amt: usize) {\n\n unreachable!(\"this impl only appears in the rendered docs\")\n\n }\n\n }\n\n\n\n impl<P> BufRead for Pin<P>\n\n where\n\n P: DerefMut + Unpin,\n\n <P as Deref>::Target: BufRead,\n\n {\n\n fn poll_fill_buf(\n\n self: Pin<&mut Self>,\n", "file_path": "src/io/buf_read/mod.rs", "rank": 39, "score": 87842.57277959264 }, { "content": " buf: String::new(),\n\n bytes: Vec::new(),\n\n read: 0,\n\n }\n\n }\n\n }\n\n\n\n impl<T: BufRead + Unpin + ?Sized> BufRead for Box<T> {\n\n fn poll_fill_buf(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> Poll<io::Result<&[u8]>> {\n\n unreachable!(\"this impl only appears in the rendered docs\")\n\n }\n\n\n\n fn consume(self: Pin<&mut Self>, amt: usize) {\n\n unreachable!(\"this impl only appears in the rendered docs\")\n\n }\n\n }\n\n\n", "file_path": "src/io/buf_read/mod.rs", "rank": 40, "score": 87840.96683059308 }, { "content": " cx: &mut Context<'_>,\n\n ) -> Poll<io::Result<&[u8]>> {\n\n unreachable!(\"this 
impl only appears in the rendered docs\")\n\n }\n\n\n\n fn consume(self: Pin<&mut Self>, amt: usize) {\n\n unreachable!(\"this impl only appears in the rendered docs\")\n\n }\n\n }\n\n\n\n impl BufRead for &[u8] {\n\n fn poll_fill_buf(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> Poll<io::Result<&[u8]>> {\n\n unreachable!()\n\n }\n\n\n\n fn consume(self: Pin<&mut Self>, amt: usize) {\n\n unreachable!(\"this impl only appears in the rendered docs\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/io/buf_read/mod.rs", "rank": 41, "score": 87838.02631627514 }, { "content": " let mut buf = vec![];\n\n\n\n let mut size = reader.read_until(b'\\n', &mut buf).await?;\n\n assert_eq!(size, 7);\n\n assert_eq!(buf, b\"append\\n\");\n\n\n\n size += reader.read_until(b'\\n', &mut buf).await?;\n\n assert_eq!(size, from.len());\n\n\n\n assert_eq!(buf, from);\n\n #\n\n # Ok(()) }) }\n\n ```\n\n \"#]\n\n fn read_until<'a>(\n\n &'a mut self,\n\n byte: u8,\n\n buf: &'a mut Vec<u8>,\n\n ) -> impl Future<Output = usize> + 'a [ReadUntilFuture<'a, Self>]\n\n where\n", "file_path": "src/io/buf_read/mod.rs", "rank": 42, "score": 87829.17837190662 }, { "content": " let mut file = BufReader::new(File::open(\"a.txt\").await?);\n\n\n\n let mut buf = String::new();\n\n file.read_line(&mut buf).await?;\n\n #\n\n # Ok(()) }) }\n\n ```\n\n \"#]\n\n fn read_line<'a>(\n\n &'a mut self,\n\n buf: &'a mut String,\n\n ) -> impl Future<Output = io::Result<usize>> + 'a [ReadLineFuture<'a, Self>]\n\n where\n\n Self: Unpin,\n\n {\n\n ReadLineFuture {\n\n reader: self,\n\n bytes: unsafe { mem::replace(buf.as_mut_vec(), Vec::new()) },\n\n buf,\n\n read: 0,\n", "file_path": "src/io/buf_read/mod.rs", "rank": 43, "score": 87828.95491444797 }, { "content": " use async_std::prelude::*;\n\n\n\n let mut file = BufReader::new(File::open(\"a.txt\").await?);\n\n\n\n let mut buf = Vec::with_capacity(1024);\n\n let n = file.read_until(b'\\n', &mut buf).await?;\n\n #\n\n # Ok(()) }) }\n\n ```\n\n\n\n Multiple 
successful calls to `read_until` append all bytes up to and including to\n\n `buf`:\n\n ```\n\n # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n #\n\n use async_std::io::BufReader;\n\n use async_std::prelude::*;\n\n\n\n let from: &[u8] = b\"append\\nexample\\n\";\n\n let mut reader = BufReader::new(from);\n", "file_path": "src/io/buf_read/mod.rs", "rank": 44, "score": 87827.47862707841 }, { "content": "use std::mem;\n\nuse std::pin::Pin;\n\nuse std::str;\n\n\n\nuse super::read_until_internal;\n\nuse crate::io::{self, BufRead};\n\nuse crate::stream::Stream;\n\nuse crate::task::{Context, Poll};\n\n\n\n/// A stream of lines in a byte stream.\n\n///\n\n/// This stream is created by the [`lines`] method on types that implement [`BufRead`].\n\n///\n\n/// This type is an async version of [`std::io::Lines`].\n\n///\n\n/// [`lines`]: trait.BufRead.html#method.lines\n\n/// [`BufRead`]: trait.BufRead.html\n\n/// [`std::io::Lines`]: https://doc.rust-lang.org/std/io/struct.Lines.html\n\n#[derive(Debug)]\n\npub struct Lines<R> {\n", "file_path": "src/io/buf_read/lines.rs", "rank": 45, "score": 87826.58155270161 }, { "content": "mod lines;\n\nmod read_line;\n\nmod read_until;\n\n\n\npub use lines::Lines;\n\nuse read_line::ReadLineFuture;\n\nuse read_until::ReadUntilFuture;\n\n\n\nuse std::mem;\n\nuse std::pin::Pin;\n\n\n\nuse cfg_if::cfg_if;\n\n\n\nuse crate::io;\n\nuse crate::task::{Context, Poll};\n\nuse crate::utils::extension_trait;\n\n\n\ncfg_if! 
{\n\n if #[cfg(feature = \"docs\")] {\n\n use std::ops::{Deref, DerefMut};\n", "file_path": "src/io/buf_read/mod.rs", "rank": 46, "score": 87825.57748846336 }, { "content": " use async_std::prelude::*;\n\n\n\n let file = File::open(\"a.txt\").await?;\n\n let mut lines = BufReader::new(file).lines();\n\n let mut count = 0;\n\n\n\n while let Some(line) = lines.next().await {\n\n line?;\n\n count += 1;\n\n }\n\n #\n\n # Ok(()) }) }\n\n ```\n\n \"#]\n\n fn lines(self) -> Lines<Self>\n\n where\n\n Self: Unpin + Sized,\n\n {\n\n Lines {\n\n reader: self,\n", "file_path": "src/io/buf_read/mod.rs", "rank": 47, "score": 87825.45162786459 }, { "content": " should no longer be returned in calls to `read`.\n\n \"#]\n\n fn consume(self: Pin<&mut Self>, amt: usize);\n\n\n\n #[doc = r#\"\n\n Reads all bytes into `buf` until the delimiter `byte` or EOF is reached.\n\n\n\n This function will read bytes from the underlying stream until the delimiter or EOF\n\n is found. Once found, all bytes up to, and including, the delimiter (if found) will\n\n be appended to `buf`.\n\n\n\n If successful, this function will return the total number of bytes read.\n\n\n\n # Examples\n\n\n\n ```no_run\n\n # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n #\n\n use async_std::fs::File;\n\n use async_std::io::BufReader;\n", "file_path": "src/io/buf_read/mod.rs", "rank": 48, "score": 87825.10721488524 }, { "content": " Self: Unpin,\n\n {\n\n ReadUntilFuture {\n\n reader: self,\n\n byte,\n\n buf,\n\n read: 0,\n\n }\n\n }\n\n\n\n #[doc = r#\"\n\n Reads all bytes and appends them into `buf` until a newline (the 0xA byte) is\n\n reached.\n\n\n\n This function will read bytes from the underlying stream until the newline\n\n delimiter (the 0xA byte) or EOF is found. 
Once found, all bytes up to, and\n\n including, the delimiter (if found) will be appended to `buf`.\n\n\n\n If successful, this function will return the total number of bytes read.\n\n\n", "file_path": "src/io/buf_read/mod.rs", "rank": 49, "score": 87822.50004766362 }, { "content": " If this function returns `Ok(0)`, the stream has reached EOF.\n\n\n\n # Errors\n\n\n\n This function has the same error semantics as [`read_until`] and will also return\n\n an error if the read bytes are not valid UTF-8. If an I/O error is encountered then\n\n `buf` may contain some bytes already read in the event that all data read so far\n\n was valid UTF-8.\n\n\n\n [`read_until`]: #method.read_until\n\n\n\n # Examples\n\n\n\n ```no_run\n\n # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n #\n\n use async_std::fs::File;\n\n use async_std::io::BufReader;\n\n use async_std::prelude::*;\n\n\n", "file_path": "src/io/buf_read/mod.rs", "rank": 50, "score": 87818.3550757654 }, { "content": " }\n\n}\n\n\n\nextension_trait! 
{\n\n #[doc = r#\"\n\n Allows reading from a buffered byte stream.\n\n\n\n This trait is a re-export of [`futures::io::AsyncBufRead`] and is an async version of\n\n [`std::io::BufRead`].\n\n\n\n The [provided methods] do not really exist in the trait itself, but they become\n\n available when the prelude is imported:\n\n\n\n ```\n\n # #[allow(unused_imports)]\n\n use async_std::prelude::*;\n\n ```\n\n\n\n [`std::io::BufRead`]: https://doc.rust-lang.org/std/io/trait.BufRead.html\n\n [`futures::io::AsyncBufRead`]:\n\n https://docs.rs/futures-preview/0.3.0-alpha.17/futures/io/trait.AsyncBufRead.html\n\n [provided methods]: #provided-methods\n\n \"#]\n", "file_path": "src/io/buf_read/mod.rs", "rank": 51, "score": 87817.84765266998 }, { "content": " }\n\n if buf.ends_with('\\n') {\n\n buf.pop();\n\n if buf.ends_with('\\r') {\n\n buf.pop();\n\n }\n\n }\n\n Poll::Ready(Some(Ok(mem::replace(buf, String::new()))))\n\n }\n\n}\n\n\n", "file_path": "src/io/buf_read/lines.rs", "rank": 52, "score": 87813.79413577913 }, { "content": " }\n\n }\n\n\n\n #[doc = r#\"\n\n Returns a stream over the lines of this byte stream.\n\n\n\n The stream returned from this function will yield instances of\n\n [`io::Result`]`<`[`String`]`>`. 
Each string returned will *not* have a newline byte\n\n (the 0xA byte) or CRLF (0xD, 0xA bytes) at the end.\n\n\n\n [`io::Result`]: type.Result.html\n\n [`String`]: https://doc.rust-lang.org/std/string/struct.String.html\n\n\n\n # Examples\n\n\n\n ```no_run\n\n # fn main() -> std::io::Result<()> { async_std::task::block_on(async {\n\n #\n\n use async_std::fs::File;\n\n use async_std::io::BufReader;\n", "file_path": "src/io/buf_read/mod.rs", "rank": 53, "score": 87811.1921011403 }, { "content": "/// The current state of a file.\n\n///\n\n/// The `File` struct protects this state behind a lock.\n\n///\n\n/// Filesystem operations that get spawned as blocking tasks will acquire the lock, take ownership\n\n/// of the state and return it back once the operation completes.\n\nstruct State {\n\n /// The inner file.\n\n file: Arc<std::fs::File>,\n\n\n\n /// The current mode (idle, reading, or writing).\n\n mode: Mode,\n\n\n\n /// The read/write cache.\n\n ///\n\n /// If in reading mode, the cache contains a chunk of data that has been read from the file.\n\n /// If in writing mode, the cache contains data that will eventually be written to the file.\n\n cache: Vec<u8>,\n\n\n\n /// Set to `true` if the file is flushed.\n\n ///\n\n /// When a file is flushed, the write cache and the inner file's buffer are empty.\n\n is_flushed: bool,\n\n\n\n /// The last read error that came from an async operation.\n\n last_read_err: Option<io::Error>,\n", "file_path": "src/fs/file.rs", "rank": 54, "score": 74581.21086160022 }, { "content": "struct Pool {\n\n sender: Sender<async_task::Task<()>>,\n\n receiver: Receiver<async_task::Task<()>>,\n\n}\n\n\n\nlazy_static! 
{\n\n static ref POOL: Pool = {\n\n for _ in 0..2 {\n\n thread::Builder::new()\n\n .name(\"async-blocking-driver\".to_string())\n\n .spawn(|| abort_on_panic(|| {\n\n for task in &POOL.receiver {\n\n task.run();\n\n }\n\n }))\n\n .expect(\"cannot start a thread driving blocking tasks\");\n\n }\n\n\n\n // We want to use an unbuffered channel here to help\n\n // us drive our dynamic control. In effect, the\n\n // kernel's scheduler becomes the queue, reducing\n\n // the number of buffers that work must flow through\n\n // before being acted on by a core. This helps keep\n\n // latency snappy in the overall async system by\n\n // reducing bufferbloat.\n\n let (sender, receiver) = bounded(0);\n\n Pool { sender, receiver }\n\n };\n\n}\n\n\n", "file_path": "src/task/blocking.rs", "rank": 55, "score": 74576.68984632537 }, { "content": "/// The state of a networking driver.\n\nstruct Reactor {\n\n /// A mio instance that polls for new events.\n\n poller: mio::Poll,\n\n\n\n /// A collection of registered I/O handles.\n\n entries: Mutex<Slab<Arc<Entry>>>,\n\n\n\n /// Dummy I/O handle that is only used to wake up the polling thread.\n\n notify_reg: (mio::Registration, mio::SetReadiness),\n\n\n\n /// An identifier for the notification handle.\n\n notify_token: mio::Token,\n\n}\n\n\n\nimpl Reactor {\n\n /// Creates a new reactor for polling I/O events.\n\n fn new() -> io::Result<Reactor> {\n\n let poller = mio::Poll::new()?;\n\n let notify_reg = mio::Registration::new2();\n\n\n", "file_path": "src/net/driver/mod.rs", "rank": 56, "score": 73562.26510880998 }, { "content": "#[derive(Debug)]\n\nstruct Entry {\n\n /// A unique identifier.\n\n token: mio::Token,\n\n\n\n /// Tasks that are blocked on reading from this I/O handle.\n\n readers: Mutex<Vec<Waker>>,\n\n\n\n /// Thasks that are blocked on writing to this I/O handle.\n\n writers: Mutex<Vec<Waker>>,\n\n}\n\n\n", "file_path": "src/net/driver/mod.rs", "rank": 57, "score": 73562.26510880998 }, { "content": 
"#[derive(Debug)]\n\nstruct BarrierState {\n\n waker: BroadcastChannel<(usize, usize)>,\n\n count: usize,\n\n generation_id: usize,\n\n}\n\n\n\n/// A `BarrierWaitResult` is returned by `wait` when all threads in the `Barrier` have rendezvoused.\n\n///\n\n/// [`wait`]: struct.Barrier.html#method.wait\n\n/// [`Barrier`]: struct.Barrier.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use async_std::sync::Barrier;\n\n///\n\n/// let barrier = Barrier::new(1);\n\n/// let barrier_wait_result = barrier.wait();\n\n/// ```\n\n#[cfg_attr(feature = \"docs\", doc(cfg(unstable)))]\n", "file_path": "src/sync/barrier.rs", "rank": 58, "score": 73562.26510880998 }, { "content": "struct Pending<T> {\n\n _marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T> Future for Pending<T> {\n\n type Output = T;\n\n\n\n fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<T> {\n\n Poll::Pending\n\n }\n\n}\n", "file_path": "src/future/pending.rs", "rank": 59, "score": 70924.41836126085 }, { "content": "#[test]\n\n#[should_panic = \"boom\"]\n\nfn panic() {\n\n task::block_on(async {\n\n // This panic should get propagated into the parent thread.\n\n panic!(\"boom\");\n\n });\n\n}\n", "file_path": "tests/block_on.rs", "rank": 60, "score": 70307.66848639211 }, { "content": "#[test]\n\nfn smoke() {\n\n task::block_on(async {\n\n let m = Mutex::new(());\n\n drop(m.lock().await);\n\n drop(m.lock().await);\n\n })\n\n}\n\n\n", "file_path": "tests/mutex.rs", "rank": 61, "score": 70307.66848639211 }, { "content": "#[test]\n\nfn smoke() {\n\n let res = task::block_on(async { 1 + 2 });\n\n assert_eq!(res, 3);\n\n}\n\n\n", "file_path": "tests/block_on.rs", "rank": 62, "score": 70307.66848639211 }, { "content": "#[test]\n\nfn contention() {\n\n const N: u32 = 10;\n\n const M: usize = 1000;\n\n\n\n let (tx, mut rx) = mpsc::unbounded();\n\n let tx = Arc::new(tx);\n\n let rw = Arc::new(RwLock::new(()));\n\n\n\n // Spawn N tasks that randomly acquire the lock M times.\n\n for _ in 0..N {\n\n let tx = 
tx.clone();\n\n let rw = rw.clone();\n\n\n\n task::spawn(async move {\n\n for _ in 0..M {\n\n if random(N) == 0 {\n\n drop(rw.write().await);\n\n } else {\n\n drop(rw.read().await);\n\n }\n", "file_path": "tests/rwlock.rs", "rank": 63, "score": 70307.66848639211 }, { "content": "#[test]\n\nfn contention() {\n\n task::block_on(async {\n\n let (tx, mut rx) = mpsc::unbounded();\n\n\n\n let tx = Arc::new(tx);\n\n let mutex = Arc::new(Mutex::new(0));\n\n let num_tasks = 10000;\n\n\n\n for _ in 0..num_tasks {\n\n let tx = tx.clone();\n\n let mutex = mutex.clone();\n\n\n\n task::spawn(async move {\n\n let mut lock = mutex.lock().await;\n\n *lock += 1;\n\n tx.unbounded_send(()).unwrap();\n\n drop(lock);\n\n });\n\n }\n\n\n\n for _ in 0..num_tasks {\n\n rx.next().await.unwrap();\n\n }\n\n\n\n let lock = mutex.lock().await;\n\n assert_eq!(num_tasks, *lock);\n\n });\n\n}\n", "file_path": "tests/mutex.rs", "rank": 64, "score": 70307.66848639211 }, { "content": "fn main() {\n\n femme::start(log::LevelFilter::Trace).unwrap();\n\n\n\n task::block_on(async {\n\n let handle = task::spawn(async {\n\n log::info!(\"Hello world!\");\n\n });\n\n\n\n handle.await;\n\n })\n\n}\n", "file_path": "examples/logging.rs", "rank": 65, "score": 70307.66848639211 }, { "content": "#[test]\n\nfn smoke() {\n\n task::block_on(async {\n\n let lock = RwLock::new(());\n\n drop(lock.read().await);\n\n drop(lock.write().await);\n\n drop((lock.read().await, lock.read().await));\n\n drop(lock.write().await);\n\n });\n\n}\n\n\n", "file_path": "tests/rwlock.rs", "rank": 66, "score": 70307.66848639211 }, { "content": "#[doc(hidden)]\n\n#[allow(missing_debug_implementations)]\n\nstruct TimeoutFuture<F> {\n\n future: F,\n\n delay: Delay,\n\n}\n\n\n\nimpl<F> TimeoutFuture<F> {\n\n pin_utils::unsafe_pinned!(future: F);\n\n pin_utils::unsafe_pinned!(delay: Delay);\n\n}\n\n\n\nimpl<F: Future> Future for TimeoutFuture<F> {\n\n type Output = Result<F::Output, TimeoutError>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: 
&mut Context<'_>) -> Poll<Self::Output> {\n\n match self.as_mut().future().poll(cx) {\n\n Poll::Ready(v) => Poll::Ready(Ok(v)),\n\n Poll::Pending => match self.delay().poll(cx) {\n\n Poll::Ready(_) => Poll::Ready(Err(TimeoutError { _private: () })),\n\n Poll::Pending => Poll::Pending,\n\n },\n", "file_path": "src/future/timeout.rs", "rank": 67, "score": 69954.35537850598 }, { "content": "/// The state of a lock.\n\nstruct LockState<T> {\n\n /// Set to `true` when locked.\n\n locked: AtomicBool,\n\n\n\n /// The inner value.\n\n value: UnsafeCell<T>,\n\n\n\n /// A list of tasks interested in acquiring the lock.\n\n wakers: Mutex<Vec<Waker>>,\n\n}\n\n\n\nimpl<T> Lock<T> {\n\n /// Creates a new lock initialized with `value`.\n\n fn new(value: T) -> Lock<T> {\n\n Lock(Arc::new(LockState {\n\n locked: AtomicBool::new(false),\n\n value: UnsafeCell::new(value),\n\n wakers: Mutex::new(Vec::new()),\n\n }))\n\n }\n", "file_path": "src/fs/file.rs", "rank": 68, "score": 69954.35537850598 }, { "content": "#[cfg_attr(feature = \"docs\", doc(cfg(unstable)))]\n\n#[cfg(any(feature = \"unstable\", feature = \"docs\"))]\n\npub trait IntoStream {\n\n /// The type of the elements being iterated over.\n\n type Item;\n\n\n\n /// Which kind of stream are we turning this into?\n\n type IntoStream: Stream<Item = Self::Item>;\n\n\n\n /// Creates a stream from a value.\n\n fn into_stream(self) -> Self::IntoStream;\n\n}\n\n\n\nimpl<I: Stream> IntoStream for I {\n\n type Item = I::Item;\n\n type IntoStream = I;\n\n\n\n #[inline]\n\n fn into_stream(self) -> I {\n\n self\n\n }\n\n}\n", "file_path": "src/stream/into_stream.rs", "rank": 69, "score": 69497.79813200954 }, { "content": "fn main() {\n\n task::block_on(async {\n\n println!(\"var = {}\", VAR.with(|v| v.get()));\n\n VAR.with(|v| v.set(2));\n\n println!(\"var = {}\", VAR.with(|v| v.get()));\n\n })\n\n}\n", "file_path": "examples/task-local.rs", "rank": 70, "score": 69256.99984238383 }, { "content": "fn main() {\n\n task::block_on(async 
{\n\n let raw_response = get().await.expect(\"request\");\n\n let response = String::from_utf8(raw_response).expect(\"utf8 conversion\");\n\n println!(\"received: {}\", response);\n\n });\n\n}\n", "file_path": "examples/socket-timeouts.rs", "rank": 71, "score": 69256.99984238383 }, { "content": "fn main() {\n\n task::block_on(say_hi())\n\n}\n", "file_path": "examples/hello-world.rs", "rank": 72, "score": 69256.99984238383 }, { "content": "fn main() {}\n", "file_path": "examples/surf-web.rs", "rank": 73, "score": 69256.99984238383 }, { "content": "#[test]\n\nfn try_lock() {\n\n let m = Mutex::new(());\n\n *m.try_lock().unwrap() = ();\n\n}\n\n\n", "file_path": "tests/mutex.rs", "rank": 74, "score": 69256.99984238383 }, { "content": "fn main() {\n\n task::block_on(async {\n\n task::Builder::new()\n\n .name(\"my-task\".to_string())\n\n .spawn(print_name())\n\n .unwrap()\n\n .await;\n\n })\n\n}\n", "file_path": "examples/task-name.rs", "rank": 75, "score": 69256.99984238383 }, { "content": "#[test]\n\nfn writer_and_readers() {\n\n #[derive(Default)]\n\n struct Yield(Cell<bool>);\n\n\n\n impl Future for Yield {\n\n type Output = ();\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n if self.0.get() {\n\n Poll::Ready(())\n\n } else {\n\n self.0.set(true);\n\n cx.waker().wake_by_ref();\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n\n\n let lock = Arc::new(RwLock::new(0i32));\n\n let (tx, mut rx) = mpsc::unbounded();\n", "file_path": "tests/rwlock.rs", "rank": 76, "score": 69256.99984238383 }, { "content": "#[test]\n\nfn try_write() {\n\n task::block_on(async {\n\n let lock = RwLock::new(0isize);\n\n let read_guard = lock.read().await;\n\n assert!(lock.try_write().is_none());\n\n drop(read_guard);\n\n });\n\n}\n\n\n", "file_path": "tests/rwlock.rs", "rank": 77, "score": 69256.99984238383 }, { "content": "struct CatchUnwindFuture<F> {\n\n future: F,\n\n}\n\n\n\nimpl<F> CatchUnwindFuture<F> {\n\n pin_utils::unsafe_pinned!(future: 
F);\n\n}\n\n\n\nimpl<F: Future + UnwindSafe> Future for CatchUnwindFuture<F> {\n\n type Output = thread::Result<F::Output>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n panic::catch_unwind(AssertUnwindSafe(|| self.future().poll(cx)))?.map(Ok)\n\n }\n\n}\n\n\n", "file_path": "src/task/block_on.rs", "rank": 78, "score": 69025.8064446091 }, { "content": "/// A trait for objects which can be converted or resolved to one or more [`SocketAddr`] values.\n\n///\n\n/// This trait is an async version of [`std::net::ToSocketAddrs`].\n\n///\n\n/// [`std::net::ToSocketAddrs`]: https://doc.rust-lang.org/std/net/trait.ToSocketAddrs.html\n\n/// [`SocketAddr`]: https://doc.rust-lang.org/std/net/enum.SocketAddr.html\n\npub trait ToSocketAddrs {\n\n /// Returned iterator over socket addresses which this type may correspond to.\n\n type Iter: Iterator<Item = SocketAddr>;\n\n\n\n /// Converts this object to an iterator of resolved `SocketAddr`s.\n\n ///\n\n /// The returned iterator may not actually yield any values depending on the outcome of any\n\n /// resolution performed.\n\n ///\n\n /// Note that this function may block a backend thread while resolution is performed.\n\n fn to_socket_addrs(&self) -> ret!('_, ToSocketAddrsFuture, Self::Iter);\n\n}\n\n\n\n#[doc(hidden)]\n\n#[allow(missing_debug_implementations)]\n\npub enum ToSocketAddrsFuture<'a, I> {\n\n Phantom(PhantomData<&'a ()>),\n\n Join(blocking::JoinHandle<io::Result<I>>),\n\n Ready(Option<io::Result<I>>),\n\n}\n", "file_path": "src/net/addr.rs", "rank": 79, "score": 68531.28320755763 }, { "content": "#[test]\n\nfn to_socket_addr_str() {\n\n let a = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(77, 88, 21, 11), 24352));\n\n assert_eq!(Ok(vec![a]), blocking_resolve(\"77.88.21.11:24352\"));\n\n\n\n let a = SocketAddr::V6(SocketAddrV6::new(\n\n Ipv6Addr::new(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1),\n\n 53,\n\n 0,\n\n 0,\n\n ));\n\n assert_eq!(Ok(vec![a]), 
blocking_resolve(\"[2a02:6b8:0:1::1]:53\"));\n\n\n\n let a = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 23924));\n\n #[cfg(not(target_env = \"sgx\"))]\n\n assert!(blocking_resolve(\"localhost:23924\").unwrap().contains(&a));\n\n #[cfg(target_env = \"sgx\")]\n\n let _ = a;\n\n}\n\n\n", "file_path": "tests/addr.rs", "rank": 80, "score": 68253.30503877175 }, { "content": "#[test]\n\nfn drop_local() {\n\n static DROP_LOCAL: AtomicBool = AtomicBool::new(false);\n\n\n\n struct Local;\n\n\n\n impl Drop for Local {\n\n fn drop(&mut self) {\n\n DROP_LOCAL.store(true, Ordering::SeqCst);\n\n }\n\n }\n\n\n\n task_local! {\n\n static LOCAL: Local = Local;\n\n }\n\n\n\n // Spawn a task that just touches its task-local.\n\n let handle = task::spawn(async {\n\n LOCAL.with(|_| ());\n\n });\n\n let task = handle.task().clone();\n\n\n\n // Wait for the task to finish and make sure its task-local has been dropped.\n\n task::block_on(async {\n\n handle.await;\n\n assert!(DROP_LOCAL.load(Ordering::SeqCst));\n\n drop(task);\n\n });\n\n}\n", "file_path": "tests/task_local.rs", "rank": 81, "score": 68253.30503877175 }, { "content": "#[test]\n\nfn to_socket_addr_string() {\n\n let a = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(77, 88, 21, 11), 24352));\n\n let s: &str = \"77.88.21.11:24352\";\n\n assert_eq!(Ok(vec![a]), blocking_resolve(s));\n\n\n\n let s: &String = &\"77.88.21.11:24352\".to_string();\n\n assert_eq!(Ok(vec![a]), blocking_resolve(s));\n\n\n\n let s: String = \"77.88.21.11:24352\".to_string();\n\n assert_eq!(Ok(vec![a]), blocking_resolve(s));\n\n}\n\n\n\n// FIXME: figure out why this fails on openbsd and fix it\n", "file_path": "tests/addr.rs", "rank": 82, "score": 68253.30503877175 }, { "content": " /// A trait to express the ability to consume an object and acquire ownership of\n\n /// its raw `HANDLE`.\n\n pub trait IntoRawHandle {\n\n /// Consumes this object, returning the raw underlying handle.\n\n ///\n\n /// This function **transfers ownership** 
of the underlying handle to the\n\n /// caller. Callers are then the unique owners of the handle and must close\n\n /// it once it's no longer needed.\n\n fn into_raw_handle(self) -> RawHandle;\n\n }\n\n } else {\n\n pub use std::os::windows::io::{\n\n AsRawHandle, FromRawHandle, IntoRawHandle, RawHandle, RawSocket,\n\n };\n\n }\n\n}\n", "file_path": "src/os/windows/io.rs", "rank": 83, "score": 67606.13049206424 }, { "content": " /// Construct I/O objects from raw handles.\n\n pub trait FromRawHandle {\n\n /// Constructs a new I/O object from the specified raw handle.\n\n ///\n\n /// This function will **consume ownership** of the handle given,\n\n /// passing responsibility for closing the handle to the returned\n\n /// object.\n\n ///\n\n /// This function is also unsafe as the primitives currently returned\n\n /// have the contract that they are the sole owner of the file\n\n /// descriptor they are wrapping. Usage of this function could\n\n /// accidentally allow violating this contract which can cause memory\n\n /// unsafety in code that relies on it being true.\n\n unsafe fn from_raw_handle(handle: RawHandle) -> Self;\n\n }\n\n\n", "file_path": "src/os/windows/io.rs", "rank": 84, "score": 67606.13049206424 }, { "content": " /// A trait to extract the raw unix file descriptor from an underlying\n\n /// object.\n\n ///\n\n /// This is only available on unix platforms and must be imported in order\n\n /// to call the method. Windows platforms have a corresponding `AsRawHandle`\n\n /// and `AsRawSocket` set of traits.\n\n pub trait AsRawFd {\n\n /// Extracts the raw file descriptor.\n\n ///\n\n /// This method does **not** pass ownership of the raw file descriptor\n\n /// to the caller. 
The descriptor is only guaranteed to be valid while\n\n /// the original object has not yet been destroyed.\n\n fn as_raw_fd(&self) -> RawFd;\n\n }\n\n\n", "file_path": "src/os/unix/io.rs", "rank": 85, "score": 67606.13049206424 }, { "content": " /// Extracts raw handles.\n\n pub trait AsRawHandle {\n\n /// Extracts the raw handle, without taking any ownership.\n\n fn as_raw_handle(&self) -> RawHandle;\n\n }\n\n\n", "file_path": "src/os/windows/io.rs", "rank": 86, "score": 67606.13049206424 }, { "content": " /// A trait to express the ability to consume an object and acquire ownership of\n\n /// its raw file descriptor.\n\n pub trait IntoRawFd {\n\n /// Consumes this object, returning the raw underlying file descriptor.\n\n ///\n\n /// This function **transfers ownership** of the underlying file descriptor\n\n /// to the caller. Callers are then the unique owners of the file descriptor\n\n /// and must close the descriptor once it's no longer needed.\n\n fn into_raw_fd(self) -> RawFd;\n\n }\n\n } else {\n\n pub use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};\n\n }\n\n}\n", "file_path": "src/os/unix/io.rs", "rank": 87, "score": 67606.13049206424 }, { "content": " /// A trait to express the ability to construct an object from a raw file\n\n /// descriptor.\n\n pub trait FromRawFd {\n\n /// Constructs a new instance of `Self` from the given raw file\n\n /// descriptor.\n\n ///\n\n /// This function **consumes ownership** of the specified file\n\n /// descriptor. The returned object will take responsibility for closing\n\n /// it when the object goes out of scope.\n\n ///\n\n /// This function is also unsafe as the primitives currently returned\n\n /// have the contract that they are the sole owner of the file\n\n /// descriptor they are wrapping. 
Usage of this function could\n\n /// accidentally allow violating this contract which can cause memory\n\n /// unsafety in code that relies on it being true.\n\n unsafe fn from_raw_fd(fd: RawFd) -> Self;\n\n }\n\n\n", "file_path": "src/os/unix/io.rs", "rank": 88, "score": 67606.13049206424 }, { "content": "#[cfg_attr(feature = \"docs\", doc(cfg(unstable)))]\n\npub trait Extend<A> {\n\n /// Extends a collection with the contents of a stream.\n\n fn stream_extend<'a, T: IntoStream<Item = A> + 'a>(\n\n &'a mut self,\n\n stream: T,\n\n ) -> Pin<Box<dyn Future<Output = ()> + 'a>>;\n\n}\n\n\n\nimpl Extend<()> for () {\n\n fn stream_extend<'a, T: IntoStream<Item = ()> + 'a>(\n\n &'a mut self,\n\n stream: T,\n\n ) -> Pin<Box<dyn Future<Output = ()> + 'a>> {\n\n let stream = stream.into_stream();\n\n Box::pin(async move {\n\n pin_utils::pin_mut!(stream);\n\n while let Some(_) = stream.next().await {}\n\n })\n\n }\n\n}\n", "file_path": "src/stream/extend.rs", "rank": 89, "score": 67401.85839128001 }, { "content": "#[test]\n\nfn to_socket_addr_ipaddr_u16() {\n\n let a = Ipv4Addr::new(77, 88, 21, 11);\n\n let p = 12345;\n\n let e = SocketAddr::V4(SocketAddrV4::new(a, p));\n\n assert_eq!(Ok(vec![e]), blocking_resolve((a, p)));\n\n}\n\n\n", "file_path": "tests/addr.rs", "rank": 90, "score": 67293.50275974587 }, { "content": "#[test]\n\nfn to_socket_addr_str_u16() {\n\n let a = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(77, 88, 21, 11), 24352));\n\n assert_eq!(Ok(vec![a]), blocking_resolve((\"77.88.21.11\", 24352)));\n\n\n\n let a = SocketAddr::V6(SocketAddrV6::new(\n\n Ipv6Addr::new(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1),\n\n 53,\n\n 0,\n\n 0,\n\n ));\n\n assert_eq!(Ok(vec![a]), blocking_resolve((\"2a02:6b8:0:1::1\", 53)));\n\n\n\n let a = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 23924));\n\n #[cfg(not(target_env = \"sgx\"))]\n\n assert!(blocking_resolve((\"localhost\", 23924)).unwrap().contains(&a));\n\n #[cfg(target_env = \"sgx\")]\n\n let _ = 
a;\n\n}\n\n\n", "file_path": "tests/addr.rs", "rank": 91, "score": 67293.50275974587 }, { "content": "#[test]\n\n#[should_panic(expected = \"timed out\")]\n\nfn io_timeout_timedout() {\n\n task::block_on(async {\n\n io::timeout(Duration::from_secs(1), async {\n\n let stdin = io::stdin();\n\n let mut line = String::new();\n\n let _n = stdin.read_line(&mut line).await?;\n\n Ok(())\n\n })\n\n .await\n\n .unwrap(); // We should panic with a timeout error\n\n });\n\n}\n\n\n", "file_path": "tests/io_timeout.rs", "rank": 92, "score": 67293.50275974587 }, { "content": "#[test]\n\n#[cfg(not(any(windows, target_os = \"openbsd\")))]\n\nfn to_socket_addr_str_bad() {\n\n assert!(blocking_resolve(\"1200::AB00:1234::2552:7777:1313:34300\").is_err());\n\n}\n", "file_path": "tests/addr.rs", "rank": 93, "score": 67293.50275974587 }, { "content": " /// Unix-specific extension methods for `DirEntry`.\n\n pub trait DirEntryExt {\n\n /// Returns the underlying `d_ino` field in the contained `dirent`\n\n /// structure.\n\n fn ino(&self) -> u64;\n\n }\n\n\n", "file_path": "src/os/unix/fs.rs", "rank": 94, "score": 66719.7404526913 }, { "content": " /// Unix-specific extensions to `DirBuilder`.\n\n pub trait DirBuilderExt {\n\n /// Sets the mode to create new directories with. 
This option defaults to\n\n /// `0o777`.\n\n fn mode(&mut self, mode: u32) -> &mut Self;\n\n }\n\n\n", "file_path": "src/os/unix/fs.rs", "rank": 95, "score": 66719.7404526913 }, { "content": " /// Unix-specific extensions to `OpenOptions`.\n\n pub trait OpenOptionsExt {\n\n /// Sets the mode bits that a new file will be created with.\n\n ///\n\n /// If a new file is created as part of a `File::open_opts` call then this\n\n /// specified `mode` will be used as the permission bits for the new file.\n\n /// If no `mode` is set, the default of `0o666` will be used.\n\n /// The operating system masks out bits with the systems `umask`, to produce\n\n /// the final permissions.\n\n fn mode(&mut self, mode: u32) -> &mut Self;\n\n\n\n /// Pass custom flags to the `flags` argument of `open`.\n\n ///\n\n /// The bits that define the access mode are masked out with `O_ACCMODE`, to\n\n /// ensure they do not interfere with the access mode set by Rusts options.\n\n ///\n\n /// Custom flags can only set flags, not remove flags set by Rusts options.\n\n /// This options overwrites any previously set custom flags.\n\n fn custom_flags(&mut self, flags: i32) -> &mut Self;\n\n }\n\n } else {\n\n pub use std::os::unix::fs::{DirBuilderExt, OpenOptionsExt};\n\n }\n\n}\n", "file_path": "src/os/unix/fs.rs", "rank": 96, "score": 66719.7404526913 }, { "content": "#[cfg_attr(feature = \"docs\", doc(cfg(unstable)))]\n\n#[cfg(any(feature = \"unstable\", feature = \"docs\"))]\n\npub trait FromStream<T> {\n\n /// Creates a value from a stream.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Basic usage:\n\n ///\n\n /// ```\n\n /// // use async_std::stream::FromStream;\n\n ///\n\n /// // let _five_fives = async_std::stream::repeat(5).take(5);\n\n /// ```\n\n fn from_stream<'a, S: IntoStream<Item = T> + 'a>(\n\n stream: S,\n\n ) -> Pin<Box<dyn core::future::Future<Output = Self> + 'a>>;\n\n}\n", "file_path": "src/stream/from_stream.rs", "rank": 97, "score": 66435.3434668281 }, { "content": 
"#[test]\n\n#[should_panic(expected = \"My custom error\")]\n\nfn io_timeout_future_err() {\n\n task::block_on(async {\n\n io::timeout(Duration::from_secs(1), async {\n\n Err::<(), io::Error>(io::Error::new(io::ErrorKind::Other, \"My custom error\"))\n\n })\n\n .await\n\n .unwrap(); // We should panic with our own error\n\n });\n\n}\n\n\n", "file_path": "tests/io_timeout.rs", "rank": 98, "score": 66374.7754206093 }, { "content": "#[test]\n\nfn io_timeout_future_ok() {\n\n task::block_on(async {\n\n io::timeout(Duration::from_secs(1), async { Ok(()) })\n\n .await\n\n .unwrap(); // We shouldn't panic at all\n\n });\n\n}\n", "file_path": "tests/io_timeout.rs", "rank": 99, "score": 66374.7754206093 } ]
Rust
src/simulation.rs
Phraeyll/rballistics-flat
451ec376bd22f98059d5dd22c05896d9b458133d
use crate::{ consts::{FRAC_PI_2, PI}, error::{Error, Result}, my_quantity, projectiles::ProjectileImpl, units::{ celsius, fahrenheit, foot_per_second, grain, inch, inch_of_mercury, kelvin, kilogram, meter, meter_per_second, meter_per_second_squared, mile_per_hour, pascal, radian, second, Acceleration, Angle, Length, Mass, MyQuantity, Pressure, ThermodynamicTemperature, Time, Velocity, }, Numeric, }; use std::ops::DerefMut; #[derive(Debug)] pub struct Simulation<T> { pub(crate) flags: Flags, pub(crate) projectile: T, pub(crate) scope: Scope, pub(crate) atmosphere: Atmosphere, pub(crate) wind: Wind, pub(crate) shooter: Shooter, pub(crate) time_step: Time, } #[derive(Debug)] pub struct Atmosphere { pub(crate) temperature: ThermodynamicTemperature, pub(crate) pressure: Pressure, pub(crate) humidity: Numeric, } #[derive(Debug)] pub struct Flags { pub(crate) coriolis: bool, pub(crate) drag: bool, pub(crate) gravity: bool, } #[derive(Debug)] pub struct Scope { pub(crate) yaw: Angle, pub(crate) pitch: Angle, pub(crate) roll: Angle, pub(crate) height: Length, pub(crate) offset: Length, } #[derive(Debug)] pub struct Shooter { pub(crate) yaw: Angle, pub(crate) pitch: Angle, pub(crate) roll: Angle, pub(crate) lattitude: Angle, pub(crate) gravity: Acceleration, } #[derive(Debug)] pub struct Wind { pub(crate) yaw: Angle, pub(crate) pitch: Angle, pub(crate) roll: Angle, pub(crate) velocity: Velocity, } #[derive(Debug)] pub struct SimulationBuilder<T> { pub(crate) builder: Simulation<T>, } impl<T> From<SimulationBuilder<T>> for Simulation<T> { fn from(other: SimulationBuilder<T>) -> Self { Self { ..other.builder } } } impl<T> From<Simulation<T>> for SimulationBuilder<T> { fn from(other: Simulation<T>) -> Self { Self { builder: other } } } impl<T> Default for SimulationBuilder<T> where T: From<ProjectileImpl>, { fn default() -> Self { Self { builder: Simulation { flags: Flags { coriolis: true, drag: true, gravity: true, }, projectile: From::from(ProjectileImpl { caliber: 
Length::new::<inch>(0.264), weight: Mass::new::<grain>(140.0), bc: 0.305, velocity: Velocity::new::<foot_per_second>(2710.0), }), scope: Scope { yaw: Angle::new::<radian>(0.0), pitch: Angle::new::<radian>(0.0), roll: Angle::new::<radian>(0.0), height: Length::new::<inch>(1.5), offset: Length::new::<inch>(0.0), }, atmosphere: Atmosphere { temperature: ThermodynamicTemperature::new::<fahrenheit>(68.0), pressure: Pressure::new::<inch_of_mercury>(29.92), humidity: 0.0, }, wind: Wind { yaw: Angle::new::<radian>(0.0), pitch: Angle::new::<radian>(0.0), roll: Angle::new::<radian>(0.0), velocity: Velocity::new::<mile_per_hour>(0.0), }, shooter: Shooter { yaw: Angle::new::<radian>(0.0), pitch: Angle::new::<radian>(0.0), roll: Angle::new::<radian>(0.0), lattitude: Angle::new::<radian>(0.0), gravity: my_quantity!(-9.806_65), }, time_step: Time::new::<second>(0.000_001), }, } } } impl<T> SimulationBuilder<T> where T: From<ProjectileImpl>, { pub fn new() -> Self { Default::default() } } impl<T> SimulationBuilder<T> { pub fn init(self) -> Simulation<T> { From::from(self) } pub fn set_time_step(mut self, value: Time) -> Result<Self> { let min = Time::new::<second>(0.0); let max = Time::new::<second>(0.1); if value > min && value <= max { self.builder.time_step = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<second>(), max: max.get::<second>(), }) } } pub fn set_temperature(mut self, value: ThermodynamicTemperature) -> Result<Self> { let min = ThermodynamicTemperature::new::<celsius>(-80.0); let max = ThermodynamicTemperature::new::<celsius>(50.0); if value >= min && value <= max { self.builder.atmosphere.temperature = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<kelvin>(), max: max.get::<kelvin>(), }) } } pub fn set_pressure(mut self, value: Pressure) -> Result<Self> { if value.is_sign_positive() { self.builder.atmosphere.pressure = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<pascal>())) } } pub fn set_humidity(mut self, 
value: Numeric) -> Result<Self> { let (min, max) = (0.0, 1.0); if value >= min && value <= max { self.builder.atmosphere.humidity = value; Ok(self) } else { Err(Error::OutOfRange { min, max }) } } pub fn use_coriolis(mut self, value: bool) -> Self { self.builder.flags.coriolis = value; self } pub fn use_drag(mut self, value: bool) -> Self { self.builder.flags.drag = value; self } pub fn use_gravity(mut self, value: bool) -> Self { self.builder.flags.gravity = value; self } pub fn set_shot_angle(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-FRAC_PI_2); let max = Angle::new::<radian>(FRAC_PI_2); if value >= min && value <= max { self.builder.shooter.pitch = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_lattitude(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-FRAC_PI_2); let max = Angle::new::<radian>(FRAC_PI_2); if value >= min && value <= max { self.builder.shooter.lattitude = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_bearing(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-2.0 * PI); let max = Angle::new::<radian>(2.0 * PI); if value >= min && value <= max { self.builder.shooter.yaw = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_gravity(mut self, value: Acceleration) -> Result<Self> { if value.is_sign_negative() { self.builder.shooter.gravity = value; Ok(self) } else { Err(Error::NegativeExpected( value.get::<meter_per_second_squared>(), )) } } pub fn set_wind_speed(mut self, value: Velocity) -> Result<Self> { if value.is_sign_positive() { self.builder.wind.velocity = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<meter_per_second>())) } } pub fn set_wind_angle(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-2.0 * PI); let 
max = Angle::new::<radian>(2.0 * PI); if value >= min && value <= max { self.builder.wind.yaw = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_scope_height(mut self, value: Length) -> Self { self.builder.scope.height = value; self } pub fn set_scope_offset(mut self, value: Length) -> Self { self.builder.scope.offset = value; self } pub fn set_scope_pitch(mut self, value: Angle) -> Self { self.builder.scope.pitch = value; self } pub fn set_scope_yaw(mut self, value: Angle) -> Self { self.builder.scope.yaw = value; self } pub fn set_scope_roll(mut self, value: Angle) -> Self { self.builder.scope.roll = value; self } } impl<T> SimulationBuilder<T> where T: DerefMut<Target = ProjectileImpl>, { pub fn set_caliber(mut self, value: Length) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.caliber = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<meter>())) } } pub fn set_velocity(mut self, value: Velocity) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.velocity = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<meter_per_second>())) } } pub fn set_mass(mut self, value: Mass) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.weight = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<kilogram>())) } } pub fn set_bc(mut self, value: Numeric) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.bc = value; Ok(self) } else { Err(Error::PositiveExpected(value)) } } }
use crate::{ consts::{FRAC_PI_2, PI}, error::{Error, Result}, my_quantity, projectiles::ProjectileImpl, units::{ celsius, fahrenheit, foot_per_second, grain, inch, inch_of_mercury, kelvin, kilogram, meter, meter_per_second, meter_per_second_squared, mile_per_hour, pascal, radian, second, Acceleration, Angle, Length, Mass, MyQuantity, Pressure, ThermodynamicTemperature, Time, Velocity, }, Numeric, }; use std::ops::DerefMut; #[derive(Debug)] pub struct Simulation<T> { pub(crate) flags: Flags, pub(crate) projectile: T, pub(crate) scope: Scope, pub(crate) atmosphere: Atmosphere, pub(crate) wind: Wind, pub(crate) shooter: Shooter, pub(crate) time_step: Time, } #[derive(Debug)] pub struct Atmosphere { pub(crate) temperature: ThermodynamicTemperature, pub(crate) pressure: Pressure, pub(crate) humidity: Numeric, } #[derive(Debug)] pub struct Flags { pub(crate) coriolis: bool, pub(crate) drag: bool, pub(crate) gravity: bool, } #[derive(Debug)] pub struct Scope { pub(crate) yaw: Angle, pub(crate) pitch: Angle, pub(crate) roll: Angle, pub(crate) height: Length, pub(crate) offset: Length, } #[derive(Debug)] pub struct Shooter { pub(crate) yaw: Angle, pub(crate) pitch: Angle, pub(crate) roll: Angle, pub(crate) lattitude: Angle, pub(crate) gravity: Acceleration, } #[derive(Debug)] pub struct Wind { pub(crate) yaw: Angle, pub(crate) pitch: Angle, pub(crate) roll: Angle, pub(crate) velocity: Velocity, } #[derive(Debug)] pub struct SimulationBuilder<T> { pub(crate) builder: Simulation<T>, } impl<T> From<SimulationBuilder<T>> for Simulation<T> { fn from(other: SimulationBuilder<T>) -> Self { Self { ..other.builder } } } impl<T> From<Simulation<T>> for SimulationBuilder<T> { fn from(other: Simulation<T>) -> Self { Self { builder: other } } } impl<T> Default for SimulationBuilder<T> where T: From<ProjectileImpl>, { fn default() -> Self { Self { builder: Simulation { flags: Flags { coriolis: true, drag: true, gravity: true, }, projectile: From::from(ProjectileImpl { caliber: 
Length::new::<inch>(0.264), weight: Mass::new::<grain>(140.0), bc: 0.305, velocity: Velocity::new::<foot_per_second>(2710.0), }), scope: Scope { yaw: Angle::new::<radian>(0.0), pitch: Angle::new::<radian>(0.0), roll: Angle::new::<radian>(0.0), height: Length::new::<inch>(1.5), offset: Length::new::<inch>(0.0), }, atmosphere: Atmosphere { temperature: ThermodynamicTemperature::new::<fahrenheit>(68.0), pressure: Pressure::new::<inch_of_mercury>(29.92), humidity: 0.0, }, wind: Wind { yaw: Angle::new::<radian>(0.0), pitch: Angle::new::<radian>(0.0), roll: Angle::new::<radian>(0.0), velocity: Velocity::new::<mile_per_hour>(0.0), }, shooter: Shooter { yaw: Angle::new::<radian>(0.0), pitch: Angle::new::<radian>(0.0), roll: Angle::new::<radian>(0.0), lattitude: Angle::new::<radian>(0.0), gravity: my_quantity!(-9.806_65), }, time_step: Time::new::<second>(0.000_001), }, } } } impl<T> SimulationBuilder<T> where T: From<ProjectileImpl>, { pub fn new() -> Self { Default::default() } } impl<T> SimulationBuilder<T> { pub fn init(self) -> Simulation<T> { From::from(self) } pub fn set_time_step(mut self, value: Time) -> Result<Self> { let min = Time::new::<second>(0.0); let max = Time::new::<second>(0.1); if value > min && value <= max { self.builder.time_step = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<second>(), max: max.get::<second>(), }) } } pub fn set_temperature(mut self, value: ThermodynamicTemperature) -> Result<Self> { let min = ThermodynamicTemperature::new::<celsius>(-80.0); let max = ThermodynamicTemperature::new::<celsius>(50.0); if value >= min && value <= max { self.builder.atmosphere.temperature = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<kelvin>(), max: max.get::<kelvin>(), }) } } pub fn set_pressure(mut self, value: Pressure) -> Result<Self> { if value.is_sign_positive() { self.builder.atmosphere.pressure = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<pascal>())) } } pub fn set_humidity(mut self, 
value: Numeric) -> Result<Self> { let (min, max) = (0.0, 1.0); if value >= min && value <= max { self.builder.atmosphere.humidity = value; Ok(self) } else { Err(Error::OutOfRange { min, max }) } } pub fn use_coriolis(mut self, value: bool) -> Self { self.builder.flags.coriolis = value; self } pub fn use_drag(mut self, value: bool) -> Self { self.builder.flags.drag = value; self } pub fn use_gravity(mut self, value: bool) -> Self { self.builder.flags.gravity = value; self } pub fn set_shot_angle(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-FRAC_PI_2); let max = Angle::new::<radian>(FRAC_PI_2); if value >= min && value <= max { self.builder.shooter.pitch = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_lattitude(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-FRAC_PI_2); let max = Angle::new::<radian>(FRAC_PI_2); if value >= min && value <= max { self.builder.shooter.lattitude = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_bearing(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-2.0 * PI); let max = Angle::new::<radian>(2.0 * PI); if value >= min && value <= max { self.builder.shooter.yaw = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_gravity(mut self, value: Acceleration) -> Result<Self> { if value.is_sign_negative() { self.builder.shooter.gravity = value; Ok(self) } else { Err(Error::NegativeExpected( value.get::<meter_per_second_squared>(), )) } } pub fn set_wind_speed(mut self, value: Velocity) -> Resu
pub fn set_wind_angle(mut self, value: Angle) -> Result<Self> { let min = Angle::new::<radian>(-2.0 * PI); let max = Angle::new::<radian>(2.0 * PI); if value >= min && value <= max { self.builder.wind.yaw = value; Ok(self) } else { Err(Error::OutOfRange { min: min.get::<radian>(), max: max.get::<radian>(), }) } } pub fn set_scope_height(mut self, value: Length) -> Self { self.builder.scope.height = value; self } pub fn set_scope_offset(mut self, value: Length) -> Self { self.builder.scope.offset = value; self } pub fn set_scope_pitch(mut self, value: Angle) -> Self { self.builder.scope.pitch = value; self } pub fn set_scope_yaw(mut self, value: Angle) -> Self { self.builder.scope.yaw = value; self } pub fn set_scope_roll(mut self, value: Angle) -> Self { self.builder.scope.roll = value; self } } impl<T> SimulationBuilder<T> where T: DerefMut<Target = ProjectileImpl>, { pub fn set_caliber(mut self, value: Length) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.caliber = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<meter>())) } } pub fn set_velocity(mut self, value: Velocity) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.velocity = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<meter_per_second>())) } } pub fn set_mass(mut self, value: Mass) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.weight = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<kilogram>())) } } pub fn set_bc(mut self, value: Numeric) -> Result<Self> { if value.is_sign_positive() { self.builder.projectile.bc = value; Ok(self) } else { Err(Error::PositiveExpected(value)) } } }
lt<Self> { if value.is_sign_positive() { self.builder.wind.velocity = value; Ok(self) } else { Err(Error::PositiveExpected(value.get::<meter_per_second>())) } }
function_block-function_prefixed
[ { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.1710,\n\n 0.05 => 0.1719,\n\n 0.10 => 0.1727,\n\n 0.15 => 0.1732,\n\n 0.20 => 0.1734,\n\n 0.25 => 0.1730,\n\n 0.30 => 0.1718,\n\n 0.35 => 0.1696,\n\n 0.40 => 0.1668,\n\n 0.45 => 0.1637,\n\n 0.50 => 0.1603,\n\n 0.55 => 0.1566,\n\n 0.60 => 0.1529,\n\n 0.65 => 0.1497,\n\n 0.70 => 0.1473,\n\n 0.75 => 0.1463,\n\n 0.80 => 0.1489,\n\n 0.85 => 0.1583,\n", "file_path": "src/projectiles/g5.rs", "rank": 0, "score": 105832.68697992741 }, { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.2282,\n\n 0.05 => 0.2282,\n\n 0.10 => 0.2282,\n\n 0.15 => 0.2282,\n\n 0.20 => 0.2282,\n\n 0.25 => 0.2282,\n\n 0.30 => 0.2282,\n\n 0.35 => 0.2282,\n\n 0.40 => 0.2282,\n\n 0.45 => 0.2282,\n\n 0.50 => 0.2282,\n\n 0.55 => 0.2282,\n\n 0.60 => 0.2282,\n\n 0.65 => 0.2282,\n\n 0.70 => 0.2282,\n\n 0.725 => 0.2353,\n\n 0.75 => 0.2434,\n\n 0.775 => 0.2515,\n", "file_path": "src/projectiles/gi.rs", "rank": 1, "score": 105832.68697992741 }, { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.2105,\n\n 0.05 => 0.2105,\n\n 0.10 => 0.2104,\n\n 0.15 => 0.2104,\n\n 0.20 => 0.2103,\n\n 0.25 => 0.2103,\n\n 0.30 => 0.2103,\n\n 0.35 => 0.2103,\n\n 0.40 => 0.2103,\n\n 0.45 => 0.2102,\n\n 0.50 => 0.2102,\n\n 0.55 => 0.2102,\n\n 0.60 => 0.2102,\n\n 0.65 => 0.2102,\n\n 0.70 => 0.2103,\n\n 0.75 => 0.2103,\n\n 0.80 => 0.2104,\n\n 0.825 => 0.2104,\n", "file_path": "src/projectiles/g8.rs", "rank": 2, "score": 105832.68697992741 }, { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.2629,\n\n 0.05 => 0.2558,\n\n 0.10 => 0.2487,\n\n 0.15 => 0.2413,\n\n 0.20 => 0.2344,\n\n 0.25 => 0.2278,\n\n 0.30 => 0.2214,\n\n 0.35 => 0.2155,\n\n 0.40 => 0.2104,\n\n 0.45 => 0.2061,\n\n 0.50 => 0.2032,\n\n 0.55 => 0.2020,\n\n 0.60 => 0.2034,\n\n 0.70 => 0.2165,\n\n 0.725 => 0.2230,\n\n 0.75 => 0.2313,\n\n 0.775 => 0.2417,\n\n 0.80 => 0.2546,\n", "file_path": 
"src/projectiles/g1.rs", "rank": 3, "score": 105832.68697992741 }, { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.2303,\n\n 0.05 => 0.2298,\n\n 0.10 => 0.2287,\n\n 0.15 => 0.2271,\n\n 0.20 => 0.2251,\n\n 0.25 => 0.2227,\n\n 0.30 => 0.2196,\n\n 0.35 => 0.2156,\n\n 0.40 => 0.2107,\n\n 0.45 => 0.2048,\n\n 0.50 => 0.1980,\n\n 0.55 => 0.1905,\n\n 0.60 => 0.1828,\n\n 0.65 => 0.1758,\n\n 0.70 => 0.1702,\n\n 0.75 => 0.1669,\n\n 0.775 => 0.1664,\n\n 0.80 => 0.1667,\n", "file_path": "src/projectiles/g2.rs", "rank": 4, "score": 105832.68697992741 }, { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.2617,\n\n 0.05 => 0.2553,\n\n 0.10 => 0.2491,\n\n 0.15 => 0.2432,\n\n 0.20 => 0.2376,\n\n 0.25 => 0.2324,\n\n 0.30 => 0.2278,\n\n 0.35 => 0.2238,\n\n 0.40 => 0.2205,\n\n 0.45 => 0.2177,\n\n 0.50 => 0.2155,\n\n 0.55 => 0.2138,\n\n 0.60 => 0.2126,\n\n 0.65 => 0.2121,\n\n 0.70 => 0.2122,\n\n 0.75 => 0.2132,\n\n 0.80 => 0.2154,\n\n 0.85 => 0.2194,\n", "file_path": "src/projectiles/g6.rs", "rank": 5, "score": 105832.68697992741 }, { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.4662,\n\n 0.05 => 0.4689,\n\n 0.10 => 0.4717,\n\n 0.15 => 0.4745,\n\n 0.20 => 0.4772,\n\n 0.25 => 0.4800,\n\n 0.30 => 0.4827,\n\n 0.35 => 0.4852,\n\n 0.40 => 0.4882,\n\n 0.45 => 0.4920,\n\n 0.50 => 0.4970,\n\n 0.55 => 0.5080,\n\n 0.60 => 0.5260,\n\n 0.65 => 0.5590,\n\n 0.70 => 0.5920,\n\n 0.75 => 0.6258,\n\n 0.80 => 0.6610,\n\n 0.85 => 0.6985,\n", "file_path": "src/projectiles/gs.rs", "rank": 6, "score": 105832.68697992741 }, { "content": "pub fn table() -> NumericMap {\n\n float_btree_map![\n\n 0.00 => 0.1198,\n\n 0.05 => 0.1197,\n\n 0.10 => 0.1196,\n\n 0.15 => 0.1194,\n\n 0.20 => 0.1193,\n\n 0.25 => 0.1194,\n\n 0.30 => 0.1194,\n\n 0.35 => 0.1194,\n\n 0.40 => 0.1193,\n\n 0.45 => 0.1193,\n\n 0.50 => 0.1194,\n\n 0.55 => 0.1193,\n\n 0.60 => 0.1194,\n\n 0.65 => 0.1197,\n\n 0.70 => 0.1202,\n\n 0.725 => 0.1207,\n\n 0.754 
=> 0.1215,\n\n 0.7754 => 0.1226,\n", "file_path": "src/projectiles/g7.rs", "rank": 7, "score": 105832.68697992741 }, { "content": "pub trait Projectile {\n\n fn area(&self) -> Area {\n\n PI * self.radius().powi(P2::new())\n\n }\n\n fn i(&self) -> Ratio {\n\n self.sd() / self.bc()\n\n }\n\n\n\n fn mass(&self) -> Mass;\n\n fn radius(&self) -> Length;\n\n fn velocity(&self) -> Velocity;\n\n fn bc(&self) -> SectionalDensity;\n\n fn sd(&self) -> SectionalDensity;\n\n fn cd(&self, x: Numeric) -> Result<Numeric>;\n\n}\n\n\n\npub struct ProjectileImpl {\n\n pub caliber: Length,\n\n pub weight: Mass,\n\n pub bc: Numeric,\n", "file_path": "src/projectiles.rs", "rank": 8, "score": 68813.84924206614 }, { "content": "pub trait Cross<Rhs = Self> {\n\n type Output;\n\n fn cross(&self, rhs: Rhs) -> Self::Output;\n\n}\n\n\n", "file_path": "src/vectors.rs", "rank": 9, "score": 68014.56298477836 }, { "content": "fn wrap_own<V>((k, v): (Numeric, V)) -> (OrdF<Numeric>, V) {\n\n (OrdF(k), v)\n\n}\n\n\n\n// Initialize BTreeMap with OrdereredFloat wrapper around k, and FloatMap wrapper\n\n// around entire map. Used for drag tables and output/drop tables\n\nmacro_rules! 
float_btree_map {\n\n ( $($k:expr => $v:expr,)+ ) => {\n\n float_btree_map![\n\n $($k => $v),+\n\n ]\n\n };\n\n ( $($k:expr => $v:expr),* ) => {{\n\n let mut _float_map = $crate::float_map::FloatMap::new();\n\n $(\n\n let _ = _float_map.insert($k, $v);\n\n )*\n\n _float_map\n\n }};\n\n}\n", "file_path": "src/float_map.rs", "rank": 10, "score": 49825.98207852127 }, { "content": "fn wrap_bound(bound: Bound<&Numeric>) -> Bound<OrdF<Numeric>> {\n\n match bound {\n\n Bound::Unbounded => Bound::Unbounded,\n\n Bound::Excluded(f) => Bound::Excluded(OrdF(*f)),\n\n Bound::Included(f) => Bound::Included(OrdF(*f)),\n\n }\n\n}\n", "file_path": "src/float_map.rs", "rank": 11, "score": 49538.87337429666 }, { "content": "fn unwrap_own<V>((OrdF(k), v): (OrdF<Numeric>, V)) -> (Numeric, V) {\n\n (k, v)\n\n}\n", "file_path": "src/float_map.rs", "rank": 12, "score": 46512.81271161758 }, { "content": "fn unwrap_ref<'k, 'v, V>((&OrdF(k), v): (&'k OrdF<Numeric>, &'v V)) -> (Numeric, &'v V) {\n\n (k, v)\n\n}\n", "file_path": "src/float_map.rs", "rank": 13, "score": 43526.1812308379 }, { "content": "fn unwrap_mut<'k, 'v, V>((&OrdF(k), v): (&'k OrdF<Numeric>, &'v mut V)) -> (Numeric, &'v mut V) {\n\n (k, v)\n\n}\n", "file_path": "src/float_map.rs", "rank": 14, "score": 40734.80810689156 }, { "content": "pub trait Vectors {\n\n type Output;\n\n\n\n fn new(x: Self::Output, y: Self::Output, z: Self::Output) -> Self;\n\n fn get_x(&self) -> Self::Output;\n\n fn get_y(&self) -> Self::Output;\n\n fn get_z(&self) -> Self::Output;\n\n}\n\n\n\nimpl<Dl: ?Sized, Dr: ?Sized, Ul: ?Sized, Ur: ?Sized, V> Cross<&DimVector3<Dr, Ur, V>>\n\n for DimVector3<Dl, Ul, V>\n\nwhere\n\n Dl: Dimension,\n\n Dr: Dimension,\n\n Ul: Units<V>,\n\n Ur: Units<V>,\n\n V: Conversion<V> + Scalar + Copy + ClosedAdd + ClosedMul + ClosedSub,\n\n Dl::L: Add<Dr::L>,\n\n Dl::M: Add<Dr::M>,\n\n Dl::T: Add<Dr::T>,\n", "file_path": "src/vectors.rs", "rank": 15, "score": 38235.04453639965 }, { "content": "pub trait Measurements {\n\n fn 
time(&self) -> Time;\n\n fn velocity(&self) -> Velocity;\n\n fn energy(&self) -> Energy;\n\n fn distance(&self) -> Length;\n\n fn elevation(&self) -> Length;\n\n fn windage(&self) -> Length;\n\n fn angle(&self) -> Angle;\n\n fn vertical_angle(&self, tolerance: Length) -> Angle;\n\n fn horizontal_angle(&self, tolerance: Length) -> Angle;\n\n fn relative_position(&self) -> MyVector3<length::Dimension>;\n\n fn offset_vertical_angle(&self, offset: Length, tolerance: Length) -> Angle;\n\n fn offset_horizontal_angle(&self, offset: Length, tolerance: Length) -> Angle;\n\n}\n", "file_path": "src/output.rs", "rank": 16, "score": 38235.04453639965 }, { "content": "pub trait Newtonian {\n\n fn acceleration(\n\n &self,\n\n _velocity: MyVector3<velocity::Dimension>,\n\n ) -> MyVector3<acceleration::Dimension> {\n\n MyVector3::new(\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n )\n\n }\n\n fn delta_time(&self) -> Time {\n\n Time::new::<second>(0.000_005)\n\n }\n\n // 'Second Equation of Motion'\n\n fn delta_position(\n\n &self,\n\n velocity: MyVector3<velocity::Dimension>,\n\n ) -> MyVector3<length::Dimension> {\n\n velocity * self.delta_time()\n", "file_path": "src/iter.rs", "rank": 17, "score": 38235.04453639965 }, { "content": "pub trait Norm {\n\n type Output;\n\n fn norm(&self) -> Self::Output;\n\n}\n\n\n", "file_path": "src/vectors.rs", "rank": 18, "score": 38235.04453639965 }, { "content": "struct IterFindAdjustments<'t, T, F, E, W>\n\nwhere\n\n T: Projectile,\n\n F: Fn(&Packet<T>) -> bool,\n\n E: Fn(&Packet<T>) -> Angle,\n\n W: Fn(&Packet<T>) -> Angle,\n\n{\n\n sim: &'t mut Simulation<T>,\n\n\n\n finder: F,\n\n elevation_adjuster: E,\n\n windage_adjuster: W,\n\n\n\n elevation_adjustment: Angle,\n\n windage_adjustment: Angle,\n\n count: u64,\n\n}\n\n\n\n// This never returns None - it returns Some(Result) which can indicate failure instead\n\n// 
This is just to capture reason why iteration stopped\n", "file_path": "src/solvers/zero.rs", "rank": 19, "score": 29100.252406422558 }, { "content": " pressure::{self, inch_of_mercury, pascal},\n\n ratio::{self},\n\n thermodynamic_temperature::{\n\n self as temperature, degree_celsius as celsius, degree_fahrenheit as fahrenheit, kelvin,\n\n },\n\n time::{self, second},\n\n velocity::{self, foot_per_second, meter_per_second, mile_per_hour},\n\n },\n\n str::ParseQuantityError,\n\n};\n\npub(crate) use uom::{\n\n si::{Dimension, Quantity, Units, ISQ, SI},\n\n typenum, Conversion,\n\n};\n\n\n\npub(crate) type MyUnits = SI<Numeric>;\n\npub(crate) type MyQuantity<D> = Quantity<D, MyUnits, Numeric>;\n", "file_path": "src/units.rs", "rank": 20, "score": 27938.075743445912 }, { "content": "use crate::Numeric;\n\n\n\npub use uom::{\n\n fmt::DisplayStyle,\n\n si::{\n\n acceleration::{self, foot_per_second_squared, meter_per_second_squared},\n\n amount_of_substance::{self, mole},\n\n angle::{self, degree, minute as moa, radian},\n\n angular_velocity::{self, radian_per_second},\n\n area::{self, square_inch, square_meter},\n\n electric_current::{self, ampere},\n\n energy::{self, foot_pound, joule},\n\n f64::*,\n\n fmt::{Arguments, QuantityArguments},\n\n force::{self},\n\n length::{self, inch, meter, yard},\n\n luminous_intensity::{self, candela},\n\n mass::{self, grain, kilogram, pound},\n\n mass_density::{self, kilogram_per_cubic_meter},\n\n molar_mass::{self},\n", "file_path": "src/units.rs", "rank": 21, "score": 27933.787479583654 }, { "content": " }\n\n }\n\n impl DerefMut for $struct\n\n {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n }\n\n impl Projectile for $struct {\n\n fn velocity(&self) -> Velocity {\n\n self.0.velocity\n\n }\n\n fn mass(&self) -> Mass {\n\n self.0.weight\n\n }\n\n fn radius(&self) -> Length {\n\n self.0.caliber / 2.0\n\n }\n\n fn bc(&self) -> SectionalDensity {\n\n Mass::new::<pound>(self.0.bc) / 
Area::new::<square_inch>(1.0)\n", "file_path": "src/projectiles.rs", "rank": 39, "score": 25938.69921132184 }, { "content": "use crate::{\n\n consts::PI,\n\n error::{Error, Result},\n\n units::{\n\n pound, square_inch,\n\n typenum::P2,\n\n typenum::{N2, P1, Z0},\n\n Area, Length, Mass, MyQuantity, Ratio, Velocity, ISQ,\n\n },\n\n Numeric, NumericMap,\n\n};\n\n\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse lazy_static::lazy_static;\n\n\n\npub type SectionalDensity = MyQuantity<ISQ<N2, P1, Z0, Z0, Z0, Z0, Z0>>;\n\n\n", "file_path": "src/projectiles.rs", "rank": 40, "score": 25934.633347255392 }, { "content": " pub velocity: Velocity,\n\n}\n\n\n\nmacro_rules! drag_tables {\n\n ($($struct:ident => $module:ident,)+) => {\n\n drag_tables!{$($struct => $module),+}\n\n };\n\n ($($struct:ident => $module:ident),*) => {\n\n $(\n\n mod $module;\n\n pub struct $struct(ProjectileImpl);\n\n impl From<ProjectileImpl> for $struct {\n\n fn from(other: ProjectileImpl) -> Self {\n\n Self(other)\n\n }\n\n }\n\n impl Deref for $struct {\n\n type Target = ProjectileImpl;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n", "file_path": "src/projectiles.rs", "rank": 41, "score": 25928.37330888913 }, { "content": " }\n\n fn sd(&self) -> SectionalDensity {\n\n self.0.weight / self.0.caliber.powi(P2::new())\n\n }\n\n // TABLE is a map of \"mach speed\" to \"coefficients of drag\", {x => y}\n\n // This funtions returns linear approximation of coefficient, for a given mach speed\n\n // When x is present in the map, interpolation is equivalent to TABLE.get_value(x)\n\n fn cd(&self, x: Numeric) -> Result<Numeric> {\n\n lazy_static! 
{\n\n static ref TABLE: NumericMap = $module::table();\n\n }\n\n // TODO: Does not work if x exists in map as smallest key, ..x excludes it, so first step is None\n\n TABLE.range(..x).rev() // First = None if smallest key >= x, else Some((x0, &y0)) where x0 greatest key < x\n\n .zip(TABLE.range(x..)) // First = None if greatest key < x, else Some((x1, &y1)) where x1 smallest key >= x\n\n .map(|((x0, &y0), (x1, &y1))| y0 + (x - x0) * ((y1 - y0) / (x1 - x0))) // Linear interpolation when x0 and x1 both exist\n\n .next()\n\n .ok_or(Error::VelocityLookup(x)) // None => Err: x is outside of key range: this function does not extrapolate\n\n }\n\n }\n\n )*\n", "file_path": "src/projectiles.rs", "rank": 42, "score": 25923.842386991913 }, { "content": " };\n\n}\n\n\n\ndrag_tables! {\n\n G1 => g1,\n\n G2 => g2,\n\n G5 => g5,\n\n G6 => g6,\n\n G7 => g7,\n\n G8 => g8,\n\n GI => gi,\n\n GS => gs,\n\n}\n", "file_path": "src/projectiles.rs", "rank": 43, "score": 25916.28469739777 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/gs.rs", "rank": 44, "score": 24249.359485709552 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/g1.rs", "rank": 45, "score": 24249.359485709552 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/g2.rs", "rank": 46, "score": 24249.359485709552 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/gi.rs", "rank": 47, "score": 24249.359485709552 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/g5.rs", "rank": 48, "score": 24249.359485709552 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/g6.rs", "rank": 49, "score": 24249.359485709552 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/g8.rs", "rank": 50, "score": 24249.359485709552 }, { "content": "use crate::NumericMap;\n\n\n", "file_path": "src/projectiles/g7.rs", "rank": 51, "score": 24249.359485709552 }, { 
"content": " 1.45 => 0.3678,\n\n 1.50 => 0.3594,\n\n 1.55 => 0.3512,\n\n 1.60 => 0.3432,\n\n 1.65 => 0.3356,\n\n 1.70 => 0.3282,\n\n 1.75 => 0.3213,\n\n 1.80 => 0.3149,\n\n 1.85 => 0.3089,\n\n 1.90 => 0.3033,\n\n 1.95 => 0.2982,\n\n 2.00 => 0.2933,\n\n 2.05 => 0.2889,\n\n 2.10 => 0.2846,\n\n 2.15 => 0.2806,\n\n 2.20 => 0.2768,\n\n 2.25 => 0.2731,\n\n 2.30 => 0.2696,\n\n 2.35 => 0.2663,\n\n 2.40 => 0.2632,\n", "file_path": "src/projectiles/g2.rs", "rank": 52, "score": 24240.129320687298 }, { "content": " 1.65 => 0.4286,\n\n 1.70 => 0.4237,\n\n 1.75 => 0.4182,\n\n 1.80 => 0.4121,\n\n 1.85 => 0.4057,\n\n 1.90 => 0.3991,\n\n 1.95 => 0.3926,\n\n 2.00 => 0.3861,\n\n 2.05 => 0.3800,\n\n 2.10 => 0.3741,\n\n 2.15 => 0.3684,\n\n 2.20 => 0.3630,\n\n 2.25 => 0.3578,\n\n 2.30 => 0.3529,\n\n 2.35 => 0.3481,\n\n 2.40 => 0.3435,\n\n 2.45 => 0.3391,\n\n 2.50 => 0.3349,\n\n 2.60 => 0.3269,\n\n 2.70 => 0.3194,\n", "file_path": "src/projectiles/g5.rs", "rank": 53, "score": 24240.129320687298 }, { "content": " 0.875 => 0.2229,\n\n 0.90 => 0.2297,\n\n 0.925 => 0.2449,\n\n 0.95 => 0.2732,\n\n 0.975 => 0.3141,\n\n 1.0 => 0.3597,\n\n 1.025 => 0.3994,\n\n 1.05 => 0.4261,\n\n 1.075 => 0.4402,\n\n 1.10 => 0.4465,\n\n 1.125 => 0.4490,\n\n 1.15 => 0.4497,\n\n 1.175 => 0.4494,\n\n 1.20 => 0.4482,\n\n 1.225 => 0.4464,\n\n 1.25 => 0.4441,\n\n 1.30 => 0.4390,\n\n 1.35 => 0.4336,\n\n 1.40 => 0.4279,\n\n 1.45 => 0.4221,\n", "file_path": "src/projectiles/g6.rs", "rank": 54, "score": 24240.129320687298 }, { "content": " 0.875 => 0.1672,\n\n 0.90 => 0.1815,\n\n 0.925 => 0.2051,\n\n 0.95 => 0.2413,\n\n 0.975 => 0.2884,\n\n 1.0 => 0.3379,\n\n 1.025 => 0.3785,\n\n 1.05 => 0.4032,\n\n 1.075 => 0.4147,\n\n 1.10 => 0.4201,\n\n 1.15 => 0.4278,\n\n 1.20 => 0.4338,\n\n 1.25 => 0.4373,\n\n 1.30 => 0.4392,\n\n 1.35 => 0.4403,\n\n 1.40 => 0.4406,\n\n 1.45 => 0.4401,\n\n 1.50 => 0.4386,\n\n 1.55 => 0.4362,\n\n 1.60 => 0.4328,\n", "file_path": "src/projectiles/g5.rs", "rank": 55, "score": 24240.129320687298 }, { 
"content": " 2.45 => 0.2602,\n\n 2.50 => 0.2572,\n\n 2.55 => 0.2543,\n\n 2.60 => 0.2515,\n\n 2.65 => 0.2487,\n\n 2.70 => 0.2460,\n\n 2.75 => 0.2433,\n\n 2.80 => 0.2408,\n\n 2.85 => 0.2382,\n\n 2.90 => 0.2357,\n\n 2.95 => 0.2333,\n\n 3.00 => 0.2309,\n\n 3.10 => 0.2262,\n\n 3.20 => 0.2217,\n\n 3.30 => 0.2173,\n\n 3.40 => 0.2132,\n\n 3.50 => 0.2091,\n\n 3.60 => 0.2052,\n\n 3.70 => 0.2014,\n\n 3.80 => 0.1978,\n", "file_path": "src/projectiles/g2.rs", "rank": 56, "score": 24240.129320687298 }, { "content": " 2.50 => 0.2697,\n\n 2.55 => 0.2670,\n\n 2.60 => 0.2643,\n\n 2.65 => 0.2615,\n\n 2.70 => 0.2588,\n\n 2.75 => 0.2561,\n\n 2.80 => 0.2533,\n\n 2.85 => 0.2506,\n\n 2.90 => 0.2479,\n\n 2.95 => 0.2451,\n\n 3.00 => 0.2424,\n\n 3.10 => 0.2368,\n\n 3.20 => 0.2313,\n\n 3.30 => 0.2258,\n\n 3.40 => 0.2205,\n\n 3.50 => 0.2154,\n\n 3.60 => 0.2106,\n\n 3.70 => 0.2060,\n\n 3.80 => 0.2017,\n\n 3.90 => 0.1975,\n\n 4.00 => 0.1935,\n\n 4.20 => 0.1861,\n\n 4.40 => 0.1793,\n\n 4.60 => 0.1730,\n\n 4.80 => 0.1672,\n\n 5.00 => 0.1618,\n\n ]\n\n}\n", "file_path": "src/projectiles/g7.rs", "rank": 57, "score": 24240.129320687298 }, { "content": " 3.90 => 0.1944,\n\n 4.00 => 0.1912,\n\n 4.20 => 0.1851,\n\n 4.40 => 0.1794,\n\n 4.60 => 0.1741,\n\n 4.80 => 0.1693,\n\n 5.00 => 0.1648,\n\n ]\n\n}\n", "file_path": "src/projectiles/g2.rs", "rank": 58, "score": 24240.129320687298 }, { "content": " 1.55 => 0.3845,\n\n 1.60 => 0.3777,\n\n 1.65 => 0.3710,\n\n 1.70 => 0.3645,\n\n 1.75 => 0.3581,\n\n 1.80 => 0.3519,\n\n 1.85 => 0.3458,\n\n 1.90 => 0.3400,\n\n 1.95 => 0.3343,\n\n 2.00 => 0.3288,\n\n 2.05 => 0.3234,\n\n 2.10 => 0.3182,\n\n 2.15 => 0.3131,\n\n 2.20 => 0.3081,\n\n 2.25 => 0.3032,\n\n 2.30 => 0.2983,\n\n 2.35 => 0.2937,\n\n 2.40 => 0.2891,\n\n 2.45 => 0.2845,\n\n 2.50 => 0.2802,\n", "file_path": "src/projectiles/g8.rs", "rank": 59, "score": 24240.129320687298 }, { "content": " 0.90 => 0.7370,\n\n 0.95 => 0.7757,\n\n 1.0 => 0.8140,\n\n 1.05 => 0.8512,\n\n 1.10 => 0.8870,\n\n 1.15 => 0.9210,\n\n 
1.20 => 0.9510,\n\n 1.25 => 0.9740,\n\n 1.30 => 0.9910,\n\n 1.35 => 0.9990,\n\n 1.40 => 1.0030,\n\n 1.45 => 1.0060,\n\n 1.50 => 1.0080,\n\n 1.55 => 1.0090,\n\n 1.60 => 1.0090,\n\n 1.65 => 1.0090,\n\n 1.70 => 1.0090,\n\n 1.75 => 1.0080,\n\n 1.80 => 1.0070,\n\n 1.85 => 1.0060,\n", "file_path": "src/projectiles/gs.rs", "rank": 60, "score": 24240.129320687298 }, { "content": " 0.804 => 0.1242,\n\n 0.8254 => 0.1266,\n\n 0.854 => 0.1306,\n\n 0.8754 => 0.1368,\n\n 0.904 => 0.1464,\n\n 0.9254 => 0.1660,\n\n 0.954 => 0.2054,\n\n 0.9754 => 0.2993,\n\n 1.04 => 0.3803,\n\n 1.0254 => 0.4015,\n\n 1.054 => 0.4043,\n\n 1.0754 => 0.4034,\n\n 1.104 => 0.4014,\n\n 1.1254 => 0.3987,\n\n 1.15 => 0.3955,\n\n 1.20 => 0.3884,\n\n 1.25 => 0.3810,\n\n 1.30 => 0.3732,\n\n 1.35 => 0.3657,\n\n 1.40 => 0.3580,\n", "file_path": "src/projectiles/g7.rs", "rank": 61, "score": 24240.129320687298 }, { "content": " 1.50 => 0.3440,\n\n 1.55 => 0.3376,\n\n 1.60 => 0.3315,\n\n 1.65 => 0.3260,\n\n 1.70 => 0.3209,\n\n 1.75 => 0.3160,\n\n 1.80 => 0.3117,\n\n 1.85 => 0.3078,\n\n 1.90 => 0.3042,\n\n 1.95 => 0.3010,\n\n 2.00 => 0.2980,\n\n 2.05 => 0.2951,\n\n 2.10 => 0.2922,\n\n 2.15 => 0.2892,\n\n 2.20 => 0.2864,\n\n 2.25 => 0.2835,\n\n 2.30 => 0.2807,\n\n 2.35 => 0.2779,\n\n 2.40 => 0.2752,\n\n 2.45 => 0.2725,\n", "file_path": "src/projectiles/g7.rs", "rank": 62, "score": 24240.129320687298 }, { "content": " 2.40 => 0.5679,\n\n 2.45 => 0.5626,\n\n 2.50 => 0.5576,\n\n 2.60 => 0.5478,\n\n 2.70 => 0.5386,\n\n 2.80 => 0.5298,\n\n 2.90 => 0.5215,\n\n 3.00 => 0.5136,\n\n 3.10 => 0.5061,\n\n 3.20 => 0.4989,\n\n 3.30 => 0.4921,\n\n 3.40 => 0.4855,\n\n 3.50 => 0.4792,\n\n 3.60 => 0.4732,\n\n 3.70 => 0.4674,\n\n 3.80 => 0.4618,\n\n 3.90 => 0.4564,\n\n 4.00 => 0.4513,\n\n 4.20 => 0.4415,\n\n 4.40 => 0.4323,\n\n 4.60 => 0.4238,\n\n 4.80 => 0.4157,\n\n 5.00 => 0.4082,\n\n ]\n\n}\n", "file_path": "src/projectiles/gi.rs", "rank": 63, "score": 24240.129320687298 }, { "content": " 2.50 => 0.2883,\n\n 2.60 => 0.2772,\n\n 2.70 
=> 0.2668,\n\n 2.80 => 0.2574,\n\n 2.90 => 0.2487,\n\n 3.00 => 0.2407,\n\n 3.10 => 0.2333,\n\n 3.20 => 0.2265,\n\n 3.30 => 0.2202,\n\n 3.40 => 0.2144,\n\n 3.50 => 0.2089,\n\n 3.60 => 0.2039,\n\n 3.70 => 0.1991,\n\n 3.80 => 0.1947,\n\n 3.90 => 0.1905,\n\n 4.00 => 0.1866,\n\n 4.20 => 0.1794,\n\n 4.40 => 0.1730,\n\n 4.60 => 0.1673,\n\n 4.80 => 0.1621,\n\n 5.00 => 0.1574,\n\n ]\n\n}\n", "file_path": "src/projectiles/g6.rs", "rank": 64, "score": 24240.129320687298 }, { "content": " 2.60 => 0.2720,\n\n 2.70 => 0.2642,\n\n 2.80 => 0.2569,\n\n 2.90 => 0.2499,\n\n 3.00 => 0.2432,\n\n 3.10 => 0.2368,\n\n 3.20 => 0.2308,\n\n 3.30 => 0.2251,\n\n 3.40 => 0.2197,\n\n 3.50 => 0.2147,\n\n 3.60 => 0.2101,\n\n 3.70 => 0.2058,\n\n 3.80 => 0.2019,\n\n 3.90 => 0.1983,\n\n 4.00 => 0.1950,\n\n 4.20 => 0.1890,\n\n 4.40 => 0.1837,\n\n 4.60 => 0.1791,\n\n 4.80 => 0.1750,\n\n 5.00 => 0.1713,\n\n ]\n\n}\n", "file_path": "src/projectiles/g8.rs", "rank": 65, "score": 24240.129320687298 }, { "content": " 0.85 => 0.2105,\n\n 0.875 => 0.2106,\n\n 0.90 => 0.2109,\n\n 0.925 => 0.2183,\n\n 0.95 => 0.2571,\n\n 0.975 => 0.3358,\n\n 1.0 => 0.4068,\n\n 1.025 => 0.4378,\n\n 1.05 => 0.4476,\n\n 1.075 => 0.4493,\n\n 1.10 => 0.4477,\n\n 1.125 => 0.4450,\n\n 1.15 => 0.4419,\n\n 1.20 => 0.4353,\n\n 1.25 => 0.4283,\n\n 1.30 => 0.4208,\n\n 1.35 => 0.4133,\n\n 1.40 => 0.4059,\n\n 1.45 => 0.3986,\n\n 1.50 => 0.3915,\n", "file_path": "src/projectiles/g8.rs", "rank": 66, "score": 24240.129320687298 }, { "content": " 0.825 => 0.1682,\n\n 0.85 => 0.1711,\n\n 0.875 => 0.1761,\n\n 0.90 => 0.1831,\n\n 0.925 => 0.2004,\n\n 0.95 => 0.2589,\n\n 0.975 => 0.3492,\n\n 1.0 => 0.3983,\n\n 1.025 => 0.4075,\n\n 1.05 => 0.4103,\n\n 1.075 => 0.4114,\n\n 1.10 => 0.4106,\n\n 1.125 => 0.4089,\n\n 1.15 => 0.4068,\n\n 1.175 => 0.4046,\n\n 1.20 => 0.4021,\n\n 1.25 => 0.3966,\n\n 1.30 => 0.3904,\n\n 1.35 => 0.3835,\n\n 1.40 => 0.3759,\n", "file_path": "src/projectiles/g2.rs", "rank": 67, "score": 24240.129320687298 }, { "content": " 1.50 
=> 0.4162,\n\n 1.55 => 0.4102,\n\n 1.60 => 0.4042,\n\n 1.65 => 0.3981,\n\n 1.70 => 0.3919,\n\n 1.75 => 0.3855,\n\n 1.80 => 0.3788,\n\n 1.85 => 0.3721,\n\n 1.90 => 0.3652,\n\n 1.95 => 0.3583,\n\n 2.00 => 0.3515,\n\n 2.05 => 0.3447,\n\n 2.10 => 0.3381,\n\n 2.15 => 0.3314,\n\n 2.20 => 0.3249,\n\n 2.25 => 0.3185,\n\n 2.30 => 0.3122,\n\n 2.35 => 0.3060,\n\n 2.40 => 0.3000,\n\n 2.45 => 0.2941,\n", "file_path": "src/projectiles/g6.rs", "rank": 68, "score": 24240.129320687298 }, { "content": " 2.80 => 0.3125,\n\n 2.90 => 0.3060,\n\n 3.00 => 0.2999,\n\n 3.10 => 0.2942,\n\n 3.20 => 0.2889,\n\n 3.30 => 0.2838,\n\n 3.40 => 0.2790,\n\n 3.50 => 0.2745,\n\n 3.60 => 0.2703,\n\n 3.70 => 0.2662,\n\n 3.80 => 0.2624,\n\n 3.90 => 0.2588,\n\n 4.00 => 0.2553,\n\n 4.20 => 0.2488,\n\n 4.40 => 0.2429,\n\n 4.60 => 0.2376,\n\n 4.80 => 0.2326,\n\n 5.00 => 0.2280,\n\n ]\n\n}\n", "file_path": "src/projectiles/g5.rs", "rank": 69, "score": 24240.129320687298 }, { "content": " 1.45 => 0.6423,\n\n 1.50 => 0.6423,\n\n 1.55 => 0.6423,\n\n 1.60 => 0.6423,\n\n 1.625 => 0.6407,\n\n 1.65 => 0.6378,\n\n 1.70 => 0.6321,\n\n 1.75 => 0.6266,\n\n 1.80 => 0.6213,\n\n 1.85 => 0.6163,\n\n 1.90 => 0.6113,\n\n 1.95 => 0.6066,\n\n 2.00 => 0.6020,\n\n 2.05 => 0.5976,\n\n 2.10 => 0.5933,\n\n 2.15 => 0.5891,\n\n 2.20 => 0.5850,\n\n 2.25 => 0.5811,\n\n 2.30 => 0.5773,\n\n 2.35 => 0.5733,\n", "file_path": "src/projectiles/gi.rs", "rank": 70, "score": 24240.129320687298 }, { "content": " 2.90 => 0.9650,\n\n 2.95 => 0.9630,\n\n 3.00 => 0.9610,\n\n 3.05 => 0.9589,\n\n 3.10 => 0.9570,\n\n 3.15 => 0.9555,\n\n 3.20 => 0.9540,\n\n 3.25 => 0.9520,\n\n 3.30 => 0.9500,\n\n 3.35 => 0.9485,\n\n 3.40 => 0.9470,\n\n 3.45 => 0.9450,\n\n 3.50 => 0.9430,\n\n 3.55 => 0.9414,\n\n 3.60 => 0.9400,\n\n 3.65 => 0.9385,\n\n 3.70 => 0.9370,\n\n 3.75 => 0.9355,\n\n 3.80 => 0.9340,\n\n 3.85 => 0.9325,\n\n 3.90 => 0.9310,\n\n 3.95 => 0.9295,\n\n 4.00 => 0.9280,\n\n ]\n\n}\n", "file_path": "src/projectiles/gs.rs", "rank": 71, "score": 
24240.129320687298 }, { "content": " 0.80 => 0.2596,\n\n 0.825 => 0.2677,\n\n 0.85 => 0.2759,\n\n 0.875 => 0.2913,\n\n 0.90 => 0.3170,\n\n 0.925 => 0.3442,\n\n 0.95 => 0.3728,\n\n 1.0 => 0.4349,\n\n 1.05 => 0.5034,\n\n 1.075 => 0.5402,\n\n 1.10 => 0.5756,\n\n 1.125 => 0.5887,\n\n 1.15 => 0.6018,\n\n 1.175 => 0.6149,\n\n 1.20 => 0.6279,\n\n 1.225 => 0.6418,\n\n 1.25 => 0.6423,\n\n 1.30 => 0.6423,\n\n 1.35 => 0.6423,\n\n 1.40 => 0.6423,\n", "file_path": "src/projectiles/gi.rs", "rank": 72, "score": 24240.129320687298 }, { "content": " 0.825 => 0.2706,\n\n 0.85 => 0.2901,\n\n 0.875 => 0.3136,\n\n 0.90 => 0.3415,\n\n 0.925 => 0.3734,\n\n 0.95 => 0.4084,\n\n 0.975 => 0.4448,\n\n 1.0 => 0.4805,\n\n 1.025 => 0.5136,\n\n 1.05 => 0.5427,\n\n 1.075 => 0.5677,\n\n 1.10 => 0.5883,\n\n 1.125 => 0.6053,\n\n 1.15 => 0.6191,\n\n 1.20 => 0.6393,\n\n 1.25 => 0.6518,\n\n 1.30 => 0.6589,\n\n 1.35 => 0.6621,\n\n 1.40 => 0.6625,\n\n 1.45 => 0.6607,\n", "file_path": "src/projectiles/g1.rs", "rank": 73, "score": 24240.129320687298 }, { "content": " 2.50 => 0.5397,\n\n 2.60 => 0.5325,\n\n 2.70 => 0.5264,\n\n 2.80 => 0.5211,\n\n 2.90 => 0.5168,\n\n 3.00 => 0.5133,\n\n 3.10 => 0.5105,\n\n 3.20 => 0.5084,\n\n 3.30 => 0.5067,\n\n 3.40 => 0.5054,\n\n 3.50 => 0.5040,\n\n 3.60 => 0.5030,\n\n 3.70 => 0.5022,\n\n 3.80 => 0.5016,\n\n 3.90 => 0.5010,\n\n 4.00 => 0.5006,\n\n 4.20 => 0.4998,\n\n 4.40 => 0.4995,\n\n 4.60 => 0.4992,\n\n 4.80 => 0.4990,\n\n 5.00 => 0.4988,\n\n ]\n\n}\n", "file_path": "src/projectiles/g1.rs", "rank": 74, "score": 24240.129320687298 }, { "content": " 1.90 => 1.0040,\n\n 1.95 => 1.0025,\n\n 2.00 => 1.0010,\n\n 2.05 => 0.9990,\n\n 2.10 => 0.9970,\n\n 2.15 => 0.9956,\n\n 2.20 => 0.9940,\n\n 2.25 => 0.9916,\n\n 2.30 => 0.9890,\n\n 2.35 => 0.9869,\n\n 2.40 => 0.9850,\n\n 2.45 => 0.9830,\n\n 2.50 => 0.9810,\n\n 2.55 => 0.9790,\n\n 2.60 => 0.9770,\n\n 2.65 => 0.9750,\n\n 2.70 => 0.9730,\n\n 2.75 => 0.9710,\n\n 2.80 => 0.9690,\n\n 2.85 => 0.9670,\n", "file_path": 
"src/projectiles/gs.rs", "rank": 75, "score": 24240.129320687298 }, { "content": " 1.50 => 0.6573,\n\n 1.55 => 0.6528,\n\n 1.60 => 0.6474,\n\n 1.65 => 0.6413,\n\n 1.70 => 0.6347,\n\n 1.75 => 0.6280,\n\n 1.80 => 0.6210,\n\n 1.85 => 0.6141,\n\n 1.90 => 0.6072,\n\n 1.95 => 0.6003,\n\n 2.00 => 0.5934,\n\n 2.05 => 0.5867,\n\n 2.10 => 0.5804,\n\n 2.15 => 0.5743,\n\n 2.20 => 0.5685,\n\n 2.25 => 0.5630,\n\n 2.30 => 0.5577,\n\n 2.35 => 0.5527,\n\n 2.40 => 0.5481,\n\n 2.45 => 0.5438,\n", "file_path": "src/projectiles/g1.rs", "rank": 76, "score": 24240.129320687298 }, { "content": "use crate::{\n\n consts::PI,\n\n my_quantity,\n\n projectiles::Projectile,\n\n simulation::{Atmosphere, Flags, Scope, Shooter, Simulation, Wind},\n\n units::{\n\n acceleration, angular_velocity, celsius, force, meter_per_second, meter_per_second_squared,\n\n pascal, radian, radian_per_second, ratio, typenum::*, velocity, Acceleration, Angle,\n\n AngularVelocity, MassDensity, MolarMass, MyQuantity, Pressure, Ratio, Velocity, ISQ,\n\n },\n\n vectors::{Cross, MyVector3, Norm, Vectors},\n\n Numeric,\n\n};\n\n\n\n// Drag\n\nimpl<T> Simulation<T>\n\nwhere\n\n T: Projectile,\n\n{\n\n // Velocity vector of wind, only horizontal at the moment\n", "file_path": "src/physics.rs", "rank": 77, "score": 38.624266575008185 }, { "content": " MyVector3::new(\n\n self.projectile.velocity(),\n\n Velocity::new::<meter_per_second>(0.0),\n\n Velocity::new::<meter_per_second>(0.0),\n\n )\n\n .pivot_y(self.scope.yaw())\n\n .pivot_z(self.scope.pitch())\n\n .pivot_x(self.shooter.roll())\n\n .pivot_z(self.shooter.pitch())\n\n .pivot_y(self.shooter.yaw())\n\n }\n\n // Projectiles position relative to scope\n\n fn absolute_projectile_position(&self) -> MyVector3<length::Dimension> {\n\n MyVector3::new(\n\n Length::new::<meter>(0.0),\n\n -self.scope.height,\n\n -self.scope.offset,\n\n )\n\n .pivot_x(self.scope.roll())\n\n .pivot_x(self.shooter.roll())\n", "file_path": "src/iter.rs", "rank": 78, "score": 34.653604928791715 }, { 
"content": " self.temperature.get::<celsius>()\n\n }\n\n}\n\n\n\nimpl Flags {\n\n fn coriolis(&self) -> bool {\n\n self.coriolis\n\n }\n\n fn drag(&self) -> bool {\n\n self.drag\n\n }\n\n fn gravity(&self) -> bool {\n\n self.gravity\n\n }\n\n}\n\nimpl Scope {\n\n pub(crate) fn pitch(&self) -> Angle {\n\n self.pitch\n\n }\n\n pub(crate) fn yaw(&self) -> Angle {\n", "file_path": "src/physics.rs", "rank": 79, "score": 33.907589521921494 }, { "content": "use crate::{units::Angle, Numeric};\n\n\n\nuse std::{error, fmt, result};\n\n\n\npub type Result<T, E = Error> = result::Result<T, E>;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n VelocityLookup(Numeric),\n\n PositiveExpected(Numeric),\n\n NegativeExpected(Numeric),\n\n OutOfRange {\n\n min: Numeric,\n\n max: Numeric,\n\n },\n\n AngleRange {\n\n count: u64,\n\n pitch: Angle,\n\n yaw: Angle,\n\n },\n", "file_path": "src/error.rs", "rank": 80, "score": 27.518440622875833 }, { "content": " self.pitch\n\n }\n\n pub(crate) fn roll(&self) -> Angle {\n\n -self.roll\n\n }\n\n // Angular velocity vector of earth, at current lattitude\n\n // Can be thought of as vector from center of earth, pointing\n\n // to lines of lattitude. 
Maximum effect at +/-90 degrees (poles)\n\n fn omega(&self) -> MyVector3<angular_velocity::Dimension> {\n\n MyVector3::new(\n\n Self::ANGULAR_VELOCITY_EARTH,\n\n AngularVelocity::new::<radian_per_second>(0.0),\n\n AngularVelocity::new::<radian_per_second>(0.0),\n\n )\n\n .pivot_z(self.lattitude)\n\n }\n\n}\n\nimpl Wind {\n\n // This vector indicates direction of wind flow, not source of wind\n\n // so rotate by PI (adding or subtraction should have the same affect)\n", "file_path": "src/physics.rs", "rank": 81, "score": 27.29047404136283 }, { "content": "use crate::{\n\n projectiles::Projectile,\n\n simulation::Simulation,\n\n units::{length, meter, typenum::P2, velocity, Angle, Energy, Length, Time, Velocity},\n\n vectors::{MyVector3, Norm, Vectors},\n\n};\n\n\n\n// Output of iteration, need a better name to encapsulate a moving projectile\n\n#[derive(Debug)]\n\npub struct Packet<'t, T> {\n\n pub(crate) simulation: &'t Simulation<T>, //Simulation this came from, used for various calculations\n\n pub(crate) time: Time, // Position in time (s)\n\n pub(crate) position: MyVector3<length::Dimension>, // Position (m)\n\n pub(crate) velocity: MyVector3<velocity::Dimension>, // Velocity (m/s)\n\n}\n\n\n\nimpl<T> Measurements for Packet<'_, T>\n\nwhere\n\n T: Projectile,\n\n{\n", "file_path": "src/output.rs", "rank": 82, "score": 26.89789684607648 }, { "content": " .pivot_z(self.shooter.pitch())\n\n .pivot_y(self.shooter.yaw())\n\n }\n\n}\n\n// Create an new iterator over Simulation\n\nimpl<'t, T> IntoIterator for &'t Simulation<T>\n\nwhere\n\n T: Projectile,\n\n{\n\n type Item = <Self::IntoIter as Iterator>::Item;\n\n type IntoIter = Iter<'t, T>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter()\n\n }\n\n}\n\n// Produce new 'packet', based on drag, coriolis acceleration, and gravity\n\n// Contains time, position, and velocity of projectile, and reference to simulation used\n\nimpl<'t, T> Iterator for Iter<'t, T>\n\nwhere\n", "file_path": "src/iter.rs", 
"rank": 83, "score": 26.563511486130146 }, { "content": "use crate::{\n\n output::Packet,\n\n projectiles::Projectile,\n\n simulation::Simulation,\n\n units::{\n\n acceleration, length, meter, meter_per_second, meter_per_second_squared, second,\n\n typenum::P2, velocity, Acceleration, Length, Time, Velocity,\n\n },\n\n vectors::{MyVector3, Vectors},\n\n};\n\n\n\nuse std::iter::FusedIterator;\n\n\n\n// Iterator over PointMassModel, steps through time and adjust position and velocity vectors\n\n// Has reference to current simulation model for calculations\n\n// Item lifetime also timed to this lifetime\n\n#[derive(Debug)]\n\npub struct Iter<'t, T> {\n\n simulation: &'t Simulation<T>, // Reference to model used for calculations\n\n position: MyVector3<length::Dimension>, // Position (m)\n", "file_path": "src/iter.rs", "rank": 84, "score": 26.417509851051594 }, { "content": "impl<T, F, E, W> Iterator for IterFindAdjustments<'_, T, F, E, W>\n\nwhere\n\n T: Projectile,\n\n F: Fn(&Packet<T>) -> bool,\n\n E: Fn(&Packet<T>) -> Angle,\n\n W: Fn(&Packet<T>) -> Angle,\n\n{\n\n type Item = Result<(Angle, Angle, Length, Length)>;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n // Previous pitch/yaw values to ensure angles are changing\n\n let &mut Self {\n\n sim:\n\n &mut Simulation {\n\n scope:\n\n Scope {\n\n pitch: prev_pitch,\n\n yaw: prev_yaw,\n\n ..\n\n },\n\n ..\n", "file_path": "src/solvers/zero.rs", "rank": 85, "score": 26.16425643606252 }, { "content": " -self.yaw\n\n }\n\n pub(crate) fn roll(&self) -> Angle {\n\n self.roll\n\n }\n\n}\n\nimpl Shooter {\n\n // Angular velocity of earth, (radians)\n\n const ANGULAR_VELOCITY_EARTH: AngularVelocity = my_quantity!(0.000_072_921_159);\n\n\n\n fn gravity(&self) -> MyVector3<acceleration::Dimension> {\n\n MyVector3::new(\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n self.gravity,\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n )\n\n }\n\n // Flip, since circle functions rotate 
counter-clockwise,\n\n // 90 degrees is east by compass bearing, but west(left) in trig\n\n // (0)\n", "file_path": "src/physics.rs", "rank": 86, "score": 25.79920348604044 }, { "content": "use crate::{\n\n consts::{FRAC_PI_2, FRAC_PI_4},\n\n error::{Error, Result},\n\n my_quantity,\n\n output::Measurements,\n\n output::Packet,\n\n projectiles::Projectile,\n\n simulation::Scope,\n\n simulation::Simulation,\n\n units::{angle, radian, Angle, Length, MyQuantity},\n\n};\n\n\n\n// This angle will trace the longest possible trajectory for a projectile (45 degrees)\n\nconst DEG_45: MyQuantity<angle::Dimension> = my_quantity!(FRAC_PI_4);\n\n\n\n// Should never try to yaw more than 90 degrees, probably not a necessary check\n\n// Also should never try to pitch this low - not sure if this ever happens in practice\n\nconst DEG_90: MyQuantity<angle::Dimension> = my_quantity!(FRAC_PI_2);\n\n\n", "file_path": "src/solvers/zero.rs", "rank": 87, "score": 25.634548299388356 }, { "content": " let compare = MyVector3::new(\n\n Length::new::<meter>(1.0),\n\n Length::new::<meter>(0.0),\n\n Length::new::<meter>(0.0),\n\n );\n\n self.relative_position().angle(&compare)\n\n }\n\n fn vertical_angle(&self, tolerance: Length) -> Angle {\n\n self.offset_vertical_angle(Length::new::<meter>(0.0), tolerance)\n\n }\n\n fn horizontal_angle(&self, tolerance: Length) -> Angle {\n\n self.offset_horizontal_angle(Length::new::<meter>(0.0), tolerance)\n\n }\n\n // During the simulation, the velocity of the projectile is rotated to allign with\n\n // the shooter's bearing (azimuth and line of sight)\n\n // This function returns the position rotated back to the initial frame of reference\n\n // This is used during zero'ing and is output in the drop table\n\n fn relative_position(&self) -> MyVector3<length::Dimension> {\n\n self.position\n\n .pivot_y(-self.simulation.shooter.yaw())\n", "file_path": "src/output.rs", "rank": 88, "score": 25.27764394554996 }, { "content": " velocity: 
MyVector3<velocity::Dimension>, // Velocity (m/s)\n\n time: Time, // Position in time (s)\n\n}\n\nimpl<T> Simulation<T>\n\nwhere\n\n T: Projectile,\n\n{\n\n pub fn iter(&self) -> Iter<'_, T> {\n\n let position = self.absolute_projectile_position();\n\n let velocity = self.absolute_projectile_velocity();\n\n Iter {\n\n simulation: self,\n\n position,\n\n velocity,\n\n time: Time::new::<second>(0.0),\n\n }\n\n }\n\n // Rotated velocity vector, accounts for muzzle/shooter pitch, and yaw (bearing)\n\n // Start with velocity value along X unit vector\n\n fn absolute_projectile_velocity(&self) -> MyVector3<velocity::Dimension> {\n", "file_path": "src/iter.rs", "rank": 89, "score": 25.165814382905896 }, { "content": " // Also accounts for elevation changes when launching projectils East/West, regardless of hemisphere\n\n // Bearing East results in higher elevation (+y absolute/relative)\n\n // Bearing West results in lower elevation (-y relative/absolute)\n\n pub(crate) fn coriolis_acceleration(\n\n &self,\n\n velocity: MyVector3<velocity::Dimension>,\n\n ) -> MyVector3<acceleration::Dimension> {\n\n if self.flags.coriolis() {\n\n self.shooter.omega().cross(&velocity) * -2.0\n\n } else {\n\n MyVector3::new(\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n )\n\n }\n\n }\n\n}\n\n\n\n//Gravity\n", "file_path": "src/physics.rs", "rank": 90, "score": 25.030555068665088 }, { "content": " // {after negation(-)}\n\n //\n\n // (180)\n\n // ^\n\n // |\n\n // (+90) <---> (-90)\n\n // |\n\n // v\n\n // (0)\n\n fn yaw(&self) -> Angle {\n\n -self.yaw + Angle::new::<radian>(PI)\n\n }\n\n fn pitch(&self) -> Angle {\n\n self.pitch\n\n }\n\n fn roll(&self) -> Angle {\n\n self.roll\n\n }\n\n fn velocity(&self) -> MyVector3<velocity::Dimension> {\n\n MyVector3::new(\n", "file_path": "src/physics.rs", "rank": 91, "score": 24.36952909160497 }, { "content": " fn time(&self) 
-> Time {\n\n self.time\n\n }\n\n fn velocity(&self) -> Velocity {\n\n self.velocity.norm()\n\n }\n\n fn energy(&self) -> Energy {\n\n self.velocity.norm().powi(P2::new()) * self.simulation.projectile.mass() * 0.5\n\n }\n\n // Positions relative to line of sight (shooter_pitch)\n\n fn distance(&self) -> Length {\n\n self.relative_position().get_x()\n\n }\n\n fn elevation(&self) -> Length {\n\n self.relative_position().get_y()\n\n }\n\n fn windage(&self) -> Length {\n\n self.relative_position().get_z()\n\n }\n\n fn angle(&self) -> Angle {\n", "file_path": "src/output.rs", "rank": 92, "score": 23.654837934099465 }, { "content": "impl<T> Simulation<T> {\n\n pub(crate) fn gravity_acceleration(&self) -> MyVector3<acceleration::Dimension> {\n\n if self.flags.gravity() {\n\n self.shooter.gravity()\n\n } else {\n\n MyVector3::new(\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n )\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/physics.rs", "rank": 93, "score": 23.087783857018643 }, { "content": " .pivot_z(-self.simulation.shooter.pitch())\n\n .pivot_x(-self.simulation.shooter.roll())\n\n }\n\n // This gives adjustment - opposite sign relative to desired offset\n\n // Always done in meters for now, due to relative_position()\n\n fn offset_vertical_angle(&self, offset: Length, tolerance: Length) -> Angle {\n\n let sign = if self.elevation() >= (offset - tolerance) {\n\n -1.0\n\n } else {\n\n 1.0\n\n };\n\n\n\n let position = MyVector3::new(self.distance(), self.elevation(), Length::new::<meter>(0.0));\n\n let desired = MyVector3::new(self.distance(), offset, Length::new::<meter>(0.0));\n\n\n\n position.angle(&desired) * sign\n\n }\n\n // This gives adjustment - opposite sign relative to desired offset\n\n // Always done in meters for now, due to relative_position()\n\n fn offset_horizontal_angle(&self, offset: Length, tolerance: Length) -> Angle {\n", 
"file_path": "src/output.rs", "rank": 94, "score": 22.595185264087498 }, { "content": " velocity: MyVector3<velocity::Dimension>,\n\n ) -> MyVector3<acceleration::Dimension> {\n\n if self.flags.drag() {\n\n // Acceleration from drag force and gravity (F = ma)\n\n self.drag_force(velocity) / self.projectile.mass()\n\n } else {\n\n MyVector3::new(\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n Acceleration::new::<meter_per_second_squared>(0.0),\n\n )\n\n }\n\n }\n\n}\n\n\n\n// Coriolis\n\nimpl<T> Simulation<T> {\n\n // Coriolis/Eotovos acceleration vector. Accounts for Left/Right drift due to Earth's spin\n\n // This drift is always right (+z relative) in the northern hemisphere, regardless of initial bearing\n\n // This drive is always left (-z relative) in the southern hemisphere, regardless of initial bearing\n", "file_path": "src/physics.rs", "rank": 95, "score": 22.48244563958715 }, { "content": " self.velocity,\n\n Velocity::new::<meter_per_second>(0.0),\n\n Velocity::new::<meter_per_second>(0.0),\n\n )\n\n .pivot_y(self.yaw())\n\n .pivot_z(self.pitch())\n\n .pivot_x(self.roll())\n\n }\n\n}\n", "file_path": "src/physics.rs", "rank": 96, "score": 21.025748120600447 }, { "content": " // Speed of sound at given air density and pressure\n\n pub(crate) fn speed_of_sound(&self) -> Velocity {\n\n (Self::ADIABATIC_INDEX_AIR * (self.pressure / self.rho())).sqrt()\n\n }\n\n // Pressure of water vapor, Arden Buck equation\n\n fn pv(&self) -> Pressure {\n\n Pressure::new::<pascal>(\n\n self.humidity\n\n * 611.21\n\n * ((18.678 - (self.celsius() / 234.5))\n\n * (self.celsius() / (257.14 + self.celsius())))\n\n .exp(),\n\n )\n\n }\n\n // Pressure of dry air\n\n fn pd(&self) -> Pressure {\n\n self.pressure - self.pv()\n\n }\n\n // Temperature in celsius\n\n fn celsius(&self) -> Numeric {\n", "file_path": "src/physics.rs", "rank": 97, "score": 19.98914809995816 }, { "content": " \"Within Range Expected Error 
=> min: {:#?} - {:#?}\",\n\n min, max\n\n ),\n\n Self::AngleRange { count, pitch, yaw } => write!(\n\n f,\n\n \"{}: Outside Valid Range Error => pitch: {:#?}, yaw: {:#?}\",\n\n count, pitch, yaw\n\n ),\n\n Self::TerminalVelocity { count, pitch, yaw } => write!(\n\n f,\n\n \"{}: Terminal Velocity Error => pitch: {:#?}, yaw: {:#?}\",\n\n count, pitch, yaw\n\n ),\n\n Self::AngleNotChanging { count, pitch, yaw } => write!(\n\n f,\n\n \"{}: Angle Not Changing Error => pitch: {:#?}, yaw: {:#?}\",\n\n count, pitch, yaw\n\n ),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for Error {}\n", "file_path": "src/error.rs", "rank": 98, "score": 19.161037780151663 }, { "content": " // Does not adjust according to line of sight, since most would measure wind\n\n // along relative bearing - I don't think many would factor in a 'downhill' wind for example\n\n // This would be interresting to think of, however.\n\n fn wind_velocity(&self) -> MyVector3<velocity::Dimension> {\n\n self.wind\n\n .velocity()\n\n .pivot_x(self.shooter.roll())\n\n .pivot_z(self.shooter.pitch())\n\n .pivot_y(self.shooter.yaw())\n\n }\n\n // Velocity vector, after impact from wind (actually from drag, not \"being blown\")\n\n // This is why the velocity from wind is subtracted, and vv is not used to find next velocity\n\n fn vv(&self, velocity: MyVector3<velocity::Dimension>) -> MyVector3<velocity::Dimension> {\n\n velocity - self.wind_velocity()\n\n }\n\n // Velocity relative to speed of sound (c), with given atmospheric conditions\n\n fn mach(&self, velocity: MyVector3<velocity::Dimension>) -> Ratio {\n\n velocity.norm() / self.atmosphere.speed_of_sound()\n\n }\n\n // Coefficient of drag, as defined by a standard projectile depending on drag table used\n", "file_path": "src/physics.rs", "rank": 99, "score": 18.910571144722194 } ]
Rust
src/stream.rs
jws121295/dansible
4c32335b048560352135480ab0216ff5be6bd8fa
use byteorder::{BigEndian, ByteOrder, ReadBytesExt}; use std::collections::HashMap; use std::collections::hash_map::Entry; use std::io::{Cursor, Seek, SeekFrom}; use session::{Session, PacketHandler}; pub enum Response<H, S = H> { Continue(H), Spawn(S), Close, } impl <H: Handler + 'static> Response<H> { pub fn boxed(self) -> Response<Box<Handler>> { match self { Response::Continue(handler) => Response::Continue(Box::new(handler)), Response::Spawn(handler) => Response::Spawn(Box::new(handler)), Response::Close => Response::Close, } } } pub trait Handler: Send { fn on_create(self, channel_id: ChannelId, session: &Session) -> Response<Self> where Self: Sized; fn on_header(self, header_id: u8, header_data: &[u8], session: &Session) -> Response<Self> where Self: Sized; fn on_data(self, data: &[u8], session: &Session) -> Response<Self> where Self: Sized; fn on_error(self, session: &Session) -> Response<Self> where Self: Sized; fn on_close(self, session: &Session) -> Response<Self> where Self: Sized; fn box_on_create(self: Box<Self>, channel_id: ChannelId, session: &Session) -> Response<Box<Handler>>; fn box_on_header(self: Box<Self>, header_id: u8, header_data: &[u8], session: &Session) -> Response<Box<Handler>>; fn box_on_data(self: Box<Self>, data: &[u8], session: &Session) -> Response<Box<Handler>>; fn box_on_error(self: Box<Self>, session: &Session) -> Response<Box<Handler>>; fn box_on_close(self: Box<Self>, session: &Session) -> Response<Box<Handler>>; } pub type ChannelId = u16; enum ChannelMode { Header, Data } struct Channel(ChannelMode, Box<Handler>); impl Channel { fn handle_packet(self, cmd: u8, data: Vec<u8>, session: &Session) -> Response<Self, Box<Handler>> { let Channel(mode, mut handler) = self; let mut packet = Cursor::new(&data as &[u8]); packet.read_u16::<BigEndian>().unwrap(); if cmd == 0xa { println!("error: {} {}", data.len(), packet.read_u16::<BigEndian>().unwrap()); return match handler.box_on_error(session) { Response::Continue(_) => 
Response::Close, Response::Spawn(f) => Response::Spawn(f), Response::Close => Response::Close, }; } match mode { ChannelMode::Header => { let mut length = 0; while packet.position() < data.len() as u64 { length = packet.read_u16::<BigEndian>().unwrap(); if length > 0 { let header_id = packet.read_u8().unwrap(); let header_data = &data[packet.position() as usize .. packet.position() as usize + length as usize - 1]; handler = match handler.box_on_header(header_id, header_data, session) { Response::Continue(handler) => handler, Response::Spawn(f) => return Response::Spawn(f), Response::Close => return Response::Close, }; packet.seek(SeekFrom::Current(length as i64 - 1)).unwrap(); } } if length == 0 { Response::Continue(Channel(ChannelMode::Data, handler)) } else { Response::Continue(Channel(ChannelMode::Header, handler)) } } ChannelMode::Data => { if packet.position() < data.len() as u64 { let event_data = &data[packet.position() as usize..]; match handler.box_on_data(event_data, session) { Response::Continue(handler) => Response::Continue(Channel(ChannelMode::Data, handler)), Response::Spawn(f) => Response::Spawn(f), Response::Close => Response::Close, } } else { match handler.box_on_close(session) { Response::Continue(_) => Response::Close, Response::Spawn(f) => Response::Spawn(f), Response::Close => Response::Close, } } } } } } pub struct StreamManager { next_id: ChannelId, channels: HashMap<ChannelId, Option<Channel>>, } impl StreamManager { pub fn new() -> StreamManager { StreamManager { next_id: 0, channels: HashMap::new(), } } pub fn create(&mut self, handler: Box<Handler>, session: &Session) { let channel_id = self.next_id; self.next_id += 1; trace!("allocated stream {}", channel_id); match handler.box_on_create(channel_id, session) { Response::Continue(handler) => { self.channels.insert(channel_id, Some(Channel(ChannelMode::Header, handler))); } Response::Spawn(handler) => self.create(handler, session), Response::Close => (), } } } impl PacketHandler for 
StreamManager { fn handle(&mut self, cmd: u8, data: Vec<u8>, session: &Session) { let id: ChannelId = BigEndian::read_u16(&data[0..2]); let spawn = if let Entry::Occupied(mut entry) = self.channels.entry(id) { if let Some(channel) = entry.get_mut().take() { match channel.handle_packet(cmd, data, session) { Response::Continue(channel) => { entry.insert(Some(channel)); None } Response::Spawn(f) => { entry.remove(); Some(f) } Response::Close => { entry.remove(); None } } } else { None } } else { None }; if let Some(s) = spawn { self.create(s, session); } } }
use byteorder::{BigEndian, ByteOrder, ReadBytesExt}; use std::collections::HashMap; use std::collections::hash_map::Entry; use std::io::{Cursor, Seek, SeekFrom}; use session::{Session, PacketHandler}; pub enum Response<H, S = H> { Continue(H), Spawn(S), Close, } impl <H: Handler + 'static> Response<H> { pub fn boxed(self) -> Response<Box<Handler>> { match self { Response::Continue(handler) => Response::Continue(Box::new(handler)), Response::Spawn(handler) => Response::Spawn(Box::new(handler)), Response::Close => Response::Close, } } } pub trait Handler: Send { fn on_create(self, channel_id: ChannelId, session: &Session) -> Response<Self> where Self: Sized; fn on_header(self, header_id: u8, header_data: &[u8], session: &Session) -> Response<Self> where Self: Sized; fn on_data(self, data: &[u8], session: &Session) -> Response<Self> where Self: Sized; fn on_error(self, session: &Session) -> Response<Self> where Self: Sized; fn on_close(self, session: &Session) -> Response<Self> where Self: Sized; fn box_on_create(self: Box<Self>, channel_id: ChannelId, session: &Session) -> Response<Box<Handler>>; fn box_on_header(self: Box<Self>, header_id: u8, header_data: &[u8], session: &Session) -> Response<Box<Handler>>; fn box_on_data(self: Box<Self>, data: &[u8], session: &Session) -> Response<Box<Handler>>; fn box_on_error(self: Box<Self>, session: &Session) -> Response<Box<Handler>>; fn box_on_close(self: Box<Self>, session: &Session) -> Response<Box<Handler>>; } pub type ChannelId = u16; enum ChannelMode { Header, Data } struct Channel(ChannelMode, Box<Handler>); impl Channel { fn handle_packet(self, cmd: u8, data: Vec<u8>, session: &Session) -> Response<Self, Box<Handler>> { let Channel(mode, mut handler) = self; let mut packet = Cursor::new(&data as &[u8]); packet.read_u16::<BigEndian>().unwrap(); if cmd == 0xa { println!("error: {} {}", data.len(), packet.read_u16::<BigEndian>().unwrap()); return match handler.box_on_error(session) { Response::Continue(_) => 
Response::Close, Response::Spawn(f) => Response::Spawn(f), Response::Close => Response::Close, }; } match mode { ChannelMode::Header => { let mut length = 0; while packet.position() < data.len() as u64 { length = packet.read_u16::<BigEndian>().unwrap(); if length > 0 { let header_id = packet.read_u8().unwrap(); let header_data = &data[packet.position() as usize .. packet.position() as usize + length as usize - 1]; handler = match handler.box_on_header(header_id, header_data, session) { Response::Continue(handler) => handler, Response::Spawn(f) => return Response::Spawn(f), Response::Close => return Response::Close, }; packet.seek(SeekFrom::Current(length as i64 - 1)).unwrap(); } } if length == 0 { Response::Continue(Channel(ChannelMode::Data, handler)) } else { Response::Continue(Channel(ChannelMode::Header, handler)) } } ChannelMode::Data => { if packet.position() < data.len() as u64 { let event_data = &data[packet.position() as usize..]; match handler.box_on_data(event_data, session) { Response::Continue(handler) => Response::Continue(Channel(ChannelMode::Data, handler)), Response::Spawn(f) => Response::Spawn(f), Response::Close => Response::Close, } } else { match handler.box_on_close(session) { Response::Continue(_) => Response::Close, Response::Spawn(f) => Response::Spawn(f), Response::Close => Response::Close, } } } } } } pub struct StreamManager { next_id: ChannelId, channels: HashMap<ChannelId, Option<Channel>>, } impl StreamManager { pub fn new() -> StreamManager { StreamManager { next_id: 0, channels: HashMap::new(), } }
} impl PacketHandler for StreamManager { fn handle(&mut self, cmd: u8, data: Vec<u8>, session: &Session) { let id: ChannelId = BigEndian::read_u16(&data[0..2]); let spawn = if let Entry::Occupied(mut entry) = self.channels.entry(id) { if let Some(channel) = entry.get_mut().take() { match channel.handle_packet(cmd, data, session) { Response::Continue(channel) => { entry.insert(Some(channel)); None } Response::Spawn(f) => { entry.remove(); Some(f) } Response::Close => { entry.remove(); None } } } else { None } } else { None }; if let Some(s) = spawn { self.create(s, session); } } }
pub fn create(&mut self, handler: Box<Handler>, session: &Session) { let channel_id = self.next_id; self.next_id += 1; trace!("allocated stream {}", channel_id); match handler.box_on_create(channel_id, session) { Response::Continue(handler) => { self.channels.insert(channel_id, Some(Channel(ChannelMode::Header, handler))); } Response::Spawn(handler) => self.create(handler, session), Response::Close => (), } }
function_block-full_function
[ { "content": "pub trait Handler : Sized + Send + 'static {\n\n fn on_header(self, header_id: u8, header_data: &[u8], session: &Session) -> Response<Self>;\n\n fn on_data(self, offset: usize, data: &[u8], session: &Session) -> Response<Self>;\n\n fn on_eof(self, session: &Session) -> Response<Self>;\n\n fn on_error(self, session: &Session);\n\n}\n\n\n\npub struct AudioFile<H: Handler> {\n\n handler: H,\n\n file_id: FileId,\n\n offset: usize,\n\n}\n\n\n\nimpl <H: Handler> AudioFile<H> {\n\n pub fn new(file_id: FileId, offset: usize, handler: H, session: &Session) {\n\n let handler = AudioFile {\n\n handler: handler,\n\n file_id: file_id,\n\n offset: offset,\n\n };\n", "file_path": "src/audio_file2.rs", "rank": 0, "score": 253526.86828821618 }, { "content": "pub trait MetadataTrait : Send + 'static {\n\n type Message: protobuf::MessageStatic;\n\n\n\n fn base_url() -> &'static str;\n\n fn parse(msg: &Self::Message, session: &Session) -> Self;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Track {\n\n pub id: SpotifyId,\n\n pub name: String,\n\n pub album: SpotifyId,\n\n pub artists: Vec<SpotifyId>,\n\n pub files: LinearMap<FileFormat, FileId>,\n\n pub alternatives: Vec<SpotifyId>,\n\n pub available: bool,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Album {\n", "file_path": "src/metadata.rs", "rank": 2, "score": 199493.60772106872 }, { "content": "pub trait PacketHandler {\n\n fn handle(&mut self, cmd: u8, data: Vec<u8>, session: &Session);\n\n}\n", "file_path": "src/session.rs", "rank": 3, "score": 192173.3283364377 }, { "content": "pub fn create_session(matches: &getopts::Matches) -> Session {\n\n info!(\"librespot {} ({}). 
Built on {}.\",\n\n version::short_sha(),\n\n version::commit_date(),\n\n version::short_now());\n\n\n\n let appkey = load_appkey(matches.opt_str(\"a\"));\n\n let name = matches.opt_str(\"n\").unwrap();\n\n let bitrate = match matches.opt_str(\"b\").as_ref().map(String::as_ref) {\n\n None => Bitrate::Bitrate160, // default value\n\n\n\n Some(\"96\") => Bitrate::Bitrate96,\n\n Some(\"160\") => Bitrate::Bitrate160,\n\n Some(\"320\") => Bitrate::Bitrate320,\n\n Some(b) => {\n\n error!(\"Invalid bitrate {}\", b);\n\n exit(1)\n\n }\n\n };\n\n\n", "file_path": "src/main_helper.rs", "rank": 5, "score": 164963.90915453134 }, { "content": "pub fn create_player(session: &Session, matches: &getopts::Matches) -> Player {\n\n let make_backend = find_backend(matches.opt_str(\"backend\").as_ref().map(AsRef::as_ref));\n\n\n\n Player::new(session.clone(), move || make_backend())\n\n}\n", "file_path": "src/main_helper.rs", "rank": 6, "score": 160332.75605924768 }, { "content": "pub fn get_credentials(session: &Session, matches: &getopts::Matches) -> Credentials {\n\n let credentials = session.cache().get_credentials();\n\n\n\n match (matches.opt_str(\"username\"),\n\n matches.opt_str(\"password\"),\n\n credentials) {\n\n\n\n (Some(username), Some(password), _)\n\n => Credentials::with_password(username, password),\n\n\n\n (Some(ref username), _, Some(ref credentials)) if *username == credentials.username\n\n => credentials.clone(),\n\n\n\n (Some(username), None, _) => {\n\n print!(\"Password for {}: \", username);\n\n stdout().flush().unwrap();\n\n let password = rpassword::read_password().unwrap();\n\n Credentials::with_password(username.clone(), password)\n\n }\n\n\n", "file_path": "src/main_helper.rs", "rank": 7, "score": 160332.7560592477 }, { "content": "pub fn find_backend(name: Option<&str>) -> &'static (Fn() -> Box<Sink> + Send + Sync) {\n\n match name {\n\n Some(\"?\") => {\n\n println!(\"Available Backends : \");\n\n for (&(name, _), idx) in BACKENDS.iter().zip(0..) 
{\n\n if idx == 0 {\n\n println!(\"- {} (default)\", name);\n\n } else {\n\n println!(\"- {}\", name);\n\n }\n\n }\n\n\n\n exit(0);\n\n },\n\n Some(name) => {\n\n BACKENDS.iter().find(|backend| name == backend.0).expect(\"Unknown backend\").1\n\n },\n\n None => {\n\n BACKENDS.first().expect(\"No backends were enabled at build time\").1\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main_helper.rs", "rank": 8, "score": 150125.63268602005 }, { "content": "pub fn rand_vec<G: Rng, R: Rand>(rng: &mut G, size: usize) -> Vec<R> {\n\n rng.gen_iter().take(size).collect()\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 9, "score": 145644.07269897257 }, { "content": "pub fn add_session_arguments(opts: &mut getopts::Options) {\n\n opts.optopt(\"c\", \"cache\", \"Path to a directory where files will be cached.\", \"CACHE\")\n\n .reqopt(\"n\", \"name\", \"Device name\", \"NAME\")\n\n .optopt(\"b\", \"bitrate\", \"Bitrate (96, 160 or 320). Defaults to 160\", \"BITRATE\");\n\n\n\n if APPKEY.is_none() {\n\n opts.reqopt(\"a\", \"appkey\", \"Path to a spotify appkey\", \"APPKEY\");\n\n } else {\n\n opts.optopt(\"a\", \"appkey\", \"Path to a spotify appkey\", \"APPKEY\");\n\n };\n\n}\n\n\n", "file_path": "src/main_helper.rs", "rank": 10, "score": 142247.5659734952 }, { "content": "pub fn now_ms() -> i64 {\n\n let ts = time::now_utc().to_timespec();\n\n ts.sec * 1000 + ts.nsec as i64 / 1000000\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 11, "score": 141053.40965491475 }, { "content": "pub trait ReadSeek : ::std::io::Read + ::std::io::Seek { }\n\nimpl <T: ::std::io::Read + ::std::io::Seek> ReadSeek for T { }\n\n\n", "file_path": "src/util/mod.rs", "rank": 12, "score": 125095.55170859458 }, { "content": "#[cfg(feature = \"with-tremor\")]\n\nfn vorbis_time_seek_ms<R>(decoder: &mut vorbis::Decoder<R>, ms: i64) -> Result<(), vorbis::VorbisError> where R: Read + Seek {\n\n decoder.time_seek(ms)\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 13, "score": 123608.59124311182 }, { 
"content": "fn apply_volume(volume: u16, data: &[i16]) -> Cow<[i16]> {\n\n // Fast path when volume is 100%\n\n if volume == 0xFFFF {\n\n Cow::Borrowed(data)\n\n } else {\n\n Cow::Owned(data.iter()\n\n .map(|&x| {\n\n (x as i32\n\n * volume as i32\n\n / 0xFFFF) as i16\n\n })\n\n .collect())\n\n }\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 14, "score": 122883.27739024145 }, { "content": "pub fn add_authentication_arguments(opts: &mut getopts::Options) {\n\n opts.optopt(\"u\", \"username\", \"Username to sign in with\", \"USERNAME\")\n\n .optopt(\"p\", \"password\", \"Password\", \"PASSWORD\");\n\n\n\n if cfg!(feature = \"facebook\") {\n\n opts.optflag(\"\", \"facebook\", \"Login with a Facebook account\");\n\n }\n\n}\n\n\n", "file_path": "src/main_helper.rs", "rank": 16, "score": 119227.75878422987 }, { "content": "pub fn add_player_arguments(opts: &mut getopts::Options) {\n\n opts.optopt(\"\", \"backend\", \"Audio backend to use. Use '?' to list options\", \"BACKEND\");\n\n}\n\n\n", "file_path": "src/main_helper.rs", "rank": 17, "score": 119227.75878422987 }, { "content": "#[cfg(feature = \"with-tremor\")]\n\nfn vorbis_time_tell_ms<R>(decoder: &mut vorbis::Decoder<R>) -> Result<i64, vorbis::VorbisError> where R: Read + Seek {\n\n decoder.time_tell()\n\n}\n\n\n\npub type PlayerObserver = Box<Fn(&PlayerState) + Send>;\n\n\n\n#[derive(Clone)]\n\npub struct Player {\n\n state: Arc<Mutex<PlayerState>>,\n\n observers: Arc<Mutex<Vec<PlayerObserver>>>,\n\n\n\n commands: mpsc::Sender<PlayerCommand>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct PlayerState {\n\n pub status: PlayStatus,\n\n pub position_ms: u32,\n\n pub position_measured_at: i64,\n\n pub update_time: i64,\n\n pub volume: u16,\n\n pub track: Option<SpotifyId>,\n\n\n\n pub end_of_track: bool,\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 18, "score": 118001.02153049794 }, { "content": "pub fn load_appkey<P: AsRef<Path>>(path: Option<P>) -> Vec<u8> {\n\n path.map(|path| {\n\n let mut file = 
File::open(path).expect(\"Could not open app key.\");\n\n\n\n let mut data = Vec::new();\n\n file.read_to_end(&mut data).unwrap();\n\n\n\n data\n\n }).or_else(|| APPKEY.map(ToOwned::to_owned)).unwrap()\n\n}\n\n\n", "file_path": "src/main_helper.rs", "rank": 19, "score": 104049.45070847953 }, { "content": "pub trait Cache {\n\n fn get_audio_key(&self, _track: SpotifyId, _file: FileId) -> Option<AudioKey> {\n\n None\n\n }\n\n fn put_audio_key(&self, _track: SpotifyId, _file: FileId, _audio_key: AudioKey) { }\n\n\n\n fn get_credentials(&self) -> Option<Credentials> {\n\n None\n\n }\n\n fn put_credentials(&self, _cred: &Credentials) { }\n\n\n\n fn get_file(&self, _file: FileId) -> Option<Box<ReadSeek>> {\n\n None\n\n }\n\n fn put_file(&self, _file: FileId, _contents: &mut Read) { }\n\n}\n\n\n\npub struct NoCache;\n\nimpl Cache for NoCache { }\n\n\n\nmod default_cache;\n\npub use self::default_cache::DefaultCache;\n", "file_path": "src/cache/mod.rs", "rank": 20, "score": 103522.82299605571 }, { "content": "pub trait IgnoreExt {\n\n fn ignore(self);\n\n}\n\n\n\nimpl<T, E> IgnoreExt for Result<T, E> {\n\n fn ignore(self) {\n\n match self {\n\n Ok(_) => (),\n\n Err(_) => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 21, "score": 101023.72072384287 }, { "content": "pub trait Sink {\n\n fn start(&self) -> io::Result<()>;\n\n fn stop(&self) -> io::Result<()>;\n\n fn write(&self, data: &[i16]) -> io::Result<()>;\n\n}\n\n\n\n/*\n\n * Allow #[cfg] rules around elements of a list.\n\n * Workaround until stmt_expr_attributes is stable.\n\n *\n\n * This generates 2^n declarations of the list, with every combination possible\n\n */\n\nmacro_rules! declare_backends {\n\n (pub const $name:ident : $ty:ty = & [ $($tt:tt)* ];) => (\n\n _declare_backends!($name ; $ty ; []; []; []; $($tt)*);\n\n );\n\n}\n\n\n\nmacro_rules! 
_declare_backends {\n\n ($name:ident ; $ty:ty ; [ $($yes:meta,)* ] ; [ $($no:meta,)* ] ; [ $($exprs:expr,)* ] ; #[cfg($m:meta)] $e:expr, $($rest:tt)* ) => (\n", "file_path": "src/audio_backend/mod.rs", "rank": 22, "score": 101023.72072384287 }, { "content": "pub trait Open {\n\n fn open() -> Self;\n\n}\n\n\n", "file_path": "src/audio_backend/mod.rs", "rank": 23, "score": 101023.72072384287 }, { "content": "pub trait StrChunksExt {\n\n fn chunks(&self, size: usize) -> StrChunks;\n\n}\n\n\n\nimpl StrChunksExt for str {\n\n fn chunks(&self, size: usize) -> StrChunks {\n\n StrChunks(self, size)\n\n }\n\n}\n\n\n\nimpl<'s> Iterator for StrChunks<'s> {\n\n type Item = &'s str;\n\n fn next(&mut self) -> Option<&'s str> {\n\n let &mut StrChunks(data, size) = self;\n\n if data.is_empty() {\n\n None\n\n } else {\n\n let ret = Some(&data[..size]);\n\n self.0 = &data[size..];\n\n ret\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 24, "score": 98728.14514989848 }, { "content": "fn load_track(session: &Session, track_id: SpotifyId) -> Option<vorbis::Decoder<Subfile<AudioDecrypt<Box<ReadSeek>>>>> {\n\n let track = session.metadata::<Track>(track_id).await().unwrap();\n\n\n\n let track = match find_available_alternative(session, &track) {\n\n Some(track) => track,\n\n None => {\n\n warn!(\"Track \\\"{}\\\" is not available\", track.name);\n\n return None;\n\n }\n\n };\n\n\n\n let format = match session.config().bitrate {\n\n Bitrate::Bitrate96 => FileFormat::OGG_VORBIS_96,\n\n Bitrate::Bitrate160 => FileFormat::OGG_VORBIS_160,\n\n Bitrate::Bitrate320 => FileFormat::OGG_VORBIS_320,\n\n };\n\n\n\n\n\n\n\n let file_id = match track.files.get(&format) {\n", "file_path": "src/player.rs", "rank": 25, "score": 95323.60255068942 }, { "content": "fn find_available_alternative<'a>(session: &Session, track: &'a Track) -> Option<Cow<'a, Track>> {\n\n if track.available {\n\n Some(Cow::Borrowed(track))\n\n } else {\n\n let alternatives = track.alternatives\n\n .iter()\n\n 
.map(|alt_id| {\n\n session.metadata::<Track>(*alt_id)\n\n })\n\n .collect::<Vec<TrackRef>>();\n\n\n\n eventual::sequence(alternatives.into_iter()).iter().find(|alt| alt.available).map(Cow::Owned)\n\n }\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 26, "score": 86975.60906853528 }, { "content": "#[cfg(not(feature = \"facebook\"))]\n\npub fn facebook_login() -> Result<Credentials, ()> {\n\n Err(())\n\n}\n", "file_path": "src/authentication/mod.rs", "rank": 27, "score": 86705.79630266677 }, { "content": "pub fn facebook_login() -> Result<Credentials, ()> {\n\n let (tx, rx) = mpsc::channel();\n\n\n\n let csrf = rand::thread_rng().gen_ascii_chars().take(32).collect::<String>();\n\n let handler = ServerHandler {\n\n token_tx: Mutex::new(tx),\n\n csrf: csrf.clone()\n\n };\n\n\n\n let ssl = ssl_context().unwrap();\n\n\n\n let mut listener = hyper::net::HttpsListener::new(\"127.0.0.1:0\", ssl).unwrap();\n\n let port = listener.local_addr().unwrap().port();\n\n\n\n let mut server = hyper::Server::new(listener).handle(handler).unwrap();\n\n\n\n println!(\"Logging in using Facebook, please visit https://login.spotify.com/login-facebook-sso/?csrf={}&port={} in your browser.\",\n\n csrf, port);\n\n\n\n let token = rx.recv().unwrap();\n", "file_path": "src/authentication/facebook.rs", "rank": 28, "score": 86705.79630266677 }, { "content": "pub fn apresolve() -> Result<Vec<String>, ()> {\n\n let client = hyper::client::Client::new();\n\n \n\n let mut response = client.get(APRESOLVE_ENDPOINT).send().unwrap();\n\n let mut data = String::new();\n\n response.read_to_string(&mut data).unwrap();\n\n\n\n let data : APResolveData = json::decode(&data).unwrap();\n\n\n\n Ok(data.ap_list)\n\n}\n", "file_path": "src/apresolve.rs", "rank": 29, "score": 86694.46513748386 }, { "content": "struct ServerHandler {\n\n credentials_tx: Mutex<mpsc::Sender<Credentials>>,\n\n private_key: BigUint,\n\n public_key: BigUint,\n\n device_id: String,\n\n device_name: String,\n\n}\n\n\n\nimpl 
ServerHandler {\n\n fn handle_get_info(&self, _params: &BTreeMap<String, String>,\n\n mut response: hyper::server::Response<hyper::net::Fresh>) {\n\n\n\n let public_key = self.public_key.to_bytes_be()\n\n .to_base64(base64::STANDARD);\n\n\n\n let result = json!({\n\n \"status\": 101,\n\n \"statusString\": \"ERROR-OK\",\n\n \"spotifyError\": 0,\n\n \"version\": \"2.1.0\",\n", "file_path": "src/authentication/discovery.rs", "rank": 30, "score": 84818.60718444028 }, { "content": "struct ServerHandler {\n\n token_tx: Mutex<mpsc::Sender<String>>,\n\n csrf: String,\n\n}\n\n\n\nimpl ServerHandler {\n\n fn handle_login(&self, params: &BTreeMap<String, String>) -> hyper::status::StatusCode {\n\n let token = params.get(\"access_token\").unwrap();\n\n let csrf = params.get(\"csrf\").unwrap();\n\n\n\n if *csrf == self.csrf {\n\n self.token_tx.lock().unwrap().send(token.to_owned()).unwrap();\n\n hyper::status::StatusCode::Ok\n\n } else {\n\n hyper::status::StatusCode::Forbidden\n\n }\n\n }\n\n}\n\n\n\nimpl hyper::server::Handler for ServerHandler {\n", "file_path": "src/authentication/facebook.rs", "rank": 31, "score": 84818.60718444028 }, { "content": "pub fn ssl_context() -> Result<Openssl, SslError> {\n\n let cert = try!(X509::from_pem(&mut Cursor::new(SPOTILOCAL_CERT)));\n\n let key = try!(PKey::private_key_from_pem(&mut Cursor::new(SPOTILOCAL_KEY)));\n\n\n\n let mut ctx = try!(SslContext::new(SslMethod::Sslv23));\n\n try!(ctx.set_cipher_list(\"DEFAULT\"));\n\n try!(ctx.set_private_key(&key));\n\n try!(ctx.set_certificate(&cert));\n\n ctx.set_verify(SSL_VERIFY_NONE, None);\n\n Ok(Openssl { context: Arc::new(ctx) })\n\n}\n", "file_path": "src/spotilocal.rs", "rank": 32, "score": 82659.72445484348 }, { "content": "pub fn mkdir_existing(path: &Path) -> io::Result<()> {\n\n fs::create_dir(path).or_else(|err| {\n\n if err.kind() == io::ErrorKind::AlreadyExists {\n\n Ok(())\n\n } else {\n\n Err(err)\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 33, "score": 
78992.30977158007 }, { "content": "#[allow(dead_code)]\n\nfn mk_sink<S: Sink + Open + 'static>() -> Box<Sink> {\n\n Box::new(S::open())\n\n}\n\n\n\n#[cfg(feature = \"portaudio-backend\")]\n\nmod portaudio;\n\n#[cfg(feature = \"portaudio-backend\")]\n\nuse self::portaudio::PortAudioSink;\n\n\n\n#[cfg(feature = \"pulseaudio-backend\")]\n\nmod pulseaudio;\n\n#[cfg(feature = \"pulseaudio-backend\")]\n\nuse self::pulseaudio::PulseAudioSink;\n\n\n\n\n\ndeclare_backends! {\n\n pub const BACKENDS : &'static [(&'static str, &'static (Fn() -> Box<Sink> + Sync + Send + 'static))] = &[\n\n #[cfg(feature = \"portaudio-backend\")]\n\n (\"portaudio\", &mk_sink::<PortAudioSink>),\n\n #[cfg(feature = \"pulseaudio-backend\")]\n\n (\"pulseaudio\", &mk_sink::<PulseAudioSink>),\n\n\n\n ];\n\n}\n", "file_path": "src/audio_backend/mod.rs", "rank": 34, "score": 76880.35044341566 }, { "content": "#[cfg(not(feature = \"discovery\"))]\n\npub fn discovery_login(_device_name: &str, _device_id: &str) -> Result<Credentials, ()> {\n\n Err(())\n\n}\n\n\n\n#[cfg(feature = \"facebook\")]\n\nmod facebook;\n\n#[cfg(feature = \"facebook\")]\n\npub use self::facebook::facebook_login;\n", "file_path": "src/authentication/mod.rs", "rank": 35, "score": 69770.36742704059 }, { "content": "pub fn discovery_login(device_name: &str, device_id: &str) -> Result<Credentials, ()> {\n\n let (tx, rx) = mpsc::channel();\n\n\n\n let key_data = util::rand_vec(&mut rand::thread_rng(), 95);\n\n let private_key = BigUint::from_bytes_be(&key_data);\n\n let public_key = util::powm(&DH_GENERATOR, &private_key, &DH_PRIME);\n\n\n\n let handler = ServerHandler {\n\n device_name: device_name.to_owned(),\n\n device_id: device_id.to_owned(),\n\n private_key: private_key,\n\n public_key: public_key,\n\n credentials_tx: Mutex::new(tx),\n\n };\n\n\n\n let mut listener = hyper::net::HttpListener::new(\"0.0.0.0:0\").unwrap();\n\n let port = listener.local_addr().unwrap().port();\n\n\n\n let mut server = 
hyper::Server::new(listener).handle(handler).unwrap();\n\n\n", "file_path": "src/authentication/discovery.rs", "rank": 36, "score": 69770.36742704059 }, { "content": "pub fn powm(base: &BigUint, exp: &BigUint, modulus: &BigUint) -> BigUint {\n\n let mut base = base.clone();\n\n let mut exp = exp.clone();\n\n let mut result: BigUint = One::one();\n\n\n\n while !exp.is_zero() {\n\n if exp.is_odd() {\n\n result = result.mul(&base).rem(modulus);\n\n }\n\n exp = exp.shr(1);\n\n base = (&base).mul(&base).rem(modulus);\n\n }\n\n\n\n result\n\n}\n\n\n\npub struct StrChunks<'s>(&'s str, usize);\n\n\n", "file_path": "src/util/mod.rs", "rank": 37, "score": 65929.98697230978 }, { "content": "#[derive(Debug)]\n\nenum PlayerCommand {\n\n Load(SpotifyId, bool, u32),\n\n Play,\n\n Pause,\n\n Volume(u16),\n\n Stop,\n\n Seek(u32),\n\n SeekAt(u32, i64),\n\n}\n\n\n\nimpl Player {\n\n pub fn new<F>(session: Session, sink_builder: F) -> Player\n\n where F: FnOnce() -> Box<Sink> + Send + 'static {\n\n let (cmd_tx, cmd_rx) = mpsc::channel();\n\n\n\n let state = Arc::new(Mutex::new(PlayerState {\n\n status: PlayStatus::kPlayStatusStop,\n\n position_ms: 0,\n\n position_measured_at: 0,\n\n update_time: util::now_ms(),\n", "file_path": "src/player.rs", "rank": 38, "score": 57479.64427049412 }, { "content": "enum MercuryCallback {\n\n Future(eventual::Complete<MercuryResponse, ()>),\n\n Subscription(mpsc::Sender<MercuryResponse>),\n\n Channel,\n\n}\n\n\n\npub struct MercuryPending {\n\n parts: Vec<Vec<u8>>,\n\n partial: Option<Vec<u8>>,\n\n callback: MercuryCallback,\n\n}\n\n\n\npub struct MercuryManager {\n\n next_seq: u32,\n\n pending: HashMap<Vec<u8>, MercuryPending>,\n\n subscriptions: HashMap<String, mpsc::Sender<MercuryResponse>>,\n\n}\n\n\n\nimpl ToString for MercuryMethod {\n\n fn to_string(&self) -> String {\n", "file_path": "src/mercury.rs", "rank": 39, "score": 57479.64427049412 }, { "content": "struct PlayerInternal {\n\n state: Arc<Mutex<PlayerState>>,\n\n observers: 
Arc<Mutex<Vec<PlayerObserver>>>,\n\n\n\n session: Session,\n\n commands: mpsc::Receiver<PlayerCommand>,\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 40, "score": 56489.91915530838 }, { "content": "struct SpircInternal {\n\n player: Player,\n\n session: Session,\n\n\n\n seq_nr: u32,\n\n\n\n name: String,\n\n ident: String,\n\n device_type: u8,\n\n can_play: bool,\n\n\n\n repeat: bool,\n\n shuffle: bool,\n\n\n\n is_active: bool,\n\n became_active_at: i64,\n\n\n\n last_command_ident: String,\n\n last_command_msgid: u32,\n\n\n", "file_path": "src/spirc.rs", "rank": 41, "score": 56489.91915530838 }, { "content": "#[derive(Debug, Clone)]\n\n#[derive(RustcDecodable, RustcEncodable)]\n\nstruct StoredCredentials {\n\n pub username: String,\n\n pub auth_type: i32,\n\n pub auth_data: String,\n\n}\n\n\n\nimpl Credentials {\n\n pub fn with_password(username: String, password: String) -> Credentials {\n\n Credentials {\n\n username: username,\n\n auth_type: AuthenticationType::AUTHENTICATION_USER_PASS,\n\n auth_data: password.into_bytes(),\n\n }\n\n }\n\n\n\n pub fn with_blob(username: String, encrypted_blob: &str, device_id: &str) -> Credentials {\n\n fn read_u8<R: Read>(stream: &mut R) -> io::Result<u8> {\n\n let mut data = [0u8];\n\n try!(stream.read_exact(&mut data));\n\n Ok(data[0])\n", "file_path": "src/authentication/mod.rs", "rank": 42, "score": 55115.808386263954 }, { "content": "struct CommandSender<'a> {\n\n spirc_internal: &'a mut SpircInternal,\n\n cmd: MessageType,\n\n recipient: Option<&'a str>,\n\n player_state: Option<&'a PlayerState>,\n\n state: Option<protocol::spirc::State>,\n\n}\n\n\n\nimpl<'a> CommandSender<'a> {\n\n fn new(spirc_internal: &'a mut SpircInternal, cmd: MessageType) -> CommandSender {\n\n CommandSender {\n\n spirc_internal: spirc_internal,\n\n cmd: cmd,\n\n recipient: None,\n\n player_state: None,\n\n state: None,\n\n }\n\n }\n\n\n\n fn recipient(mut self, r: &'a str) -> CommandSender {\n", "file_path": "src/spirc.rs", "rank": 43, 
"score": 54630.8930457892 }, { "content": "#[cfg(feature = \"with-syntex\")]\n\nfn codegen() {\n\n use std::env;\n\n use std::path::PathBuf;\n\n use std::path::Path;\n\n\n\n let mut registry = syntex::Registry::new();\n\n let out = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n\n\n json_macros::plugin_registrar(&mut registry);\n\n protobuf_macros::plugin_registrar(&mut registry);\n\n registry.expand(\"librespot\", Path::new(\"src/lib.in.rs\"), &out.join(\"lib.rs\")).unwrap();\n\n}\n\n\n", "file_path": "build.rs", "rank": 44, "score": 54405.60185586079 }, { "content": "#[cfg(not(feature = \"with-syntex\"))]\n\nfn codegen() { }\n\n\n", "file_path": "build.rs", "rank": 45, "score": 54405.60185586079 }, { "content": "fn main() {\n\n vergen::vergen(vergen::OutputFns::all()).unwrap();\n\n codegen();\n\n}\n\n\n", "file_path": "build.rs", "rank": 46, "score": 54405.60185586079 }, { "content": "struct AudioFileInternal {\n\n partial_tx: Option<eventual::Complete<fs::File, ()>>,\n\n complete_tx: eventual::Complete<NamedTempFile, ()>,\n\n write_file: NamedTempFile,\n\n seek_rx: mpsc::Receiver<u64>,\n\n shared: Arc<AudioFileShared>,\n\n chunk_count: usize,\n\n}\n\n\n", "file_path": "src/audio_file.rs", "rank": 47, "score": 53858.355393550766 }, { "content": "struct AudioFileShared {\n\n cond: Condvar,\n\n bitmap: Mutex<BitSet>,\n\n}\n\n\n\nimpl AudioFile {\n\n pub fn new(session: &Session, file_id: FileId)\n\n -> (eventual::Future<AudioFile, ()>, eventual::Future<NamedTempFile, ()>) {\n\n\n\n let shared = Arc::new(AudioFileShared {\n\n cond: Condvar::new(),\n\n bitmap: Mutex::new(BitSet::new()),\n\n });\n\n\n\n let (seek_tx, seek_rx) = mpsc::channel();\n\n let (partial_tx, partial_rx) = eventual::Future::pair();\n\n let (complete_tx, complete_rx) = eventual::Future::pair();\n\n\n\n let internal = AudioFileInternal {\n\n shared: shared.clone(),\n", "file_path": "src/audio_file.rs", "rank": 48, "score": 53858.355393550766 }, { "content": "fn main() {\n\n if 
env::var(\"RUST_LOG\").is_err() {\n\n env::set_var(\"RUST_LOG\", \"info,librespot=trace\")\n\n }\n\n env_logger::init().unwrap();\n\n\n\n let mut opts = getopts::Options::new();\n\n main_helper::add_session_arguments(&mut opts);\n\n main_helper::add_authentication_arguments(&mut opts);\n\n main_helper::add_player_arguments(&mut opts);\n\n\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(f) => {\n\n error!(\"Error: {}\\n{}\", f.to_string(), usage(&args[0], &opts));\n\n exit(1)\n\n }\n\n };\n", "file_path": "src/main.rs", "rank": 49, "score": 52774.27443501112 }, { "content": "fn main() {\n\n let root = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n\n let out = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let proto = root.join(\"proto\");\n\n\n\n let mut compiler = protobuf_build::Compiler::new(&proto, &out);\n\n\n\n let files = [\"keyexchange\",\n\n \"authentication\",\n\n \"mercury\",\n\n \"metadata\",\n\n \"pubsub\",\n\n \"spirc\"];\n\n\n\n for file in &files {\n\n compiler.compile(&((*file).to_owned() + \".proto\")).unwrap();\n\n\n\n // Hack for rust-lang/rust#18810\n\n // Wrap the generated rust files with \"pub mod { ... 
}\", so they\n\n // can be included.\n", "file_path": "protocol/build.rs", "rank": 50, "score": 52774.27443501112 }, { "content": "#[derive(Debug,Hash,PartialEq,Eq,Copy,Clone)]\n\nstruct AudioKeyId(SpotifyId, FileId);\n\n\n\npub struct AudioKeyManager {\n\n next_seq: u32,\n\n pending: HashMap<u32, AudioKeyId>,\n\n cache: HashMap<AudioKeyId, Vec<eventual::Complete<AudioKey, AudioKeyError>>>,\n\n}\n\n\n\nimpl AudioKeyManager {\n\n pub fn new() -> AudioKeyManager {\n\n AudioKeyManager {\n\n next_seq: 1,\n\n pending: HashMap::new(),\n\n cache: HashMap::new(),\n\n }\n\n }\n\n\n\n fn send_key_request(&mut self, session: &Session, track: SpotifyId, file: FileId) -> u32 {\n\n let seq = self.next_seq;\n\n self.next_seq += 1;\n", "file_path": "src/audio_key.rs", "rank": 51, "score": 46275.78428980043 }, { "content": "fn countrylist_contains(list: &str, country: &str) -> bool {\n\n list.chunks(2).any(|cc| cc == country)\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 52, "score": 39675.33688289046 }, { "content": "fn facebook_get_me_id(token: &str) -> Result<String, ()> {\n\n let url = format!(\"https://graph.facebook.com/me?fields=id&access_token={}\", token);\n\n\n\n let client = hyper::Client::new();\n\n let mut response = client.get(&url).send().unwrap();\n\n let mut body = String::new();\n\n response.read_to_string(&mut body).unwrap();\n\n\n\n let mut result : BTreeMap<String, String> = json::decode(&body).unwrap();\n\n Ok(result.remove(\"id\").unwrap())\n\n}\n\n\n", "file_path": "src/authentication/facebook.rs", "rank": 53, "score": 39675.32978654698 }, { "content": "fn usage(program: &str, opts: &getopts::Options) -> String {\n\n let brief = format!(\"Usage: {} [options]\", program);\n\n format!(\"{}\", opts.usage(&brief))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 54, "score": 38874.930386970525 }, { "content": "fn parse_restrictions<'s, I>(restrictions: I, country: &str, catalogue: &str) -> bool\n\n where I: IntoIterator<Item = &'s 
protocol::metadata::Restriction>\n\n{\n\n restrictions.into_iter()\n\n .filter(|r| r.get_catalogue_str().contains(&catalogue.to_owned()))\n\n .all(|r| {\n\n !countrylist_contains(r.get_countries_forbidden(), country) &&\n\n (!r.has_countries_allowed() ||\n\n countrylist_contains(r.get_countries_allowed(), country))\n\n })\n\n}\n\n\n", "file_path": "src/metadata.rs", "rank": 55, "score": 36019.87144286735 }, { "content": " }\n\n }\n\n\n\n pub fn recv(&self) -> (u8, Vec<u8>) {\n\n self.0.rx_connection.lock().unwrap().as_mut().unwrap().recv_packet().unwrap()\n\n }\n\n\n\n pub fn send_packet(&self, cmd: u8, data: &[u8]) -> connection::Result<()> {\n\n self.0.tx_connection.lock().unwrap().as_mut().unwrap().send_packet(cmd, data)\n\n }\n\n\n\n pub fn audio_key(&self, track: SpotifyId, file_id: FileId) -> Future<AudioKey, AudioKeyError> {\n\n self.0.cache\n\n .get_audio_key(track, file_id)\n\n .map(Future::of)\n\n .unwrap_or_else(|| {\n\n let self_ = self.clone();\n\n self.0.audio_key.lock().unwrap()\n\n .request(self, track, file_id)\n\n .map(move |key| {\n", "file_path": "src/session.rs", "rank": 56, "score": 33634.02804247958 }, { "content": " tx_connection: Mutex<Option<CipherConnection>>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Session(pub Arc<SessionInternal>);\n\n\n\nimpl Session {\n\n pub fn new(config: Config, cache: Box<Cache + Send + Sync>) -> Session {\n\n let device_id = {\n\n let mut h = Sha1::new();\n\n h.input_str(&config.device_name);\n\n h.result_str()\n\n };\n\n\n\n Session(Arc::new(SessionInternal {\n\n config: config,\n\n device_id: device_id,\n\n data: RwLock::new(SessionData {\n\n country: String::new(),\n\n canonical_username: String::new(),\n", "file_path": "src/session.rs", "rank": 57, "score": 33633.29254172014 }, { "content": "\n\n pub fn album_cover(&self, file_id: FileId) -> eventual::Future<Vec<u8>, ()> {\n\n self.0.cache\n\n .get_file(file_id)\n\n .map(|mut f| {\n\n let mut data = Vec::new();\n\n f.read_to_end(&mut data).unwrap();\n\n 
Future::of(data)\n\n })\n\n .unwrap_or_else(|| {\n\n let self_ = self.clone();\n\n AlbumCover::get(file_id, self)\n\n .map(move |data| {\n\n self_.0.cache.put_file(file_id, &mut Cursor::new(&data));\n\n data\n\n })\n\n })\n\n }\n\n\n\n pub fn stream(&self, handler: Box<stream::Handler>) {\n", "file_path": "src/session.rs", "rank": 58, "score": 33631.70809857707 }, { "content": " _ => {\n\n error!(\"Unexpected message {:x}\", cmd);\n\n Err(())\n\n }\n\n }\n\n }\n\n\n\n pub fn poll(&self) {\n\n let (cmd, data) = self.recv();\n\n\n\n match cmd {\n\n 0x4 => self.send_packet(0x49, &data).unwrap(),\n\n 0x4a => (),\n\n 0x9 | 0xa => self.0.stream.lock().unwrap().handle(cmd, data, self),\n\n 0xd | 0xe => self.0.audio_key.lock().unwrap().handle(cmd, data, self),\n\n 0x1b => {\n\n self.0.data.write().unwrap().country = String::from_utf8(data).unwrap();\n\n }\n\n 0xb2...0xb6 => self.0.mercury.lock().unwrap().handle(cmd, data, self),\n\n _ => (),\n", "file_path": "src/session.rs", "rank": 59, "score": 33630.411479818846 }, { "content": " devkey: self.config().application_key[0x1..0x81].to_vec(),\n\n signature: self.config().application_key[0x81..0x141].to_vec(),\n\n useragent: self.config().user_agent.clone(),\n\n callback_hash: vec![0; 20],\n\n }\n\n });\n\n\n\n let mut connection = self.connect();\n\n connection.send_packet(0xab, &packet.write_to_bytes().unwrap()).unwrap();\n\n let (cmd, data) = connection.recv_packet().unwrap();\n\n\n\n match cmd {\n\n 0xac => {\n\n let welcome_data: protocol::authentication::APWelcome =\n\n protobuf::parse_from_bytes(&data).unwrap();\n\n\n\n let username = welcome_data.get_canonical_username().to_owned();\n\n self.0.data.write().unwrap().canonical_username = username.clone();\n\n *self.0.rx_connection.lock().unwrap() = Some(connection.clone());\n\n *self.0.tx_connection.lock().unwrap() = Some(connection);\n", "file_path": "src/session.rs", "rank": 60, "score": 33628.04620265859 }, { "content": "\n\n let remote_key = 
response.get_challenge()\n\n .get_login_crypto_challenge()\n\n .get_diffie_hellman()\n\n .get_gs();\n\n\n\n let shared_secret = local_keys.shared_secret(remote_key);\n\n let (challenge, send_key, recv_key) = {\n\n let mut data = Vec::with_capacity(0x64);\n\n let mut mac = Hmac::new(Sha1::new(), &shared_secret);\n\n\n\n for i in 1..6 {\n\n mac.input(&init_client_packet);\n\n mac.input(&init_server_packet);\n\n mac.input(&[i]);\n\n data.write(&mac.result().code()).unwrap();\n\n mac.reset();\n\n }\n\n\n\n mac = Hmac::new(Sha1::new(), &data[..0x14]);\n", "file_path": "src/session.rs", "rank": 61, "score": 33627.394038428356 }, { "content": "use connection::{self, PlainConnection, CipherConnection};\n\nuse diffie_hellman::DHLocalKeys;\n\nuse mercury::{MercuryManager, MercuryRequest, MercuryResponse};\n\nuse metadata::{MetadataManager, MetadataRef, MetadataTrait};\n\nuse protocol;\n\nuse stream::StreamManager;\n\nuse util::{self, SpotifyId, FileId, ReadSeek};\n\nuse version;\n\n\n\nuse stream;\n\n\n\npub enum Bitrate {\n\n Bitrate96,\n\n Bitrate160,\n\n Bitrate320,\n\n}\n\n\n\npub struct Config {\n\n pub application_key: Vec<u8>,\n\n pub user_agent: String,\n", "file_path": "src/session.rs", "rank": 62, "score": 33627.15049756928 }, { "content": " pub device_name: String,\n\n pub bitrate: Bitrate,\n\n}\n\n\n\npub struct SessionData {\n\n country: String,\n\n canonical_username: String,\n\n}\n\n\n\npub struct SessionInternal {\n\n config: Config,\n\n device_id: String,\n\n data: RwLock<SessionData>,\n\n\n\n cache: Box<Cache + Send + Sync>,\n\n mercury: Mutex<MercuryManager>,\n\n metadata: Mutex<MetadataManager>,\n\n stream: Mutex<StreamManager>,\n\n audio_key: Mutex<AudioKeyManager>,\n\n rx_connection: Mutex<Option<CipherConnection>>,\n", "file_path": "src/session.rs", "rank": 63, "score": 33625.54963596082 }, { "content": " &send_key,\n\n &recv_key)\n\n }\n\n\n\n pub fn login(&self, credentials: Credentials) -> Result<Credentials, ()> {\n\n let packet = 
protobuf_init!(protocol::authentication::ClientResponseEncrypted::new(), {\n\n login_credentials => {\n\n username: credentials.username,\n\n typ: credentials.auth_type,\n\n auth_data: credentials.auth_data,\n\n },\n\n system_info => {\n\n cpu_family: protocol::authentication::CpuFamily::CPU_UNKNOWN,\n\n os: protocol::authentication::Os::OS_UNKNOWN,\n\n system_information_string: \"librespot\".to_owned(),\n\n device_id: self.device_id().to_owned(),\n\n },\n\n version_string: version::version_string(),\n\n appkey => {\n\n version: self.config().application_key[0] as u32,\n", "file_path": "src/session.rs", "rank": 64, "score": 33625.31126840573 }, { "content": " mac.input(&init_client_packet);\n\n mac.input(&init_server_packet);\n\n\n\n (mac.result().code().to_vec(),\n\n data[0x14..0x34].to_vec(),\n\n data[0x34..0x54].to_vec())\n\n };\n\n\n\n let packet = protobuf_init!(protocol::keyexchange::ClientResponsePlaintext::new(), {\n\n login_crypto_response.diffie_hellman => {\n\n hmac: challenge\n\n },\n\n pow_response => {},\n\n crypto_response => {},\n\n });\n\n\n\n\n\n connection.send_packet(&packet.write_to_bytes().unwrap()).unwrap();\n\n\n\n CipherConnection::new(connection.into_stream(),\n", "file_path": "src/session.rs", "rank": 65, "score": 33625.25856567841 }, { "content": " self_.0.cache.put_audio_key(track, file_id, key);\n\n key\n\n })\n\n })\n\n }\n\n\n\n pub fn audio_file(&self, file_id: FileId) -> Box<ReadSeek> {\n\n self.0.cache\n\n .get_file(file_id)\n\n .unwrap_or_else(|| {\n\n let (audio_file, complete_rx) = AudioFile::new(self, file_id);\n\n\n\n let self_ = self.clone();\n\n complete_rx.map(move |mut complete_file| {\n\n self_.0.cache.put_file(file_id, &mut complete_file)\n\n }).fire();\n\n\n\n Box::new(audio_file.await().unwrap())\n\n })\n\n }\n", "file_path": "src/session.rs", "rank": 66, "score": 33622.777093358956 }, { "content": " self.0.stream.lock().unwrap().create(handler, self)\n\n }\n\n\n\n pub fn metadata<T: MetadataTrait>(&self, id: 
SpotifyId) -> MetadataRef<T> {\n\n self.0.metadata.lock().unwrap().get(self, id)\n\n }\n\n\n\n pub fn mercury(&self, req: MercuryRequest) -> Future<MercuryResponse, ()> {\n\n self.0.mercury.lock().unwrap().request(self, req)\n\n }\n\n\n\n pub fn mercury_sub(&self, uri: String) -> mpsc::Receiver<MercuryResponse> {\n\n self.0.mercury.lock().unwrap().subscribe(self, uri)\n\n }\n\n\n\n pub fn cache(&self) -> &Cache {\n\n self.0.cache.as_ref()\n\n }\n\n\n\n pub fn config(&self) -> &Config {\n", "file_path": "src/session.rs", "rank": 67, "score": 33621.15236483361 }, { "content": " ],\n\n */\n\n login_crypto_hello.diffie_hellman => {\n\n gc: local_keys.public_key(),\n\n server_keys_known: 1,\n\n },\n\n client_nonce: util::rand_vec(&mut thread_rng(), 0x10),\n\n padding: vec![0x1e],\n\n feature_set => {\n\n autoupdate2: true,\n\n }\n\n });\n\n\n\n let init_client_packet = connection.send_packet_prefix(&[0, 4],\n\n &request.write_to_bytes().unwrap())\n\n .unwrap();\n\n let init_server_packet = connection.recv_packet().unwrap();\n\n\n\n let response: protocol::keyexchange::APResponseMessage =\n\n protobuf::parse_from_bytes(&init_server_packet[4..]).unwrap();\n", "file_path": "src/session.rs", "rank": 68, "score": 33620.99001489328 }, { "content": " &self.0.config\n\n }\n\n\n\n pub fn username(&self) -> String {\n\n self.0.data.read().unwrap().canonical_username.clone()\n\n }\n\n\n\n pub fn country(&self) -> String {\n\n self.0.data.read().unwrap().country.clone()\n\n }\n\n\n\n pub fn device_id(&self) -> &str {\n\n &self.0.device_id\n\n }\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 69, "score": 33620.42147067323 }, { "content": "\n\n info!(\"Authenticated !\");\n\n\n\n let reusable_credentials = Credentials {\n\n username: username,\n\n auth_type: welcome_data.get_reusable_auth_credentials_type(),\n\n auth_data: welcome_data.get_reusable_auth_credentials().to_owned(),\n\n };\n\n\n\n self.0.cache.put_credentials(&reusable_credentials);\n\n\n\n 
Ok(reusable_credentials)\n\n }\n\n\n\n 0xad => {\n\n let msg: protocol::keyexchange::APLoginFailed =\n\n protobuf::parse_from_bytes(&data).unwrap();\n\n error!(\"Authentication failed, {:?}\", msg);\n\n Err(())\n\n }\n", "file_path": "src/session.rs", "rank": 70, "score": 33619.925038364505 }, { "content": " }),\n\n\n\n rx_connection: Mutex::new(None),\n\n tx_connection: Mutex::new(None),\n\n\n\n cache: cache,\n\n mercury: Mutex::new(MercuryManager::new()),\n\n metadata: Mutex::new(MetadataManager::new()),\n\n stream: Mutex::new(StreamManager::new()),\n\n audio_key: Mutex::new(AudioKeyManager::new()),\n\n }))\n\n }\n\n\n\n fn connect(&self) -> CipherConnection {\n\n let local_keys = DHLocalKeys::random(&mut thread_rng());\n\n\n\n let aps = apresolve().unwrap();\n\n let ap = thread_rng().choose(&aps).expect(\"No APs found\");\n\n\n\n info!(\"Connecting to AP {}\", ap);\n", "file_path": "src/session.rs", "rank": 71, "score": 33619.09195872079 }, { "content": "use crypto::digest::Digest;\n\nuse crypto::sha1::Sha1;\n\nuse crypto::hmac::Hmac;\n\nuse crypto::mac::Mac;\n\nuse eventual;\n\nuse eventual::Future;\n\nuse eventual::Async;\n\nuse protobuf::{self, Message};\n\nuse rand::thread_rng;\n\nuse rand::Rng;\n\nuse std::io::{Read, Write, Cursor};\n\nuse std::result::Result;\n\nuse std::sync::{Mutex, RwLock, Arc, mpsc};\n\n\n\nuse album_cover::AlbumCover;\n\nuse apresolve::apresolve;\n\nuse audio_key::{AudioKeyManager, AudioKey, AudioKeyError};\n\nuse audio_file::AudioFile;\n\nuse authentication::Credentials;\n\nuse cache::Cache;\n", "file_path": "src/session.rs", "rank": 72, "score": 33617.54342570539 }, { "content": " let mut connection = PlainConnection::connect(ap).unwrap();\n\n\n\n let request = protobuf_init!(protocol::keyexchange::ClientHello::new(), {\n\n build_info => {\n\n product: protocol::keyexchange::Product::PRODUCT_LIBSPOTIFY_EMBEDDED,\n\n platform: protocol::keyexchange::Platform::PLATFORM_LINUX_X86,\n\n version: 0x10800000000,\n\n },\n\n /*\n\n 
fingerprints_supported => [\n\n protocol::keyexchange::Fingerprint::FINGERPRINT_GRAIN\n\n ],\n\n */\n\n cryptosuites_supported => [\n\n protocol::keyexchange::Cryptosuite::CRYPTO_SUITE_SHANNON,\n\n //protocol::keyexchange::Cryptosuite::CRYPTO_SUITE_RC4_SHA1_HMAC\n\n ],\n\n /*\n\n powschemes_supported => [\n\n protocol::keyexchange::Powscheme::POW_HASH_CASH\n", "file_path": "src/session.rs", "rank": 73, "score": 33616.70530842123 }, { "content": "fn track_ids_to_state<I: Iterator<Item = SpotifyId>>(track_ids: I) -> protocol::spirc::State {\n\n let tracks: Vec<protocol::spirc::TrackRef> =\n\n track_ids.map(|i| {\n\n protobuf_init!(protocol::spirc::TrackRef::new(), { gid: i.to_raw().to_vec()})\n\n })\n\n .collect();\n\n protobuf_init!(protocol::spirc::State::new(), {\n\n track: RepeatedField::from_vec(tracks)\n\n })\n\n}\n", "file_path": "src/spirc.rs", "rank": 74, "score": 33003.05431919625 }, { "content": " pub scrobble_error: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n error: sp_error)>,\n\n\n\n pub private_session_mode_changed: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n is_private: bool)>,\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Copy)]\n\npub struct sp_audioformat {\n\n pub sample_type: sp_sampletype,\n\n pub sample_rate: c_int,\n\n pub channels: c_int,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\n#[repr(u32)]\n\npub enum sp_sampletype {\n\n SP_SAMPLETYPE_INT16_NATIVE_ENDIAN = 0,\n\n _Dummy // rust #10292\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Copy)]\n\npub struct sp_audio_buffer_stats {\n\n pub samples: c_int,\n\n pub stutter: c_int,\n\n}\n", "file_path": "capi/src/types.rs", "rank": 75, "score": 32586.59899952866 }, { "content": " pub message_to_user: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n message: *const c_char)>,\n\n\n\n pub notify_main_thread: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub music_delivery: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n format: *const 
sp_audioformat,\n\n frames: *const c_void,\n\n num_frames: c_int)\n\n -> c_int>,\n\n\n\n pub play_token_lost: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub log_message: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n data: *const c_char)>,\n\n\n\n pub end_of_track: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub streaming_error: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n error: sp_error)>,\n", "file_path": "capi/src/types.rs", "rank": 76, "score": 32584.063102856704 }, { "content": "\n\n pub userinfo_updated: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub start_playback: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub stop_playback: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub get_audio_buffer_stats: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n stats: *mut sp_audio_buffer_stats)>,\n\n\n\n pub offline_status_updated: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub offline_error: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n error: sp_error)>,\n\n\n\n pub credentials_blob_updated: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n blob: *const c_char)>,\n\n\n\n pub connectionstate_updated: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n", "file_path": "capi/src/types.rs", "rank": 77, "score": 32582.64999087122 }, { "content": " pub device_id: *const c_char,\n\n pub proxy: *const c_char,\n\n pub proxy_username: *const c_char,\n\n pub proxy_password: *const c_char,\n\n pub tracefile: *const c_char,\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Copy)]\n\npub struct sp_session_callbacks {\n\n pub logged_in: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n error: sp_error)>,\n\n\n\n pub logged_out: Option<unsafe extern \"C\" fn(session: *mut sp_session)>,\n\n\n\n pub metadata_updated: Option<unsafe extern \"C\" fn(session: *mut 
sp_session)>,\n\n\n\n pub connection_error: Option<unsafe extern \"C\" fn(session: *mut sp_session,\n\n error: sp_error)>,\n\n\n", "file_path": "capi/src/types.rs", "rank": 78, "score": 32582.337520948255 }, { "content": " SP_ERROR_OFFLINE_LICENSE_ERROR = 36,\n\n SP_ERROR_LASTFM_AUTH_ERROR = 39,\n\n SP_ERROR_INVALID_ARGUMENT = 40,\n\n SP_ERROR_SYSTEM_FAILURE = 41,\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy,Clone)]\n\npub struct sp_session_config {\n\n pub api_version: c_int,\n\n pub cache_location: *const c_char,\n\n pub settings_location: *const c_char,\n\n pub application_key: *const c_void,\n\n pub application_key_size: size_t,\n\n pub user_agent: *const c_char,\n\n pub callbacks: *const sp_session_callbacks,\n\n pub userdata: *mut c_void,\n\n pub compress_playlists: bool,\n\n pub dont_save_metadata_for_playlists: bool,\n\n pub initially_unload_playlists: bool,\n", "file_path": "capi/src/types.rs", "rank": 79, "score": 32582.117036016203 }, { "content": "#![allow(non_camel_case_types, dead_code)]\n\n\n\nuse libc::{size_t, c_int, c_char, c_void};\n\nuse session::sp_session;\n\n\n\n#[derive(Clone, Copy)]\n\n#[repr(u32)]\n\npub enum sp_error {\n\n SP_ERROR_OK = 0,\n\n SP_ERROR_BAD_API_VERSION = 1,\n\n SP_ERROR_API_INITIALIZATION_FAILED = 2,\n\n SP_ERROR_TRACK_NOT_PLAYABLE = 3,\n\n SP_ERROR_BAD_APPLICATION_KEY = 5,\n\n SP_ERROR_BAD_USERNAME_OR_PASSWORD = 6,\n\n SP_ERROR_USER_BANNED = 7,\n\n SP_ERROR_UNABLE_TO_CONTACT_SERVER = 8,\n\n SP_ERROR_CLIENT_TOO_OLD = 9,\n\n SP_ERROR_OTHER_PERMANENT = 10,\n\n SP_ERROR_BAD_USER_AGENT = 11,\n\n SP_ERROR_MISSING_CALLBACK = 12,\n", "file_path": "capi/src/types.rs", "rank": 80, "score": 32581.564755527397 }, { "content": " SP_ERROR_INVALID_INDATA = 13,\n\n SP_ERROR_INDEX_OUT_OF_RANGE = 14,\n\n SP_ERROR_USER_NEEDS_PREMIUM = 15,\n\n SP_ERROR_OTHER_TRANSIENT = 16,\n\n SP_ERROR_IS_LOADING = 17,\n\n SP_ERROR_NO_STREAM_AVAILABLE = 18,\n\n SP_ERROR_PERMISSION_DENIED = 19,\n\n SP_ERROR_INBOX_IS_FULL = 20,\n\n SP_ERROR_NO_CACHE = 21,\n\n 
SP_ERROR_NO_SUCH_USER = 22,\n\n SP_ERROR_NO_CREDENTIALS = 23,\n\n SP_ERROR_NETWORK_DISABLED = 24,\n\n SP_ERROR_INVALID_DEVICE_ID = 25,\n\n SP_ERROR_CANT_OPEN_TRACE_FILE = 26,\n\n SP_ERROR_APPLICATION_BANNED = 27,\n\n SP_ERROR_OFFLINE_TOO_MANY_TRACKS = 31,\n\n SP_ERROR_OFFLINE_DISK_CACHE = 32,\n\n SP_ERROR_OFFLINE_EXPIRED = 33,\n\n SP_ERROR_OFFLINE_NOT_ALLOWED = 34,\n\n SP_ERROR_OFFLINE_LICENSE_LOST = 35,\n", "file_path": "capi/src/types.rs", "rank": 81, "score": 32570.64344698366 }, { "content": "\n\n pub fn receive<T, E, F>(future: Future<T, E>, handler: F)\n\n where T : Send, E: Send,\n\n F : FnOnce(&mut SpSession, AsyncResult<T, E>) -> () + Send + 'static {\n\n\n\n future.receive(move |result| {\n\n SpSession::run(move |session| {\n\n handler(session, result);\n\n })\n\n })\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\npub type sp_session = SpSession;\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn sp_session_create(c_config: *const sp_session_config,\n\n c_session: *mut *mut sp_session) -> sp_error {\n\n assert!(global_session.is_none());\n", "file_path": "capi/src/session.rs", "rank": 82, "score": 32000.629269381134 }, { "content": "pub struct SpSession {\n\n pub session: Session,\n\n cache: CStringCache,\n\n rx: mpsc::Receiver<SpSessionEvent>,\n\n\n\n pub callbacks: &'static sp_session_callbacks,\n\n}\n\n\n\nimpl SpSession {\n\n pub unsafe fn global() -> &'static SpSession {\n\n &*global_session.unwrap().0\n\n }\n\n\n\n pub fn run<F: FnOnce(&mut SpSession) -> () + 'static>(event: F) {\n\n let tx = unsafe {\n\n &*global_session.unwrap().1\n\n };\n\n \n\n tx.lock().unwrap().send(Box::new(event)).unwrap();\n\n }\n", "file_path": "capi/src/session.rs", "rank": 83, "score": 32000.37329970635 }, { "content": "use libc::{c_int, c_char};\n\nuse std::ffi::CStr;\n\nuse std::slice::from_raw_parts;\n\nuse std::sync::mpsc;\n\nuse std::boxed::FnBox;\n\nuse std::sync::Mutex;\n\n\n\nuse librespot::session::{Session, Config, Bitrate};\n\nuse eventual::{Async, 
AsyncResult, Future};\n\n\n\nuse cstring_cache::CStringCache;\n\nuse types::sp_error;\n\nuse types::sp_error::*;\n\nuse types::sp_session_config;\n\nuse types::sp_session_callbacks;\n\n\n\nstatic mut global_session: Option<(*const sp_session, *const Mutex<mpsc::Sender<SpSessionEvent>>)> = None;\n\n\n\npub type SpSessionEvent = Box<FnBox(&mut SpSession) -> ()>;\n\n\n", "file_path": "capi/src/session.rs", "rank": 84, "score": 31995.205651036173 }, { "content": " }), |session, result| {\n\n result.unwrap();\n\n\n\n {\n\n let session = session.session.clone();\n\n ::std::thread::spawn(move || {\n\n loop {\n\n session.poll();\n\n }\n\n });\n\n }\n\n });\n\n }\n\n\n\n SP_ERROR_OK\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn sp_session_user_name(c_session: *mut sp_session) -> *const c_char {\n\n let session = &mut *c_session;\n", "file_path": "capi/src/session.rs", "rank": 85, "score": 31990.79246666078 }, { "content": " let session = SpSession {\n\n session: Session::new(config),\n\n cache: CStringCache::new(),\n\n rx: rx,\n\n callbacks: &*c_config.callbacks,\n\n };\n\n\n\n let session = Box::into_raw(Box::new(session));\n\n let tx = Box::into_raw(Box::new(Mutex::new(tx)));\n\n\n\n global_session = Some((session, tx));\n\n\n\n *c_session = session;\n\n\n\n SP_ERROR_OK\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn sp_session_release(c_session: *mut sp_session) -> sp_error {\n\n global_session = None;\n", "file_path": "capi/src/session.rs", "rank": 86, "score": 31990.37812350998 }, { "content": "\n\n let username = session.session.username();\n\n session.cache.intern(&username).as_ptr()\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn sp_session_user_country(c_session: *mut sp_session) -> c_int {\n\n let session = &*c_session;\n\n\n\n let country = session.session.country();\n\n country.chars().fold(0, |acc, x| {\n\n acc << 8 | (x as u32)\n\n }) as c_int\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn sp_session_process_events(c_session: 
*mut sp_session, next_timeout: *mut c_int) -> sp_error {\n\n let session = &mut *c_session;\n\n\n\n if !next_timeout.is_null() {\n\n *next_timeout = 10;\n\n }\n\n\n\n let event = session.rx.recv().unwrap();\n\n event.call_box((session,));\n\n\n\n SP_ERROR_OK\n\n}\n", "file_path": "capi/src/session.rs", "rank": 87, "score": 31988.300242920497 }, { "content": " drop(Box::from_raw(c_session));\n\n\n\n SP_ERROR_OK\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn sp_session_login(c_session: *mut sp_session,\n\n c_username: *const c_char,\n\n c_password: *const c_char,\n\n _remember_me: bool,\n\n _blob: *const c_char) -> sp_error {\n\n let session = &*c_session;\n\n\n\n let username = CStr::from_ptr(c_username).to_string_lossy().into_owned();\n\n let password = CStr::from_ptr(c_password).to_string_lossy().into_owned();\n\n\n\n {\n\n let session = session.session.clone();\n\n SpSession::receive(Future::spawn(move || {\n\n session.login_password(username, password)\n", "file_path": "capi/src/session.rs", "rank": 88, "score": 31988.253034446538 }, { "content": "\n\n let c_config = &*c_config;\n\n\n\n let application_key = from_raw_parts::<u8>(c_config.application_key as *const u8,\n\n c_config.application_key_size);\n\n\n\n let user_agent = CStr::from_ptr(c_config.user_agent).to_string_lossy().into_owned();\n\n let device_name = CStr::from_ptr(c_config.device_id).to_string_lossy().into_owned();\n\n let cache_location = CStr::from_ptr(c_config.cache_location).to_string_lossy().into_owned();\n\n\n\n let config = Config {\n\n application_key: application_key.to_owned(),\n\n user_agent: user_agent,\n\n device_name: device_name,\n\n cache_location: cache_location.into(),\n\n bitrate: Bitrate::Bitrate160,\n\n };\n\n\n\n let (tx, rx) = mpsc::channel();\n\n\n", "file_path": "capi/src/session.rs", "rank": 89, "score": 31984.475200748777 }, { "content": " }\n\n\n\n packet.write_u16::<BigEndian>(header.compute_size() as u16).unwrap();\n\n header.write_to_writer(&mut 
packet).unwrap();\n\n\n\n for p in &req.payload {\n\n packet.write_u16::<BigEndian>(p.len() as u16).unwrap();\n\n packet.write(&p).unwrap();\n\n }\n\n\n\n packet\n\n }\n\n}\n\n\n\nimpl PacketHandler for MercuryManager {\n\n fn handle(&mut self, cmd: u8, data: Vec<u8>, _session: &Session) {\n\n let mut packet = Cursor::new(data);\n\n\n\n let seq = {\n\n let seq_length = packet.read_u16::<BigEndian>().unwrap() as usize;\n", "file_path": "src/mercury.rs", "rank": 94, "score": 38.97077108058258 }, { "content": "\n\n session.send_packet(0x8, &data).unwrap();\n\n\n\n stream::Response::Continue(self)\n\n }\n\n\n\n fn on_header(mut self, header_id: u8, header_data: &[u8], session: &Session) -> stream::Response<Self> {\n\n //println!(\"on_header\");\n\n match self.handler.on_header(header_id, header_data, session) {\n\n Response::Continue(handler) => {\n\n self.handler = handler;\n\n stream::Response::Continue(self)\n\n }\n\n Response::Seek(handler, offset) => {\n\n self.handler = handler;\n\n self.offset = offset;\n\n stream::Response::Spawn(self)\n\n }\n\n Response::Close => stream::Response::Close,\n\n }\n", "file_path": "src/audio_file2.rs", "rank": 95, "score": 37.71028137467703 }, { "content": "use eventual;\n\nuse std::io::Write;\n\nuse byteorder::{WriteBytesExt, BigEndian};\n\n\n\nuse session::Session;\n\nuse util::FileId;\n\nuse stream;\n\n\n\npub struct AlbumCover {\n\n file_id: FileId,\n\n data: Vec<u8>,\n\n cover_tx: eventual::Complete<Vec<u8>, ()>,\n\n}\n\n\n\nimpl stream::Handler for AlbumCover {\n\n fn on_create(self, channel_id: stream::ChannelId, session: &Session) -> stream::Response<Self> {\n\n let mut req: Vec<u8> = Vec::new();\n\n req.write_u16::<BigEndian>(channel_id).unwrap();\n\n req.write_u16::<BigEndian>(0).unwrap();\n\n req.write(&self.file_id.0).unwrap();\n", "file_path": "src/album_cover.rs", "rank": 96, "score": 31.22849546280323 }, { "content": " session.send_packet(0x19, &req).unwrap();\n\n\n\n stream::Response::Continue(self)\n\n }\n\n\n\n 
fn on_header(self, _header_id: u8, _header_data: &[u8], _session: &Session) -> stream::Response<Self> {\n\n stream::Response::Continue(self)\n\n }\n\n\n\n fn on_data(mut self, data: &[u8], _session: &Session) -> stream::Response<Self> {\n\n self.data.extend_from_slice(data);\n\n stream::Response::Continue(self)\n\n }\n\n\n\n fn on_close(self, _session: &Session) -> stream::Response<Self> {\n\n // End of chunk, request a new one\n\n self.cover_tx.complete(self.data);\n\n stream::Response::Close\n\n }\n\n\n", "file_path": "src/album_cover.rs", "rank": 97, "score": 30.606281329538326 }, { "content": " try!(self.stream.finish_send());\n\n try!(self.stream.flush());\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn recv_packet(&mut self) -> Result<(u8, Vec<u8>)> {\n\n let cmd = try!(self.stream.read_u8());\n\n let size = try!(self.stream.read_u16::<BigEndian>()) as usize;\n\n\n\n let mut data = vec![0; size];\n\n try!(self.stream.read_exact(&mut data));\n\n\n\n try!(self.stream.finish_recv());\n\n\n\n Ok((cmd, data))\n\n }\n\n}\n", "file_path": "src/connection.rs", "rank": 98, "score": 30.57843520290375 }, { "content": " }\n\n\n\n fn on_data(mut self, data: &[u8], session: &Session) -> stream::Response<Self> {\n\n //println!(\"on_data\");\n\n match self.handler.on_data(self.offset, data, session) {\n\n Response::Continue(handler) => {\n\n self.handler = handler;\n\n self.offset += data.len();\n\n stream::Response::Continue(self)\n\n }\n\n Response::Seek(handler, offset) => {\n\n println!(\"seek request {}\", offset);\n\n self.handler = handler;\n\n self.offset = offset;\n\n stream::Response::Spawn(self)\n\n }\n\n Response::Close => stream::Response::Close,\n\n }\n\n }\n\n\n", "file_path": "src/audio_file2.rs", "rank": 99, "score": 30.55745793183946 } ]
Rust
butane/tests/common/pg.rs
DimmKG/butane
fcf9dcddb3e55f827daa9f54a6f276a9fce84903
use butane::db::{Backend, Connection, ConnectionSpec}; use once_cell::sync::Lazy; use std::io::{BufRead, BufReader, Read, Write}; use std::path::PathBuf; use std::process::{ChildStderr, Command, Stdio}; use uuid_for_test::Uuid; pub fn pg_connection() -> (Connection, PgSetupData) { let backend = butane::db::get_backend("pg").unwrap(); let data = pg_setup(); (backend.connect(&pg_connstr(&data)).unwrap(), data) } pub fn pg_connspec() -> (ConnectionSpec, PgSetupData) { let data = pg_setup(); ( ConnectionSpec::new(butane::db::pg::BACKEND_NAME, pg_connstr(&data)), data, ) } struct PgServerState { pub dir: PathBuf, pub sockdir: PathBuf, pub proc: std::process::Child, pub stderr: BufReader<ChildStderr>, } impl Drop for PgServerState { fn drop(&mut self) { self.proc.kill().ok(); let mut buf = String::new(); self.stderr.read_to_string(&mut buf).unwrap(); eprintln!("postgres stderr is {}", buf); std::fs::remove_dir_all(&self.dir).unwrap(); } } pub struct PgSetupData { pub connstr: String, } fn create_tmp_server() -> PgServerState { eprintln!("create tmp server"); let dir = std::env::current_dir() .unwrap() .join("tmp_pg") .join(Uuid::new_v4().to_string()); std::fs::create_dir_all(&dir).unwrap(); let output = Command::new("initdb") .arg("-D") .arg(&dir) .arg("-U") .arg("postgres") .output() .expect("failed to run initdb"); if !output.status.success() { std::io::stdout().write_all(&output.stdout).unwrap(); std::io::stderr().write_all(&output.stderr).unwrap(); panic!("postgres initdb failed") } let sockdir = dir.join("socket"); std::fs::create_dir(&sockdir).unwrap(); let mut proc = Command::new("postgres") .arg("-D") .arg(&dir) .arg("-k") .arg(&sockdir) .arg("-h") .arg("") .stderr(Stdio::piped()) .spawn() .expect("failed to run postgres"); let mut buf = String::new(); let mut stderr = BufReader::new(proc.stderr.take().unwrap()); loop { buf.clear(); stderr.read_line(&mut buf).unwrap(); if buf.contains("ready to accept connections") { break; } if proc.try_wait().unwrap().is_some() 
{ buf.clear(); stderr.read_to_string(&mut buf).unwrap(); eprint!("{}", buf); panic!("postgres process died"); } } eprintln!("createdtmp server!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); PgServerState { dir, sockdir, proc, stderr, } } static TMP_SERVER: Lazy<PgServerState> = Lazy::new(|| create_tmp_server()); pub fn pg_setup() -> PgSetupData { eprintln!("pg_setup"); let connstr = match std::env::var("BUTANE_PG_CONNSTR") { Ok(connstr) => connstr, Err(_) => { let server = &TMP_SERVER; let host = server.sockdir.to_str().unwrap(); format!("host={} user=postgres", host) } }; let new_dbname = format!("butane_test_{}", Uuid::new_v4().to_simple()); eprintln!("new db is `{}`", &new_dbname); let mut conn = butane::db::connect(&ConnectionSpec::new("pg", &connstr)).unwrap(); conn.execute(format!("CREATE DATABASE {};", new_dbname)) .unwrap(); let connstr = format!("{} dbname={}", connstr, new_dbname); PgSetupData { connstr } } pub fn pg_teardown(_data: PgSetupData) { } pub fn pg_connstr(data: &PgSetupData) -> String { data.connstr.clone() }
use butane::db::{Backend, Connection, ConnectionSpec}; use once_cell::sync::Lazy; use std::io::{BufRead, BufReader, Read, Write}; use std::path::PathBuf; use std::process::{ChildStderr, Command, Stdio}; use uuid_for_test::Uuid; pub fn pg_connection() -> (Connection, PgSetupData) { let backend = butane::db::get_backend("pg").unwrap(); let data = pg_setup(); (backend.connect(&pg_connstr(&data)).unwrap(), data) } pub fn pg_connspec() -> (ConnectionSpec, PgSetupData) { let data = pg_setup(); ( ConnectionSpec::new(butane::db::pg::BACKEND_NAME, pg_connstr(&data)), data, ) } struct PgServerState { pub dir: PathBuf, pub sockdir: PathBuf, pub proc: std::process::Child, pub stderr: BufReader<ChildStderr>, } impl Drop for PgServerState { fn drop(&mut self) { self.proc.kill().ok(); let mut buf = String::new(); self.stderr.read_to_string(&mut buf).unwrap(); eprintln!("postgres stderr is {}", buf); std::fs::remove_dir_all(&self.dir).unwrap(); } } pub struct PgSetupData { pub connstr: String, } fn create_tmp_server() -> PgServerState { eprintln!("create tmp server"); let dir = std::env::current_dir() .unwrap() .join("tmp_pg") .join(Uuid::new_v4().to_string()); std::fs::create_dir_all(&dir).unwrap(); let output = Command::new("initdb") .arg("-D") .arg(&dir) .arg("-U") .arg("postgres") .output() .expect("failed to run initdb"); if !output.status.success() { std::io::stdout().write_all(&output.stdout).unwrap(); std::io::stderr().write_all(&output.stderr).unwrap(); panic!("postgres initdb failed") } let sockdir = dir.join("socket"); std::fs::create_dir(&sockdir).unwrap(); let mut proc = Command::new("postgres") .arg("-D") .arg(&dir) .arg("-k") .arg(&sockdir) .arg("-h") .arg("") .stderr(Stdio::piped()) .spawn() .expect("failed to run postgres"); let mut buf = String::new(); let mut stderr = BufReader::new(proc.stderr.take().unwrap()); loop { buf.clear(); stderr.read_line(&mut buf).unwrap(); if buf.contains("ready to accept connections") { break; }
} eprintln!("createdtmp server!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"); PgServerState { dir, sockdir, proc, stderr, } } static TMP_SERVER: Lazy<PgServerState> = Lazy::new(|| create_tmp_server()); pub fn pg_setup() -> PgSetupData { eprintln!("pg_setup"); let connstr = match std::env::var("BUTANE_PG_CONNSTR") { Ok(connstr) => connstr, Err(_) => { let server = &TMP_SERVER; let host = server.sockdir.to_str().unwrap(); format!("host={} user=postgres", host) } }; let new_dbname = format!("butane_test_{}", Uuid::new_v4().to_simple()); eprintln!("new db is `{}`", &new_dbname); let mut conn = butane::db::connect(&ConnectionSpec::new("pg", &connstr)).unwrap(); conn.execute(format!("CREATE DATABASE {};", new_dbname)) .unwrap(); let connstr = format!("{} dbname={}", connstr, new_dbname); PgSetupData { connstr } } pub fn pg_teardown(_data: PgSetupData) { } pub fn pg_connstr(data: &PgSetupData) -> String { data.connstr.clone() }
if proc.try_wait().unwrap().is_some() { buf.clear(); stderr.read_to_string(&mut buf).unwrap(); eprint!("{}", buf); panic!("postgres process died"); }
if_condition
[ { "content": "pub fn sql_order(order: &[Order], w: &mut impl Write) {\n\n write!(w, \" ORDER BY \").unwrap();\n\n order.iter().fold(\"\", |sep, o| {\n\n let sql_dir = match o.direction {\n\n OrderDirection::Ascending => \"ASC\",\n\n OrderDirection::Descending => \"DESC\",\n\n };\n\n write!(w, \"{}{} {}\", sep, o.column, sql_dir).unwrap();\n\n \", \"\n\n });\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 2, "score": 261176.55618860986 }, { "content": "pub fn sql_limit(limit: i32, w: &mut impl Write) {\n\n write!(w, \" LIMIT {}\", limit).unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 3, "score": 261176.55618860986 }, { "content": "pub fn list_columns(columns: &[Column], w: &mut impl Write) {\n\n let mut colnames: Vec<&'static str> = Vec::new();\n\n columns.iter().for_each(|c| colnames.push(c.name()));\n\n write!(w, \"{}\", colnames.as_slice().join(\",\")).unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 4, "score": 261176.5561886099 }, { "content": "pub fn sql_offset(offset: i32, w: &mut impl Write) {\n\n write!(w, \" OFFSET {}\", offset).unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 5, "score": 261176.55618860986 }, { "content": "pub fn setup_db(backend: Box<dyn Backend>, conn: &mut Connection) {\n\n let mut root = std::env::current_dir().unwrap();\n\n root.push(\".butane/migrations\");\n\n let mut disk_migrations = butane::migrations::from_root(&root);\n\n let disk_current = disk_migrations.current();\n\n // Create an in-memory Migrations and write only to that. 
This\n\n // allows concurrent tetss to avoid stomping on eachother and is\n\n // also faster than real disk writes.\n\n let mut mem_migrations = MemMigrations::new();\n\n let mem_current = mem_migrations.current();\n\n\n\n butane::migrations::copy_migration(disk_current, mem_current).unwrap();\n\n\n\n assert!(\n\n mem_migrations\n\n .create_migration(&backend, &format!(\"init\"), None)\n\n .expect(\"expected to create migration without error\"),\n\n \"expected to create migration\"\n\n );\n\n println!(\"created current migration\");\n\n let to_apply = mem_migrations.unapplied_migrations(conn).unwrap();\n\n for m in to_apply {\n\n println!(\"Applying migration {}\", m.name());\n\n m.apply(conn).unwrap();\n\n }\n\n}\n\n\n", "file_path": "butane/tests/common/mod.rs", "rank": 6, "score": 249392.38538013026 }, { "content": "pub fn sql_select(columns: &[Column], table: &str, w: &mut impl Write) {\n\n write!(w, \"SELECT \").unwrap();\n\n list_columns(columns, w);\n\n write!(w, \" FROM {}\", table).unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 7, "score": 244255.74902879744 }, { "content": "pub fn sql_insert_or_update(table: &str, columns: &[Column], w: &mut impl Write) {\n\n write!(w, \"INSERT OR REPLACE \").unwrap();\n\n write!(w, \"INTO {} (\", table).unwrap();\n\n helper::list_columns(columns, w);\n\n write!(w, \") VALUES (\").unwrap();\n\n columns.iter().fold(\"\", |sep, _| {\n\n write!(w, \"{}?\", sep).unwrap();\n\n \", \"\n\n });\n\n write!(w, \")\").unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 8, "score": 240994.05067413388 }, { "content": "pub fn create_blog(conn: &Connection, name: impl Into<String>) -> Blog {\n\n let mut blog = Blog::new(name);\n\n blog.save(conn).unwrap();\n\n blog\n\n}\n\n\n", "file_path": "examples/getting_started/src/lib.rs", "rank": 9, "score": 228942.83371903974 }, { "content": "/// Copies the data in `from` to `to`.\n\npub fn copy_migration(from: &impl Migration, to: &mut impl 
MigrationMut) -> Result<()> {\n\n to.set_migration_from(from.migration_from()?.map(|s| s.to_string()))?;\n\n let db = from.db()?;\n\n for table in db.tables() {\n\n to.write_table(table)?;\n\n }\n\n for (k, v) in db.types() {\n\n to.add_type(k.clone(), v.clone())?;\n\n }\n\n for backend_name in from.sql_backends()? {\n\n let up_sql = from.up_sql(&backend_name)?;\n\n let down_sql = from.down_sql(&backend_name)?;\n\n if let (Some(up_sql), Some(down_sql)) = (up_sql, down_sql) {\n\n to.add_sql(&backend_name, &up_sql, &down_sql)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "butane_core/src/migrations/mod.rs", "rank": 10, "score": 223969.92006040236 }, { "content": "fn sql_column(col: query::Column, w: &mut impl Write) {\n\n match col.table() {\n\n Some(table) => write!(w, \"{}.{}\", table, col.name()),\n\n None => w.write_str(col.name()),\n\n }\n\n .unwrap()\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 13, "score": 208444.81127250002 }, { "content": "fn sql_joins(joins: Vec<Join>, w: &mut impl Write) {\n\n for join in joins {\n\n match join {\n\n Join::Inner {\n\n join_table,\n\n col1,\n\n col2,\n\n } => {\n\n // INNER JOIN <join_table> ON <col1> = <col2>\n\n write!(w, \"INNER JOIN {} ON \", join_table).unwrap();\n\n sql_column(col1, w);\n\n w.write_str(\" = \").unwrap();\n\n sql_column(col2, w);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 14, "score": 208444.81127250002 }, { "content": "/// Connect to a database. 
For non-boxed connections, see individual\n\n/// [Backend][crate::db::Backend] implementations.\n\npub fn connect(spec: &ConnectionSpec) -> Result<Connection> {\n\n get_backend(&spec.backend_name)\n\n .ok_or_else(|| Error::UnknownBackend(spec.backend_name.clone()))?\n\n .connect(&spec.conn_str)\n\n}\n\n\n", "file_path": "butane_core/src/db/mod.rs", "rank": 15, "score": 204158.02886560513 }, { "content": "pub fn create_post(conn: &Connection, blog: &Blog, title: String, body: String) -> Post {\n\n let mut new_post = Post::new(blog, title, body);\n\n new_post.save(conn).unwrap();\n\n new_post\n\n}\n\n\n", "file_path": "examples/getting_started/src/lib.rs", "rank": 16, "score": 183666.05849974154 }, { "content": "fn basic_dropped_transaction(mut conn: Connection) {\n\n // Create an object with a transaction but never commit it\n\n {\n\n let tr = conn.transaction().unwrap();\n\n let mut foo = Foo::new(1);\n\n foo.bar = 42;\n\n foo.save(&tr).unwrap();\n\n }\n\n\n\n // Find the object\n\n match Foo::get(&conn, 1) {\n\n Ok(_) => panic!(\"object should not exist\"),\n\n Err(butane::Error::NoSuchObject) => (),\n\n Err(e) => panic!(\"Unexpected error {}\", e),\n\n }\n\n}\n\ntestall!(basic_dropped_transaction);\n\n\n", "file_path": "butane/tests/basic.rs", "rank": 18, "score": 182735.35503477612 }, { "content": "pub fn sqlite_connspec() -> ConnectionSpec {\n\n ConnectionSpec::new(butane::db::sqlite::BACKEND_NAME, \":memory:\")\n\n}\n\n\n", "file_path": "butane/tests/common/mod.rs", "rank": 19, "score": 182711.70455201398 }, { "content": "pub fn sqlite_connection() -> Connection {\n\n let backend = butane::db::get_backend(\"sqlite\").unwrap();\n\n backend.connect(\":memory:\").unwrap()\n\n}\n\n\n", "file_path": "butane/tests/common/mod.rs", "rank": 20, "score": 163275.07447083917 }, { "content": "// implement the DataObject trait\n\npub fn impl_dbobject(ast_struct: &ItemStruct, config: &Config) -> TokenStream2 {\n\n let tyname = &ast_struct.ident;\n\n let tablelit = 
make_tablelit(config, tyname);\n\n let fields_type = fields_type(tyname);\n\n\n\n let err = verify_fields(ast_struct);\n\n if let Some(err) = err {\n\n return err;\n\n }\n\n\n\n let pk_field = pk_field(&ast_struct).unwrap();\n\n let pktype = &pk_field.ty;\n\n let pkident = pk_field.ident.clone().unwrap();\n\n let pklit = make_ident_literal_str(&pkident);\n\n\n\n let insert_cols = columns(ast_struct, |f| !is_auto(f));\n\n let save_cols = columns(ast_struct, |f| !is_auto(f) && f != &pk_field);\n\n\n\n let mut post_insert: Vec<TokenStream2> = Vec::new();\n\n add_post_insert_for_auto(&pk_field, &mut post_insert);\n", "file_path": "butane_core/src/codegen/dbobj.rs", "rank": 21, "score": 162952.35698196513 }, { "content": "pub fn impl_dataresult(ast_struct: &ItemStruct, dbo: &Ident) -> TokenStream2 {\n\n let tyname = &ast_struct.ident;\n\n let numdbfields = fields(&ast_struct).filter(|f| is_row_field(f)).count();\n\n let rows = rows_for_from(&ast_struct);\n\n let cols = columns(ast_struct, |_| true);\n\n\n\n let many_init: TokenStream2 =\n\n fields(&ast_struct)\n\n .filter(|f| is_many_to_many(f))\n\n .map(|f| {\n\n let ident = f\n\n .ident\n\n .clone()\n\n .expect(\"Fields must be named for butane\");\n\n let many_table_lit = many_table_lit(&ast_struct, f);\n\n let pksqltype = quote!(<<Self as butane::DataObject>::PKType as butane::FieldType>::SQLTYPE);\n\n quote!(obj.#ident.ensure_init(#many_table_lit, butane::ToSql::to_sql(obj.pk()), #pksqltype);)\n\n }).collect();\n\n\n\n let dbo_is_self = dbo == tyname;\n", "file_path": "butane_core/src/codegen/dbobj.rs", "rank": 22, "score": 162948.26076855158 }, { "content": "/// Find a backend by name.\n\npub fn get_backend(name: &str) -> Option<Box<dyn Backend>> {\n\n match name {\n\n #[cfg(feature = \"sqlite\")]\n\n sqlite::BACKEND_NAME => Some(Box::new(sqlite::SQLiteBackend::new())),\n\n #[cfg(feature = \"pg\")]\n\n pg::BACKEND_NAME => Some(Box::new(pg::PgBackend::new())),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": 
"butane_core/src/db/mod.rs", "rank": 23, "score": 161480.9397165999 }, { "content": "/// An object in the database.\n\n///\n\n/// Rather than implementing this type manually, use the\n\n/// `#[model]` attribute.\n\npub trait DataObject: DataResult<DBO = Self> {\n\n /// The type of the primary key field.\n\n type PKType: PrimaryKeyType;\n\n type Fields: Default;\n\n /// The name of the primary key column.\n\n const PKCOL: &'static str;\n\n /// The name of the table.\n\n const TABLE: &'static str;\n\n /// Get the primary key\n\n fn pk(&self) -> &Self::PKType;\n\n /// Find this object in the database based on primary key.\n\n fn get(conn: &impl ConnectionMethods, id: impl Borrow<Self::PKType>) -> Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n <Self as DataResult>::query()\n\n .filter(query::BoolExpr::Eq(\n\n Self::PKCOL,\n\n query::Expr::Val(id.borrow().to_sql()),\n\n ))\n", "file_path": "butane_core/src/lib.rs", "rank": 24, "score": 160841.30778711478 }, { "content": "pub fn establish_connection() -> Connection {\n\n butane::db::connect(&ConnectionSpec::load(\".butane/connection.json\").unwrap()).unwrap()\n\n}\n\n\n", "file_path": "examples/getting_started/src/lib.rs", "rank": 25, "score": 160673.17363359654 }, { "content": "fn migrations_dir() -> PathBuf {\n\n let mut dir = PathBuf::from(\n\n std::env::var(\"CARGO_MANIFEST_DIR\").expect(\"CARGO_MANIFEST_DIR expected to be set\"),\n\n );\n\n dir.push(\".butane\");\n\n dir.push(\"migrations\");\n\n dir\n\n}\n", "file_path": "butane_codegen/src/lib.rs", "rank": 26, "score": 160214.01579428703 }, { "content": "pub fn sql_literal_value(val: SqlVal) -> Result<String> {\n\n use SqlVal::*;\n\n match val {\n\n SqlVal::Null => Ok(\"NULL\".to_string()),\n\n SqlVal::Bool(val) => Ok(val.to_string()),\n\n Int(val) => Ok(val.to_string()),\n\n BigInt(val) => Ok(val.to_string()),\n\n Real(val) => Ok(val.to_string()),\n\n Text(val) => Ok(format!(\"'{}'\", val)),\n\n Blob(val) => Ok(format!(\"x'{}'\", hex::encode_upper(val))),\n\n 
#[cfg(feature = \"datetime\")]\n\n Timestamp(ndt) => Ok(ndt.format(\"'%Y-%m-%dT%H:%M:%S%.f'\").to_string()),\n\n Custom(val) => Err(Error::LiteralForCustomUnsupported((*val).clone())),\n\n }\n\n}\n", "file_path": "butane_core/src/db/helper.rs", "rank": 27, "score": 157290.25963391544 }, { "content": "fn base_dir() -> Result<PathBuf> {\n\n std::env::current_dir()\n\n .map(|d| d.join(\".butane\"))\n\n .map_err(|e| e.into())\n\n}\n\n\n", "file_path": "butane_cli/src/main.rs", "rank": 28, "score": 153689.49966704298 }, { "content": "fn sql_for_op(current: &mut ADB, op: &Operation) -> Result<String> {\n\n match op {\n\n Operation::AddTable(table) => Ok(create_table(&table)?),\n\n Operation::RemoveTable(name) => Ok(drop_table(&name)),\n\n Operation::AddColumn(tbl, col) => add_column(&tbl, &col),\n\n Operation::RemoveColumn(tbl, name) => Ok(remove_column(&tbl, &name)),\n\n Operation::ChangeColumn(tbl, old, new) => change_column(current, &tbl, &old, Some(new)),\n\n }\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 29, "score": 151717.1847508644 }, { "content": "fn sql_for_op(current: &mut ADB, op: &Operation) -> Result<String> {\n\n match op {\n\n Operation::AddTable(table) => Ok(create_table(&table)),\n\n Operation::RemoveTable(name) => Ok(drop_table(&name)),\n\n Operation::AddColumn(tbl, col) => add_column(&tbl, &col),\n\n Operation::RemoveColumn(tbl, name) => Ok(remove_column(current, &tbl, &name)),\n\n Operation::ChangeColumn(tbl, old, new) => Ok(change_column(current, &tbl, &old, Some(new))),\n\n }\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 30, "score": 151717.1847508644 }, { "content": "pub fn handle_expr(fields: &impl ToTokens, expr: &Expr) -> TokenStream2 {\n\n match expr {\n\n Expr::Binary(binop) => handle_bin_op(fields, binop),\n\n Expr::MethodCall(mcall) => handle_call(fields, mcall),\n\n Expr::Path(path) => handle_path(fields, path),\n\n Expr::Lit(lit) => lit.lit.clone().into_token_stream(),\n\n Expr::Block(block) => 
handle_block(&block.block),\n\n Expr::Group(group) => handle_expr(fields, group.expr.as_ref()),\n\n _ => {\n\n let lit = LitStr::new(\n\n &format!(\n\n \"Unsupported filter expression '{}' \\ndebug info: {:?}\",\n\n expr.clone().into_token_stream(),\n\n expr\n\n ),\n\n Span::call_site(),\n\n );\n\n quote!(compile_error!(#lit))\n\n }\n\n }\n\n}\n\n\n", "file_path": "butane_codegen/src/filter.rs", "rank": 31, "score": 150448.29365704305 }, { "content": "fn basic_rollback_transaction(mut conn: Connection) {\n\n let tr = conn.transaction().unwrap();\n\n\n\n // Create an object with a transaction but then roll back the transaction\n\n let mut foo = Foo::new(1);\n\n foo.bar = 42;\n\n foo.save(&tr).unwrap();\n\n tr.rollback().unwrap();\n\n\n\n // Find the object\n\n match Foo::get(&conn, 1) {\n\n Ok(_) => panic!(\"object should not exist\"),\n\n Err(butane::Error::NoSuchObject) => (),\n\n Err(e) => panic!(\"Unexpected error {}\", e),\n\n }\n\n}\n\ntestall!(basic_rollback_transaction);\n\n\n", "file_path": "butane/tests/basic.rs", "rank": 32, "score": 149065.54763727245 }, { "content": "fn basic_committed_transaction(mut conn: Connection) {\n\n let tr = conn.transaction().unwrap();\n\n\n\n // Create an object with a transaction and commit it\n\n let mut foo = Foo::new(1);\n\n foo.bar = 42;\n\n foo.save(&tr).unwrap();\n\n tr.commit().unwrap();\n\n\n\n // Find the object\n\n let foo2 = Foo::get(&conn, 1).unwrap();\n\n assert_eq!(foo, foo2);\n\n}\n\ntestall!(basic_committed_transaction);\n\n\n", "file_path": "butane/tests/basic.rs", "rank": 33, "score": 149065.54763727245 }, { "content": "pub fn write_table_to_disk<M>(\n\n ms: &mut impl MigrationsMut<M = M>,\n\n ast_struct: &ItemStruct,\n\n config: &dbobj::Config,\n\n) -> Result<()>\n\nwhere\n\n M: MigrationMut,\n\n{\n\n let current_migration = ms.current();\n\n for table in create_atables(ast_struct, config) {\n\n current_migration.write_table(&table)?;\n\n }\n\n if let Some(name) = &config.table_name {\n\n // Custom table 
name, need to also be able to map with the type name\n\n current_migration.add_type(\n\n TypeKey::PK(ast_struct.ident.to_string()),\n\n DeferredSqlType::Deferred(TypeKey::PK(name.clone())),\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "butane_core/src/codegen/migration.rs", "rank": 34, "score": 148550.15879380013 }, { "content": "#[allow(dead_code)] // only used by some test files\n\npub fn setup_blog(conn: &Connection) {\n\n let mut cats_blog = Blog::new(1, \"Cats\");\n\n cats_blog.save(conn).unwrap();\n\n let mut mountains_blog = Blog::new(2, \"Mountains\");\n\n mountains_blog.save(conn).unwrap();\n\n\n\n let mut tag_asia = Tag::new(\"asia\");\n\n tag_asia.save(conn).unwrap();\n\n let mut tag_danger = Tag::new(\"danger\");\n\n tag_danger.save(conn).unwrap();\n\n let mut tag_monkeys = Tag::new(\"monkeys\");\n\n tag_monkeys.save(conn).unwrap();\n\n\n\n let mut post = Post::new(\n\n 1,\n\n \"The Tiger\",\n\n \"The tiger is a cat which would very much like to eat you.\",\n\n &cats_blog,\n\n );\n\n post.published = true;\n", "file_path": "butane/tests/common/blog.rs", "rank": 35, "score": 147485.15750307273 }, { "content": "fn drop_table(name: &str) -> String {\n\n format!(\"DROP TABLE {};\", name)\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 36, "score": 147165.04820136467 }, { "content": "fn drop_table(name: &str) -> String {\n\n format!(\"DROP TABLE {};\", name)\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 37, "score": 147165.04820136467 }, { "content": "fn remove_column(current: &mut ADB, tbl_name: &str, name: &str) -> String {\n\n let old = current\n\n .get_table(tbl_name)\n\n .and_then(|table| table.column(name))\n\n .cloned();\n\n match old {\n\n Some(col) => change_column(current, tbl_name, &col, None),\n\n None => {\n\n crate::warn!(\n\n \"Cannot remove column {} that does not exist from table {}\",\n\n name,\n\n tbl_name\n\n );\n\n \"\".to_string()\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"butane_core/src/db/sqlite.rs", "rank": 38, "score": 144728.78174618247 }, { "content": "fn tmp_table_name(name: &str) -> String {\n\n format!(\"{}__butane_tmp\", name)\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 39, "score": 144668.4592795174 }, { "content": "fn tmp_table_name(name: &str) -> String {\n\n format!(\"{}__butane_tmp\", name)\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 40, "score": 144668.4592795174 }, { "content": "fn rollback_latest(mut conn: Connection) -> Result<()> {\n\n match get_migrations()?.latest() {\n\n Some(m) => {\n\n println!(\"Rolling back migration {}\", m.name());\n\n m.downgrade(&mut conn)?;\n\n }\n\n None => {\n\n eprintln!(\"No migrations applied!\");\n\n std::process::exit(1)\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n", "file_path": "butane_cli/src/main.rs", "rank": 41, "score": 143555.55347878262 }, { "content": "pub fn add_fieldexprs(ast_struct: &ItemStruct) -> TokenStream2 {\n\n let tyname = &ast_struct.ident;\n\n let vis = &ast_struct.vis;\n\n let fieldexprs: Vec<TokenStream2> = fields(ast_struct)\n\n .map(|f| {\n\n if is_many_to_many(f) {\n\n fieldexpr_func_many(f, ast_struct)\n\n } else {\n\n fieldexpr_func_regular(f, ast_struct)\n\n }\n\n })\n\n .collect();\n\n\n\n let fields_type = fields_type(tyname);\n\n quote!(\n\n impl #tyname {\n\n pub fn fields() -> #fields_type {\n\n #fields_type::default()\n\n }\n\n }\n", "file_path": "butane_core/src/codegen/dbobj.rs", "rank": 42, "score": 143549.9386338931 }, { "content": "/// Writes to `w` the SQL to express the expression given in `expr`. 
Values contained in `expr` are rendered\n\n/// as placeholders in the SQL string and the actual values are added to `values`.\n\npub fn sql_for_expr<F, P, W>(expr: Expr, f: F, values: &mut Vec<SqlVal>, pls: &mut P, w: &mut W)\n\nwhere\n\n F: Fn(Expr, &mut Vec<SqlVal>, &mut P, &mut W),\n\n P: PlaceholderSource,\n\n W: Write,\n\n{\n\n match expr {\n\n Expr::Column(name) => w.write_str(name),\n\n Val(v) => match v {\n\n // No risk of SQL injection with integers and the\n\n // different sizes are tricky with the PG backend's binary\n\n // protocol\n\n SqlVal::Int(i) => write!(w, \"{}\", i),\n\n SqlVal::BigInt(i) => write!(w, \"{}\", i),\n\n _ => {\n\n values.push(v);\n\n w.write_str(&pls.next_placeholder())\n\n }\n\n },\n\n Placeholder => w.write_str(&pls.next_placeholder()),\n", "file_path": "butane_core/src/db/helper.rs", "rank": 43, "score": 142156.35987036224 }, { "content": "fn fields(ast_struct: &ItemStruct) -> impl Iterator<Item = &Field> {\n\n ast_struct\n\n .fields\n\n .iter()\n\n .filter(|f| f.ident.clone().unwrap() != \"state\")\n\n}\n\n\n", "file_path": "butane_core/src/codegen/mod.rs", "rank": 44, "score": 140300.81736870788 }, { "content": "fn rollback_to(mut conn: Connection, to: &str) -> Result<()> {\n\n let ms = get_migrations()?;\n\n let to_migration = match ms.get_migration(to) {\n\n Some(m) => m,\n\n None => {\n\n eprintln!(\"No such migration!\");\n\n std::process::exit(1);\n\n }\n\n };\n\n\n\n let to_unapply = ms.migrations_since(&to_migration)?;\n\n if to_unapply.is_empty() {\n\n eprintln!(\"That is the latest migration, not rolling back to anything. If you expected something to happen, try specifying the migration to rollback to.\");\n\n }\n\n for m in to_unapply.into_iter().rev() {\n\n println!(\"Rolling back migration {}\", m.name());\n\n m.downgrade(&mut conn)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "butane_cli/src/main.rs", "rank": 45, "score": 138646.71405267285 }, { "content": "/// Database backend. 
A boxed implementation can be returned by name via [get_backend][crate::db::get_backend].\n\npub trait Backend {\n\n fn name(&self) -> &'static str;\n\n fn create_migration_sql(&self, current: &adb::ADB, ops: Vec<adb::Operation>) -> Result<String>;\n\n fn connect(&self, conn_str: &str) -> Result<Connection>;\n\n}\n\n\n\nimpl Backend for Box<dyn Backend> {\n\n fn name(&self) -> &'static str {\n\n self.deref().name()\n\n }\n\n fn create_migration_sql(&self, current: &adb::ADB, ops: Vec<adb::Operation>) -> Result<String> {\n\n self.deref().create_migration_sql(current, ops)\n\n }\n\n fn connect(&self, conn_str: &str) -> Result<Connection> {\n\n self.deref().connect(conn_str)\n\n }\n\n}\n\n\n", "file_path": "butane_core/src/db/mod.rs", "rank": 46, "score": 137096.39829180128 }, { "content": "pub fn existing_blog(conn: &Connection) -> Option<Blog> {\n\n Blog::query().load_first(conn).unwrap()\n\n}\n", "file_path": "examples/getting_started/src/lib.rs", "rank": 47, "score": 134705.59911102278 }, { "content": "fn columns<P>(ast_struct: &ItemStruct, mut predicate: P) -> TokenStream2\n\nwhere\n\n P: FnMut(&Field) -> bool,\n\n{\n\n fields(&ast_struct)\n\n .filter(|f| is_row_field(f) && predicate(f))\n\n .map(|f| match f.ident.clone() {\n\n Some(fname) => {\n\n let ident = make_ident_literal_str(&fname);\n\n let fty = &f.ty;\n\n quote!(butane::db::Column::new(#ident, <#fty as butane::FieldType>::SQLTYPE),)\n\n }\n\n None => quote_spanned! {\n\n f.span() =>\n\n compile_error!(\"Fields must be named for butane\");\n\n },\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "butane_core/src/codegen/dbobj.rs", "rank": 48, "score": 134119.17425044964 }, { "content": "/// Database connection.\n\npub trait BackendConnection: ConnectionMethods + Send + 'static {\n\n /// Begin a database transaction. 
The transaction object must be\n\n /// used in place of this connection until it is committed and aborted.\n\n fn transaction(&mut self) -> Result<Transaction>;\n\n /// Retrieve the backend backend this connection\n\n fn backend(&self) -> Box<dyn Backend>;\n\n fn backend_name(&self) -> &'static str;\n\n /// Tests if the connection has been closed. Backends which do not\n\n /// support this check should return false.\n\n fn is_closed(&self) -> bool;\n\n}\n\n\n\n/// Database connection. May be a connection to any type of database\n\n/// as it is a boxed abstraction over a specific connection.\n\npub struct Connection {\n\n conn: Box<dyn BackendConnection>,\n\n}\n\nimpl Connection {\n\n pub fn execute(&mut self, sql: impl AsRef<str>) -> Result<()> {\n\n self.conn.execute(sql.as_ref())\n", "file_path": "butane_core/src/db/mod.rs", "rank": 50, "score": 129346.16377124927 }, { "content": "/// Builds code for pushing SqlVals for each column satisfying predicate into a vec called `values`\n\nfn push_values<P>(ast_struct: &ItemStruct, mut predicate: P) -> Vec<TokenStream2>\n\nwhere\n\n P: FnMut(&Field) -> bool,\n\n{\n\n fields(&ast_struct)\n\n .filter(|f| is_row_field(f) && predicate(f))\n\n .map(|f| {\n\n let ident = f.ident.clone().unwrap();\n\n if is_row_field(f) {\n\n if !is_auto(f) {\n\n quote!(values.push(butane::ToSql::to_sql_ref(&self.#ident));)\n\n } else {\n\n quote!()\n\n }\n\n } else if is_many_to_many(f) {\n\n quote!(\n\n self.#ident.ensure_init(Self::TABLE, self.pk().clone(), <Self as butane::DataObject>::PKType);\n\n self.#ident.save()?;\n\n )\n\n } else {\n\n\t\t\t\t\t\t\t\tmake_compile_error!(f.span()=> \"Unexpected struct field\")\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "butane_core/src/codegen/dbobj.rs", "rank": 51, "score": 128635.0037071729 }, { "content": "#[proc_macro_attribute]\n\npub fn dataresult(args: TokenStream, input: TokenStream) -> TokenStream {\n\n codegen::dataresult(args.into(), input.into()).into()\n\n}\n\n\n", "file_path": 
"butane_codegen/src/lib.rs", "rank": 52, "score": 128538.30226392516 }, { "content": "#[proc_macro_attribute]\n\npub fn model(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n codegen::model_with_migrations(input.into(), &mut migrations_for_dir()).into()\n\n}\n\n\n\n/// Attribute macro which generates an implementation of\n\n/// [`DataResult`](butane_core::DataResult). Continuing with our blog\n\n/// post example from [model](macro@model), we could create a `DataResult` with\n\n/// only some of the fields from `Post` (to avoid fetching all of them in a query).\n\n///\n\n/// ```ignore\n\n/// #[dataresult(Post)]\n\n/// pub struct PostMetadata {\n\n/// pub id: i64,\n\n/// pub title: String,\n\n/// pub pub_time: Option<NaiveDateTime>,\n\n/// }\n\n/// ```\n\n///\n\n/// Note that the attribute takes a parameter saying which Model this\n\n/// result is a subset of. Every field named in the DataResult must be\n\n/// present in the Model.\n", "file_path": "butane_codegen/src/lib.rs", "rank": 53, "score": 128538.30226392516 }, { "content": "fn migration_add_field(conn: &mut Connection, up_sql: &str, down_sql: &str) {\n\n let init = quote! {\n\n struct Foo {\n\n id: i64,\n\n bar: String,\n\n }\n\n };\n\n\n\n let v2 = quote! 
{\n\n struct Foo {\n\n id: i64,\n\n bar: String,\n\n baz: u32,\n\n }\n\n };\n\n test_migrate(conn, init, v2, up_sql, down_sql);\n\n}\n\n\n", "file_path": "butane/tests/migration-tests.rs", "rank": 54, "score": 127344.51119856987 }, { "content": "fn default_name() -> String {\n\n Utc::now().format(\"%Y%m%d_%H%M%S%3f\").to_string()\n\n}\n\n\n", "file_path": "butane_cli/src/main.rs", "rank": 55, "score": 126865.87086172865 }, { "content": "fn string_pk(conn: Connection) {\n\n let mut foo = Foo::new(1);\n\n foo.save(&conn).unwrap();\n\n let mut bar = Bar::new(\"tarzan\", foo);\n\n bar.save(&conn).unwrap();\n\n\n\n let bar2 = Bar::get(&conn, \"tarzan\".to_string()).unwrap();\n\n assert_eq!(bar, bar2);\n\n}\n\ntestall!(string_pk);\n\n\n", "file_path": "butane/tests/basic.rs", "rank": 56, "score": 126543.25953680443 }, { "content": "#[proc_macro_attribute]\n\npub fn butane_type(args: TokenStream, input: TokenStream) -> TokenStream {\n\n codegen::butane_type_with_migrations(args.into(), input.into(), &mut migrations_for_dir())\n\n .into()\n\n}\n\n\n", "file_path": "butane_codegen/src/lib.rs", "rank": 57, "score": 126509.24490002729 }, { "content": "pub fn dataresult(args: TokenStream2, input: TokenStream2) -> TokenStream2 {\n\n let dbo: Ident = syn::parse2(args)\n\n .expect(\"Model type must be specified as argument to dataresult attribute\");\n\n let mut ast_struct: ItemStruct = syn::parse2(input).unwrap();\n\n\n\n // Filter out our helper attributes\n\n let attrs: Vec<Attribute> = filter_helper_attributes(&ast_struct);\n\n\n\n let state_attrs = if has_derive_serialize(&attrs) {\n\n quote!(#[serde(skip)])\n\n } else {\n\n TokenStream2::new()\n\n };\n\n\n\n let vis = &ast_struct.vis;\n\n\n\n let impltraits = dbobj::impl_dataresult(&ast_struct, &dbo);\n\n\n\n let fields = match remove_helper_field_attributes(&mut ast_struct.fields) {\n\n Ok(fields) => &fields.named,\n", "file_path": "butane_core/src/codegen/mod.rs", "rank": 58, "score": 126504.99335235044 }, { "content": 
"fn migration_add_field_with_default(conn: &mut Connection, up_sql: &str, down_sql: &str) {\n\n let init = quote! {\n\n struct Foo {\n\n id: i64,\n\n bar: String,\n\n }\n\n };\n\n\n\n let v2 = quote! {\n\n struct Foo {\n\n id: i64,\n\n bar: String,\n\n #[default=42]\n\n baz: u32,\n\n }\n\n };\n\n test_migrate(conn, init, v2, up_sql, down_sql);\n\n}\n\n\n", "file_path": "butane/tests/migration-tests.rs", "rank": 59, "score": 125316.22544335652 }, { "content": "fn migration_add_and_remove_field(conn: &mut Connection, up_sql: &str, down_sql: &str) {\n\n let init = quote! {\n\n struct Foo {\n\n id: i64,\n\n bar: String,\n\n }\n\n };\n\n\n\n let v2 = quote! {\n\n struct Foo {\n\n id: i64,\n\n baz: u32,\n\n }\n\n };\n\n test_migrate(conn, init, v2, up_sql, down_sql);\n\n}\n\n\n", "file_path": "butane/tests/migration-tests.rs", "rank": 60, "score": 125316.22544335652 }, { "content": "pub fn sqlite_setup() {}\n", "file_path": "butane/tests/common/mod.rs", "rank": 61, "score": 124911.9715122323 }, { "content": "fn migration_delete_table(conn: &mut Connection, expected_up_sql: &str, expected_down_sql: &str) {\n\n let init_tokens = quote! {\n\n struct Foo {\n\n id: i64,\n\n bar: String,\n\n }\n\n };\n\n\n\n let mut ms = MemMigrations::new();\n\n let backend = conn.backend();\n\n model_with_migrations(init_tokens, &mut ms);\n\n assert!(ms.create_migration(&backend, \"init\", None).unwrap());\n\n\n\n ms.current().delete_table(\"Foo\").unwrap();\n\n assert!(ms\n\n .create_migration(&backend, \"v2\", ms.latest().as_ref())\n\n .unwrap());\n\n\n\n let mut to_apply = ms.unapplied_migrations(conn).unwrap();\n\n assert_eq!(to_apply.len(), 2);\n", "file_path": "butane/tests/migration-tests.rs", "rank": 62, "score": 123381.69344256603 }, { "content": "pub fn sqlite_teardown(_: ()) {}\n\n\n\n#[macro_export]\n\nmacro_rules! maketest {\n\n ($fname:ident, $backend:expr, $connstr:expr, $dataname:ident) => {\n\n paste::item! 
{\n\n #[test]\n\n pub fn [<$fname _ $backend>]() {\n\n let backend = butane::db::get_backend(&stringify!($backend)).expect(\"Could not find backend\");\n\n\t\t\t\t\t\t\t\tlet $dataname = crate::common::[<$backend _setup>]();\n\n\t\t\t\t\t\t\t\teprintln!(\"connecting to {}\", &$connstr);\n\n let mut conn = backend.connect(&$connstr).expect(\"Could not connect backend\");\n\n crate::common::setup_db(backend, &mut conn);\n\n $fname(conn);\n\n\t\t\t\t\t\t\t\tcrate::common::[<$backend _teardown>]($dataname);\n\n }\n\n }\n\n };\n\n}\n\n\n", "file_path": "butane/tests/common/mod.rs", "rank": 63, "score": 121293.42574988314 }, { "content": "pub fn sql_update_with_placeholders(\n\n table: &str,\n\n pkcol: Column,\n\n columns: &[Column],\n\n pls: &mut impl PlaceholderSource,\n\n w: &mut impl Write,\n\n) {\n\n write!(w, \"UPDATE {} SET \", table).unwrap();\n\n columns.iter().fold(\"\", |sep, c| {\n\n write!(w, \"{}{} = {}\", sep, c.name(), pls.next_placeholder()).unwrap();\n\n \", \"\n\n });\n\n write!(w, \" WHERE {} = {}\", pkcol.name(), pls.next_placeholder()).unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 64, "score": 121178.50221300994 }, { "content": "pub fn sql_insert_with_placeholders(\n\n table: &str,\n\n columns: &[Column],\n\n pls: &mut impl PlaceholderSource,\n\n w: &mut impl Write,\n\n) {\n\n write!(w, \"INSERT INTO {} (\", table).unwrap();\n\n list_columns(columns, w);\n\n write!(w, \") VALUES (\").unwrap();\n\n columns.iter().fold(\"\", |sep, _| {\n\n write!(w, \"{}{}\", sep, pls.next_placeholder()).unwrap();\n\n \", \"\n\n });\n\n write!(w, \")\").unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 65, "score": 121178.50221300994 }, { "content": "pub fn sql_insert_or_replace_with_placeholders(\n\n table: &str,\n\n columns: &[Column],\n\n pkcol: &Column,\n\n w: &mut impl Write,\n\n) {\n\n write!(w, \"INSERT \").unwrap();\n\n write!(w, \"INTO {} (\", table).unwrap();\n\n helper::list_columns(columns, w);\n\n 
write!(w, \") VALUES (\").unwrap();\n\n columns.iter().fold(1, |n, _| {\n\n let sep = if n == 1 { \"\" } else { \", \" };\n\n write!(w, \"{}${}\", sep, n).unwrap();\n\n n + 1\n\n });\n\n write!(w, \")\").unwrap();\n\n write!(w, \" ON CONFLICT ({}) DO UPDATE SET (\", pkcol.name()).unwrap();\n\n helper::list_columns(columns, w);\n\n write!(w, \") = (\").unwrap();\n\n columns.iter().fold(\"\", |sep, c| {\n\n write!(w, \"{}excluded.{}\", sep, c.name()).unwrap();\n\n \", \"\n\n });\n\n write!(w, \")\").unwrap();\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 66, "score": 119462.0345310795 }, { "content": "pub fn model_with_migrations<M>(\n\n input: TokenStream2,\n\n ms: &mut impl MigrationsMut<M = M>,\n\n) -> TokenStream2\n\nwhere\n\n M: MigrationMut,\n\n{\n\n // Transform into a derive because derives can have helper\n\n // attributes but proc macro attributes can't yet (nor can they\n\n // create field attributes)\n\n let mut ast_struct: ItemStruct = syn::parse2(input).unwrap();\n\n let config: dbobj::Config = config_from_attributes(&ast_struct);\n\n\n\n // Filter out our helper attributes\n\n let attrs: Vec<Attribute> = filter_helper_attributes(&ast_struct);\n\n\n\n let state_attrs = if has_derive_serialize(&attrs) {\n\n quote!(#[serde(skip)])\n\n } else {\n\n TokenStream2::new()\n", "file_path": "butane_core/src/codegen/mod.rs", "rank": 67, "score": 117559.9564506608 }, { "content": "pub fn butane_type_with_migrations<M>(\n\n args: TokenStream2,\n\n input: TokenStream2,\n\n ms: &mut impl MigrationsMut<M = M>,\n\n) -> TokenStream2\n\nwhere\n\n M: MigrationMut,\n\n{\n\n let mut tyinfo: Option<CustomTypeInfo> = None;\n\n let type_alias: syn::Result<ItemType> = syn::parse2(input.clone());\n\n if let Ok(type_alias) = type_alias {\n\n tyinfo = Some(CustomTypeInfo {\n\n name: type_alias.ident.to_string(),\n\n ty: get_deferred_sql_type(&type_alias.ty),\n\n })\n\n }\n\n\n\n if tyinfo.is_none() {\n\n // For types below here, we need the SqlType given to us\n\n 
let sqltype = match parse_butane_type_args(args) {\n", "file_path": "butane_core/src/codegen/mod.rs", "rank": 68, "score": 115843.48876873034 }, { "content": "fn load_connspec() -> Result<db::ConnectionSpec> {\n\n match db::ConnectionSpec::load(&base_dir()?) {\n\n Ok(spec) => Ok(spec),\n\n Err(butane::Error::IO(_)) => {\n\n eprintln!(\"No Butane connection info found. Did you run butane init?\");\n\n std::process::exit(1);\n\n }\n\n Err(e) => Err(e.into()),\n\n }\n\n}\n\n\n", "file_path": "butane_cli/src/main.rs", "rank": 69, "score": 114404.53122965302 }, { "content": "fn define_column(col: &AColumn) -> String {\n\n let mut constraints: Vec<String> = Vec::new();\n\n if !col.nullable() {\n\n constraints.push(\"NOT NULL\".to_string());\n\n }\n\n if col.is_pk() {\n\n constraints.push(\"PRIMARY KEY\".to_string());\n\n }\n\n if col.is_auto() && !col.is_pk() {\n\n // integer primary key is automatically an alias for ROWID,\n\n // and we only allow auto on integer types\n\n constraints.push(\"AUTOINCREMENT\".to_string());\n\n }\n\n if col.unique() {\n\n constraints.push(\"UNIQUE\".to_string());\n\n }\n\n format!(\n\n \"{} {} {}\",\n\n &col.name(),\n\n col_sqltype(col),\n\n constraints.join(\" \")\n\n )\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 70, "score": 114398.68287601447 }, { "content": "fn create_table(table: &ATable) -> String {\n\n let coldefs = table\n\n .columns\n\n .iter()\n\n .map(define_column)\n\n .collect::<Vec<String>>()\n\n .join(\",\\n\");\n\n format!(\"CREATE TABLE {} (\\n{}\\n);\", table.name, coldefs)\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 71, "score": 114398.68287601447 }, { "content": "fn create_table(table: &ATable) -> Result<String> {\n\n let coldefs = table\n\n .columns\n\n .iter()\n\n .map(define_column)\n\n .collect::<Result<Vec<String>>>()?\n\n .join(\",\\n\");\n\n Ok(format!(\"CREATE TABLE {} (\\n{}\\n);\", table.name, coldefs))\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 
72, "score": 109859.656505238 }, { "content": "fn define_column(col: &AColumn) -> Result<String> {\n\n let mut constraints: Vec<String> = Vec::new();\n\n if !col.nullable() {\n\n constraints.push(\"NOT NULL\".to_string());\n\n }\n\n if col.is_pk() {\n\n constraints.push(\"PRIMARY KEY\".to_string());\n\n }\n\n if col.unique() {\n\n constraints.push(\"UNIQUE\".to_string());\n\n }\n\n Ok(format!(\n\n \"{} {} {}\",\n\n &col.name(),\n\n col_sqltype(col)?,\n\n constraints.join(\" \")\n\n ))\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 73, "score": 109859.656505238 }, { "content": "#[proc_macro]\n\npub fn filter(input: TokenStream) -> TokenStream {\n\n let input: TokenStream2 = input.into();\n\n let args: Vec<TokenTree> = input.into_iter().collect();\n\n if args.len() < 2 {\n\n return make_compile_error!(\"Expected filter!(Type, expression)\").into();\n\n }\n\n let tyid: Ident = match &args[0] {\n\n TokenTree::Ident(tyid) => tyid.clone(),\n\n TokenTree::Group(g) => match syn::parse2::<Ident>(g.stream()) {\n\n Ok(ident) => ident,\n\n Err(_) => {\n\n return make_compile_error!(\"Unexpected tokens in database object type {:?}\", &g)\n\n .into()\n\n }\n\n },\n\n _ => {\n\n return make_compile_error!(\"Unexpected tokens in database object type {:?}\", &args[0])\n\n .into()\n\n }\n\n };\n", "file_path": "butane_codegen/src/lib.rs", "rank": 74, "score": 108203.01527608762 }, { "content": "pub fn make_lit(s: &str) -> LitStr {\n\n LitStr::new(s, Span::call_site())\n\n}\n\n\n", "file_path": "butane_core/src/codegen/mod.rs", "rank": 75, "score": 108198.69835472247 }, { "content": "fn copy_table(old: &ATable, new: &ATable) -> String {\n\n let column_names = new\n\n .columns\n\n .iter()\n\n .map(|col| col.name())\n\n .collect::<Vec<&str>>()\n\n .join(\", \");\n\n format!(\n\n \"INSERT INTO {} SELECT {} FROM {};\",\n\n &new.name, column_names, &old.name\n\n )\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 77, "score": 105712.3838607616 }, { "content": 
"fn copy_table(old: &ATable, new: &ATable) -> String {\n\n let column_names = new\n\n .columns\n\n .iter()\n\n .map(|col| col.name())\n\n .collect::<Vec<&str>>()\n\n .join(\", \");\n\n format!(\n\n \"INSERT INTO {} SELECT {} FROM {};\",\n\n &new.name, column_names, &old.name\n\n )\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 78, "score": 105712.3838607616 }, { "content": "fn remove_column(tbl_name: &str, name: &str) -> String {\n\n format!(\"ALTER TABLE {} DROP COLUMN {};\", tbl_name, name)\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 79, "score": 104232.8892396329 }, { "content": "pub fn get_migrations() -> Result<MemMigrations, butane::Error> {{\n\n let json = r#\\\"{}\\\"#;\n\n MemMigrations::from_json(json)\n\n}}\",\n\n json\n\n );\n\n\n\n let mut f = std::fs::File::create(path)?;\n\n f.write_all(src.as_bytes())?;\n\n\n\n let mut cli_state = CliState::load()?;\n\n cli_state.embedded = true;\n\n cli_state.save()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "butane_cli/src/main.rs", "rank": 80, "score": 104125.68748497458 }, { "content": "pub fn column_default(col: &AColumn) -> Result<SqlVal> {\n\n if let Some(val) = col.default() {\n\n return Ok(val.clone());\n\n }\n\n if col.nullable() {\n\n return Ok(SqlVal::Null);\n\n }\n\n Ok(match col.typeid()? 
{\n\n TypeIdentifier::Ty(ty) => match ty {\n\n SqlType::Bool => SqlVal::Bool(false),\n\n SqlType::Int => SqlVal::Int(0),\n\n SqlType::BigInt => SqlVal::Int(0),\n\n SqlType::Real => SqlVal::Real(0.0),\n\n SqlType::Text => SqlVal::Text(\"\".to_string()),\n\n SqlType::Blob => SqlVal::Blob(Vec::new()),\n\n #[cfg(feature = \"datetime\")]\n\n SqlType::Timestamp => SqlVal::Timestamp(NaiveDateTime::from_timestamp(0, 0)),\n\n SqlType::Custom(_) => return Err(Error::NoCustomDefault),\n\n },\n\n TypeIdentifier::Name(_) => return Err(Error::NoCustomDefault),\n\n })\n\n}\n\n\n", "file_path": "butane_core/src/db/helper.rs", "rank": 81, "score": 102658.4610932127 }, { "content": "pub fn for_expr(dbres: &Ident, expr: &Expr) -> TokenStream2 {\n\n handle_expr(&quote!(<#dbres as butane::DataResult>::DBO::fields()), expr)\n\n}\n\n\n", "file_path": "butane_codegen/src/filter.rs", "rank": 82, "score": 101851.92226939872 }, { "content": "/// Backend-specific row abstraction. Only implementors of new\n\n/// backends need use this trait directly.\n\npub trait BackendRow {\n\n fn get(&self, idx: usize, ty: SqlType) -> Result<SqlValRef>;\n\n fn len(&self) -> usize;\n\n // clippy wants this method to exist\n\n fn is_empty(&self) -> bool {\n\n self.len() == 0\n\n }\n\n}\n\n\n", "file_path": "butane_core/src/db/connmethods.rs", "rank": 83, "score": 100872.20055226467 }, { "content": "/// Abstraction of rows returned from a query. 
Most users do not need\n\n/// to deal with this directly and should use the `query!` macro or\n\n/// [Query](crate::query::Query) type.\n\npub trait BackendRows {\n\n // Advance to the next item and get it\n\n fn next<'a>(&'a mut self) -> Result<Option<&'a (dyn BackendRow + 'a)>>;\n\n // Get the item most recently returned by next\n\n fn current<'a>(&'a self) -> Option<&'a (dyn BackendRow + 'a)>;\n\n #[inline]\n\n fn mapped<F, B>(self, f: F) -> MapDeref<Self, F>\n\n where\n\n Self: Sized,\n\n F: FnMut(&(dyn BackendRow)) -> Result<B>,\n\n {\n\n MapDeref { it: self, f }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MapDeref<I, F> {\n\n it: I,\n\n f: F,\n\n}\n", "file_path": "butane_core/src/db/connmethods.rs", "rank": 84, "score": 100867.47023768342 }, { "content": "fn establish_connection() -> Result<Connection> {\n\n let spec = ConnectionSpec::load(&std::env::current_dir()?)?;\n\n let conn = butane::db::connect(&spec)?;\n\n Ok(conn)\n\n}\n", "file_path": "example/src/main.rs", "rank": 85, "score": 100791.77418181478 }, { "content": "fn handle_path(fields: &impl ToTokens, expr: &ExprPath) -> TokenStream2 {\n\n if expr.path.is_ident(\"None\") {\n\n return quote!(None);\n\n }\n\n fieldexpr(fields, &expr.path)\n\n}\n\n\n", "file_path": "butane_codegen/src/filter.rs", "rank": 86, "score": 100542.3236025917 }, { "content": "fn add_column(tbl_name: &str, col: &AColumn) -> Result<String> {\n\n let default: SqlVal = helper::column_default(col)?;\n\n Ok(format!(\n\n \"ALTER TABLE {} ADD COLUMN {} DEFAULT {};\",\n\n tbl_name,\n\n define_column(col),\n\n helper::sql_literal_value(default)?\n\n ))\n\n}\n\n\n", "file_path": "butane_core/src/db/sqlite.rs", "rank": 87, "score": 100496.17231644466 }, { "content": "fn add_column(tbl_name: &str, col: &AColumn) -> Result<String> {\n\n let default: SqlVal = helper::column_default(col)?;\n\n Ok(format!(\n\n \"ALTER TABLE {} ADD COLUMN {} DEFAULT {};\",\n\n tbl_name,\n\n define_column(col)?,\n\n helper::sql_literal_value(default)?\n\n 
))\n\n}\n\n\n", "file_path": "butane_core/src/db/pg.rs", "rank": 88, "score": 100496.17231644466 }, { "content": "/// Methods available on a database connection. Most users do not need\n\n/// to call these methods directly and will instead use methods on\n\n/// [DataObject][crate::DataObject] or the `query!` macro. This trait is\n\n/// implemented by both database connections and transactions.\n\npub trait ConnectionMethods {\n\n fn execute(&self, sql: &str) -> Result<()>;\n\n fn query<'a, 'b, 'c: 'a>(\n\n &'c self,\n\n table: &str,\n\n columns: &'b [Column],\n\n expr: Option<BoolExpr>,\n\n limit: Option<i32>,\n\n offset: Option<i32>,\n\n sort: Option<&[Order]>,\n\n ) -> Result<RawQueryResult<'a>>;\n\n fn insert_returning_pk(\n\n &self,\n\n table: &str,\n\n columns: &[Column],\n\n pkcol: &Column,\n\n values: &[SqlValRef<'_>],\n\n ) -> Result<SqlVal>;\n\n /// Like `insert_returning_pk` but with no return value\n\n fn insert_only(&self, table: &str, columns: &[Column], values: &[SqlValRef<'_>]) -> Result<()>;\n", "file_path": "butane_core/src/db/connmethods.rs", "rank": 89, "score": 100340.9557752398 }, { "content": "fn fieldexpr<F>(fields: &impl ToTokens, field: &F) -> TokenStream2\n\nwhere\n\n F: ToTokens + Spanned,\n\n{\n\n let fieldexpr_ident = ident(&format!(\"{}\", field.into_token_stream()));\n\n let span = field.span();\n\n quote_spanned!(span=> #fields.#fieldexpr_ident())\n\n}\n", "file_path": "butane_codegen/src/filter.rs", "rank": 90, "score": 99843.470432148 }, { "content": "fn handle_call(fields: &impl ToTokens, mcall: &ExprMethodCall) -> TokenStream2 {\n\n let method = mcall.method.to_string();\n\n match method.as_str() {\n\n \"contains\" | \"matches\" => {\n\n if mcall.args.len() != 1 {\n\n return make_compile_error!(mcall.span()=> \"expected one argument to '{}'\", method);\n\n };\n\n }\n\n _ => (),\n\n };\n\n match method.as_str() {\n\n \"matches\" => handle_in(fields, &mcall.receiver, mcall.args.first().unwrap()),\n\n \"contains\" => 
handle_contains(fields, &mcall.receiver, mcall.args.first().unwrap()),\n\n \"like\" => handle_like(fields, &mcall.receiver, mcall.args.first().unwrap()),\n\n _ => make_compile_error!(\"Unknown method call {}\", method),\n\n }\n\n}\n\n\n", "file_path": "butane_codegen/src/filter.rs", "rank": 91, "score": 99200.92634000609 }, { "content": "fn handle_bin_op(fields: &impl ToTokens, binop: &ExprBinary) -> TokenStream2 {\n\n let left = handle_expr(fields, &binop.left);\n\n let right = handle_expr(fields, &binop.right);\n\n match binop.op {\n\n BinOp::Eq(_) => quote!(#left.eq(&#right)),\n\n BinOp::Ne(_) => quote!(#left.ne(&#right)),\n\n BinOp::Lt(_) => quote!(#left.lt(&#right)),\n\n BinOp::Gt(_) => quote!(#left.gt(&#right)),\n\n BinOp::Le(_) => quote!(#left.le(&#right)),\n\n BinOp::Ge(_) => quote!(#left.ge(&#right)),\n\n BinOp::And(_) => quote!(butane::query::BoolExpr::And(Box::new(#left), Box::new(#right))),\n\n BinOp::Or(_) => quote!(butane::query::BoolExpr::Or(Box::new(#left), Box::new(#right))),\n\n _ => quote!(compile_error!(\"Unsupported binary operator\")),\n\n }\n\n}\n\n\n", "file_path": "butane_codegen/src/filter.rs", "rank": 92, "score": 99200.92634000609 }, { "content": "/// A type which may be the result of a database query.\n\n///\n\n/// Every result type must have a corresponding object type and the\n\n/// columns of the result type must be a subset of the columns of the\n\n/// object type. 
The purpose of a result type which is not also an\n\n/// object type is to allow a query to retrieve a subset of an\n\n/// object's columns.\n\npub trait DataResult: Sized {\n\n /// Corresponding object type.\n\n type DBO: DataObject;\n\n const COLUMNS: &'static [Column];\n\n fn from_row<'a>(row: &(dyn BackendRow + 'a)) -> Result<Self>\n\n where\n\n Self: Sized;\n\n /// Create a blank query (matching all rows) for this type.\n\n fn query() -> Query<Self>;\n\n}\n\n\n", "file_path": "butane_core/src/lib.rs", "rank": 93, "score": 98782.84151787772 }, { "content": "pub fn get_deferred_sql_type(ty: &syn::Type) -> DeferredSqlType {\n\n get_primitive_sql_type(ty)\n\n .or_else(|| get_option_sql_type(ty))\n\n .or_else(|| get_foreign_sql_type(ty, \"ForeignKey\"))\n\n .unwrap_or_else(|| {\n\n DeferredSqlType::Deferred(TypeKey::CustomType(\n\n ty.clone().into_token_stream().to_string(),\n\n ))\n\n })\n\n}\n\n\n", "file_path": "butane_core/src/codegen/mod.rs", "rank": 94, "score": 98663.70086155942 }, { "content": "/// Shared functionality between connection and\n\n/// transaction. Implementation detail. 
Semver exempt.\n\npub trait PgConnectionLike {\n\n type Client: postgres::GenericClient;\n\n fn cell(&self) -> Result<&RefCell<Self::Client>>;\n\n}\n\n\n\nimpl<T> ConnectionMethods for T\n\nwhere\n\n T: PgConnectionLike,\n\n{\n\n fn execute(&self, sql: &str) -> Result<()> {\n\n if cfg!(feature = \"log\") {\n\n debug!(\"execute sql {}\", sql);\n\n }\n\n self.cell()?.try_borrow_mut()?.batch_execute(sql.as_ref())?;\n\n Ok(())\n\n }\n\n\n\n fn query<'a, 'b, 'c: 'a>(\n\n &'c self,\n\n table: &str,\n", "file_path": "butane_core/src/db/pg.rs", "rank": 95, "score": 98461.00456227703 }, { "content": "pub trait ConnectionMethodWrapper {\n\n type Wrapped: ConnectionMethods;\n\n fn wrapped_connection_methods(&self) -> Result<&Self::Wrapped>;\n\n}\n\n\n\npub(crate) struct VecRows<T> {\n\n rows: Vec<T>,\n\n idx: usize,\n\n}\n\nimpl<T> VecRows<T> {\n\n #[allow(unused)] // Not used with all feature combinations\n\n pub fn new(rows: Vec<T>) -> Self {\n\n VecRows { rows, idx: 0 }\n\n }\n\n}\n\nimpl<T> BackendRows for VecRows<T>\n\nwhere\n\n T: BackendRow,\n\n{\n\n fn next(&mut self) -> Result<Option<&(dyn BackendRow)>> {\n", "file_path": "butane_core/src/db/connmethods.rs", "rank": 96, "score": 98457.17802946566 }, { "content": "/// Determine the operations necessary to move the database schema from `old` to `new`.\n\npub fn diff(old: &ADB, new: &ADB) -> Vec<Operation> {\n\n let mut ops: Vec<Operation> = Vec::new();\n\n let new_names: HashSet<&String> = new.tables.keys().collect();\n\n let old_names: HashSet<&String> = old.tables.keys().collect();\n\n let new_tables = new_names.difference(&old_names);\n\n for added in new_tables {\n\n let added: &str = added.as_ref();\n\n ops.push(Operation::AddTable(\n\n new.tables.get(added).expect(\"no table\").clone(),\n\n ));\n\n }\n\n for removed in old_names.difference(&new_names) {\n\n ops.push(Operation::RemoveTable((*removed).to_string()));\n\n }\n\n for table in new_names.intersection(&old_names) {\n\n let table: &str = 
table.as_ref();\n\n ops.append(&mut diff_table(\n\n old.tables.get(table).expect(\"no table\"),\n\n new.tables.get(table).expect(\"no table\"),\n\n ));\n\n }\n\n ops\n\n}\n\n\n", "file_path": "butane_core/src/migrations/adb.rs", "rank": 97, "score": 98329.45875121147 }, { "content": "/// Create a `Migrations` from a filesystem location. The `#[model]`\n\n/// attribute will write migration information to a\n\n/// `butane/migrations` directory under the project directory.\n\npub fn from_root<P: AsRef<Path>>(path: P) -> FsMigrations {\n\n FsMigrations::new(path.as_ref().to_path_buf())\n\n}\n\n\n", "file_path": "butane_core/src/migrations/mod.rs", "rank": 98, "score": 96936.8810469328 }, { "content": "/// Marker trait to determine whether values can be compared.\n\n///\n\n/// Unlike `PartialEq`, handles `Option`, which we need for nullable\n\n/// types. We would like to automatically implement it if PartialEq\n\n/// is implemented, but we can't do that without specialization or\n\n/// negative trait bounds.\n\npub trait DataEq<Rhs> {}\n\nimpl<T> DataEq<T> for Option<T> where T: PartialEq<T> + FieldType {}\n\nimpl<T> DataEq<T> for T where T: PartialEq<T> + FieldType {}\n\n\n", "file_path": "butane_core/src/query/fieldexpr.rs", "rank": 99, "score": 96896.21565044262 } ]
Rust
pretend/examples/responses.rs
orzogc/pretend
75824a5638b38e93bb9fe4cf35d2a59faf1053cc
use pretend::{pretend, Json, JsonResult, Pretend, Response, Result, Url}; use pretend_reqwest::Client; use serde::Deserialize; #[derive(Clone, Debug, Deserialize)] struct Contributor { login: String, } type Contributors = Vec<Contributor>; #[derive(Clone, Debug, Deserialize)] struct GithubError { message: String, } type ContributorsResult = JsonResult<Contributors, GithubError>; #[pretend] trait Github { #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn string(&self, repo: &str) -> Result<String>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn string_response(&self, repo: &str) -> Result<Response<String>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn bytes(&self, repo: &str) -> Result<Vec<u8>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn bytes_response(&self, repo: &str) -> Result<Response<Vec<u8>>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn json(&self, repo: &str) -> Result<Json<Contributors>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn json_response(&self, repo: &str) -> Result<Response<Json<Contributors>>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn json_result(&self, repo: &str) -> Result<ContributorsResult>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn 
json_result_response(&self, repo: &str) -> Result<Response<ContributorsResult>>; } fn create_pretend() -> impl Github { let url = Url::parse("https://api.github.com").unwrap(); Pretend::for_client(Client::default()).with_url(url) } #[tokio::main] async fn main() { let pretend = create_pretend(); let result = pretend.string("pretend").await.unwrap(); println!("{}", result); let result = pretend.string_response("pretend").await.unwrap(); println!("HTTP {}, {}", result.status(), result.body()); let result = pretend.bytes("pretend").await.unwrap(); let body = String::from_utf8_lossy(&result); println!("{}", body); let result = pretend.bytes_response("pretend").await.unwrap(); let body = String::from_utf8_lossy(result.body()); println!("HTTP {}, {}", result.status(), body); let result = pretend.json("pretend").await.unwrap(); println!("{:?}", result.value()); let result = pretend.json_response("pretend").await.unwrap(); println!("HTTP {}, {:?}", result.status(), result.body()); let result = pretend.json_result("pretend").await.unwrap(); println!("{:?}", result); let result = pretend.json_result_response("pretend").await.unwrap(); println!("HTTP {}, {:?}", result.status(), result.body()); let result = pretend.string("non-existing").await; assert!(result.is_err()); let result = pretend.bytes("non-existing").await; assert!(result.is_err()); let result = pretend.json("non-existing").await; assert!(result.is_err()); let result = pretend.string_response("non-existing").await.unwrap(); assert_eq!(result.status().as_u16(), 404); let result = pretend.bytes_response("non-existing").await.unwrap(); assert_eq!(result.status().as_u16(), 404); let result = pretend.json_response("non-existing").await; assert!(result.is_err()); let result = pretend.json_result("non-existing").await.unwrap(); println!("{:?}", result); let result = pretend.json_result_response("non-existing").await.unwrap(); println!("HTTP {}, {:?}", result.status(), result.body()); }
use pretend::{pretend, Json, JsonResult, Pretend, Response, Result, Url}; use pretend_reqwest::Client; use serde::Deserialize; #[derive(Clone, Debug, Deserialize)] struct Contributor { login: String, } type Contributors = Vec<Contributor>; #[derive(Clone, Debug, Deserialize)] struct GithubError { message: String, } type ContributorsResult = JsonResult<Contributors, GithubError>; #[pretend] trait Github { #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn string(&self, repo: &str) -> Result<String>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn string_response(&self, repo: &str) -> Result<Response<String>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn bytes(&self, repo: &str) -> Result<Vec<u8>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn bytes_response(&self, repo: &str) -> Result<Response<Vec<u8>>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn json(&self, repo: &str) -> Result<Json<Contributors>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn json_response(&self, repo: &str) -> Result<Response<Json<Contributors>>>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn json_result(&self, repo: &str) -> Result<ContributorsResult>; #[request(method = "GET", path = "/repos/SfietKonstantin/{repo}/contributors")] #[header(name = "User-Agent", value = "pretend example")] async fn 
json_result_response(&self, repo: &str) -> Result<Response<ContributorsResult>>; } fn create_pretend() -> impl Github { let url = Url::parse("https://api.github.com").unwrap(); Pretend::for_client(Client::default()).with_url(url) } #[tokio::main] async fn main() { let pretend = create_pretend(); let result = pretend.string("pretend").await.unwrap(); println!("{}", result); let result = pretend.string_response("pretend").await.unwrap(); println!("HTTP {}, {}", result.status(), result.body()); let result = pretend.bytes("pretend").await.unwrap(); let body = String::from_utf8_lossy(&result); println!("{}", body); let result = pretend.bytes_response("pretend").await.unwrap(); let body = String::from_utf8_lossy(result.body()); println!("HTTP {}, {}", result.status(), body); let result = pretend.json("pretend").await.unwrap(); println!("{:?}", result.value()); let result = pretend.json_response("pretend").await.unwrap(); println!("HTTP {}, {:?}", result.status(), result.body()); let result = pretend.json_result("pretend").await.unwrap(); println!("{:?}", result); let result = preten
d.json_result_response("pretend").await.unwrap(); println!("HTTP {}, {:?}", result.status(), result.body()); let result = pretend.string("non-existing").await; assert!(result.is_err()); let result = pretend.bytes("non-existing").await; assert!(result.is_err()); let result = pretend.json("non-existing").await; assert!(result.is_err()); let result = pretend.string_response("non-existing").await.unwrap(); assert_eq!(result.status().as_u16(), 404); let result = pretend.bytes_response("non-existing").await.unwrap(); assert_eq!(result.status().as_u16(), 404); let result = pretend.json_response("non-existing").await; assert!(result.is_err()); let result = pretend.json_result("non-existing").await.unwrap(); println!("{:?}", result); let result = pretend.json_result_response("non-existing").await.unwrap(); println!("HTTP {}, {:?}", result.status(), result.body()); }
function_block-function_prefixed
[ { "content": "fn create_pretend() -> impl HttpBin {\n\n let url = Url::parse(\"https://httpbin.org\").unwrap();\n\n Pretend::for_client(Client::default()).with_url(url)\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let pretend = create_pretend();\n\n\n\n let result = pretend.post_string_ref(\"Hello\").await.unwrap();\n\n println!(\"{}\", result);\n\n\n\n let result = pretend.post_string(\"Hello\".to_string()).await.unwrap();\n\n println!(\"{}\", result);\n\n\n\n let result = pretend.post_bytes_ref(&[1, 2, 3]).await.unwrap();\n\n println!(\"{}\", result);\n\n\n\n let result = pretend.post_bytes(vec![1, 2, 3]).await.unwrap();\n\n println!(\"{}\", result);\n", "file_path": "pretend/examples/bodies.rs", "rank": 2, "score": 228537.1345115651 }, { "content": "fn get_body(method: &TraitItemMethod) -> Result<BodyKind> {\n\n let inputs = &method.sig.inputs;\n\n let single = inputs\n\n .iter()\n\n .filter_map(parse_param_name)\n\n .filter_map(parse_body_kind)\n\n .collect::<Single<_>>();\n\n\n\n match single {\n\n Single::None => Ok(BodyKind::None),\n\n Single::Single(item) => Ok(item.value),\n\n Single::TooMany(bodies) => {\n\n let errors = bodies\n\n .into_iter()\n\n .map(|item| Error::new_spanned(item.tokens, TOO_MANY_BODIES_HINT))\n\n .collect::<Vec<_>>();\n\n\n\n errors.into_result(|| Error::new_spanned(&method.sig, TOO_MANY_BODIES))\n\n }\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/method/body.rs", "rank": 3, "score": 214050.90474927926 }, { "content": "/// Append a component to a header\n\npub fn build_header(headers: &mut HeaderMap, name: &str, value: &str) -> Result<()> {\n\n let name = HeaderName::from_str(name).map_err(Error::request)?;\n\n let value = HeaderValue::from_str(value).map_err(Error::request)?;\n\n headers.append(name, value);\n\n Ok(())\n\n}\n\n\n", "file_path": "pretend/src/internal.rs", "rank": 4, "score": 195990.70968588997 }, { "content": "fn create_pretend() -> impl HttpBin {\n\n let url = 
Url::parse(\"https://httpbin.org\").unwrap();\n\n Pretend::for_client(Client::default()).with_url(url)\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let pretend = create_pretend();\n\n\n\n let get = pretend.get().await.unwrap();\n\n println!(\"{}\", get);\n\n\n\n let post = pretend.post().await.unwrap();\n\n println!(\"{}\", post);\n\n\n\n let put = pretend.post().await.unwrap();\n\n println!(\"{}\", put);\n\n\n\n let delete = pretend.post().await.unwrap();\n\n println!(\"{}\", delete);\n\n}\n", "file_path": "pretend/examples/methods.rs", "rank": 5, "score": 195548.24383081507 }, { "content": "fn create_pretend() -> impl HttpBin {\n\n let url = Url::parse(\"https://httpbin.org\").unwrap();\n\n Pretend::for_client(Client::default()).with_url(url)\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let pretend = create_pretend();\n\n\n\n let future = pretend.get(\"get\", \"Header\", 1, 2, \"something\");\n\n let result = future.await.unwrap();\n\n println!(\"{}\", result);\n\n}\n", "file_path": "pretend/examples/templating.rs", "rank": 6, "score": 195548.24383081507 }, { "content": "fn create_pretend() -> impl HttpBin {\n\n let url = Url::parse(\"https://httpbin.org\").unwrap();\n\n Pretend::for_client(BlockingClient::default()).with_url(url)\n\n}\n\n\n", "file_path": "pretend/examples/blocking.rs", "rank": 7, "score": 195548.24383081507 }, { "content": "fn create_pretend() -> impl HttpBin {\n\n let url = Url::parse(\"https://httpbin.org\").unwrap();\n\n Pretend::for_client(Client::default()).with_url(url)\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let pretend = create_pretend();\n\n\n\n let result = pretend.get().await.unwrap();\n\n println!(\"{}\", result);\n\n}\n", "file_path": "pretend/examples/headers.rs", "rank": 8, "score": 195548.24383081507 }, { "content": "fn create_pretend() -> impl HttpBin {\n\n let url = Url::parse(\"https://httpbin.org\").unwrap();\n\n 
Pretend::for_client(Client::default()).with_url(url)\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let pretend = create_pretend();\n\n\n\n let result = pretend.get(\"Hello\".to_string(), 123).await.unwrap();\n\n println!(\"{}\", result);\n\n\n\n let query = Query {\n\n first: \"Hello\".to_string(),\n\n second: 123,\n\n };\n\n\n\n let result = pretend.get_query(query).await.unwrap();\n\n println!(\"{}\", result);\n\n}\n", "file_path": "pretend/examples/queries.rs", "rank": 9, "score": 195548.24383081507 }, { "content": "fn find_params(path: &str) -> Vec<&str> {\n\n PARAM_RE\n\n .captures_iter(path)\n\n .filter_map(|cap| cap.get(1))\n\n .map(|m| m.as_str())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_find_params() {\n\n let path = \"/{user}/{id}\";\n\n let params = find_params(path);\n\n assert_eq!(params, vec![\"user\", \"id\"]);\n\n }\n\n\n\n #[test]\n\n fn test_find_no_params() {\n\n let path = \"/{}\";\n\n let params = find_params(path);\n\n assert_eq!(params, Vec::<&str>::new());\n\n }\n\n}\n", "file_path": "pretend-codegen/src/format.rs", "rank": 10, "score": 195310.71881312318 }, { "content": "// Returns a list of name-value\n\nfn parse_name_value_attr(attr: &Attribute, name: &str) -> Option<Vec<(String, String)>> {\n\n let list = get_meta_list(attr)?;\n\n let path = list.path.get_ident()?;\n\n if path == name {\n\n let nested = list.nested;\n\n let attr = nested.iter().filter_map(parse_nested_meta).collect();\n\n Some(attr)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/utils/attr.rs", "rank": 11, "score": 193893.7205699848 }, { "content": "fn main() {\n\n let pretend = create_pretend();\n\n\n\n let get = pretend.get().unwrap();\n\n println!(\"{}\", get);\n\n}\n", "file_path": "pretend/examples/blocking.rs", "rank": 16, "score": 183353.59854794783 }, { "content": "fn parse_json<T>(body: Bytes) -> Result<T>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n 
serde_json::from_slice(body.as_ref()).map_err(Error::body)\n\n}\n", "file_path": "pretend/src/internal.rs", "rank": 17, "score": 181437.3368923366 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/bodies.rs", "rank": 18, "score": 180229.88122803558 }, { "content": "/// Update the query component of an Url\n\npub fn build_query<T>(mut url: Url, query: &T) -> Result<Url>\n\nwhere\n\n T: Serialize,\n\n{\n\n {\n\n let mut pairs = url.query_pairs_mut();\n\n let serializer = serde_urlencoded::Serializer::new(&mut pairs);\n\n let result = query.serialize(serializer);\n\n result.map_err(Error::request)?;\n\n }\n\n Ok(url)\n\n}\n\n\n", "file_path": "pretend/src/internal.rs", "rank": 19, "score": 179298.91299982916 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/inconsistent_async.rs", "rank": 20, "score": 177085.42423511716 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/build-sources/bodies.rs", "rank": 21, "score": 177025.70188532723 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/bodies.rs", "rank": 22, "score": 177025.70188532723 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/build-sources/inconsistent_async.rs", "rank": 23, "score": 174043.94576894867 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/inconsistent_async.rs", "rank": 24, "score": 174043.94576894867 }, { "content": "fn parse_meta_name_value(name_value: &MetaNameValue) -> Option<(String, String)> {\n\n let path = name_value.path.get_ident();\n\n match (path, &name_value.lit) {\n\n (Some(path), Lit::Str(value)) => Some((path.to_string(), value.value())),\n\n _ => None,\n\n }\n\n}\n", "file_path": "pretend-codegen/src/utils/attr.rs", "rank": 25, "score": 167927.17447913636 }, { "content": "fn is_body_err<T>(result: Result<T>) -> bool {\n\n match result {\n\n Err(Error::Body(_)) => true,\n\n _ => true,\n\n }\n\n}\n\n\n", "file_path": "pretend/tests/test_output.rs", "rank": 26, 
"score": 164898.7981690256 }, { "content": "fn parse_string_body(response: &Response<Bytes>) -> String {\n\n // Taken from reqwest\n\n let content_type = response.headers.get(CONTENT_TYPE);\n\n let content_type = content_type\n\n .and_then(|value| value.to_str().ok())\n\n .and_then(|value| value.parse::<mime::Mime>().ok());\n\n let encoding_name = content_type\n\n .as_ref()\n\n .and_then(|mime| mime.get_param(\"charset\").map(|charset| charset.as_str()))\n\n .unwrap_or(\"utf-8\");\n\n\n\n let encoding = encoding_rs::Encoding::for_label(encoding_name.as_bytes());\n\n let encoding = encoding.unwrap_or(encoding_rs::UTF_8);\n\n\n\n let (text, _, _) = encoding.decode(&response.body);\n\n text.to_string()\n\n}\n\n\n\nimpl IntoResponse<Vec<u8>> for Response<Bytes> {\n\n fn into_response(self) -> Result<Vec<u8>> {\n", "file_path": "pretend/src/internal.rs", "rank": 27, "score": 161847.35474041404 }, { "content": "fn implement_header(name: String, value: String) -> TokenStream {\n\n let name = format(name, \"header_name\");\n\n let value = format(value, \"header_value\");\n\n quote! 
{\n\n #name\n\n #value\n\n pretend::internal::build_header(&mut headers, header_name, header_value)?;\n\n }\n\n}\n", "file_path": "pretend-codegen/src/method/headers.rs", "rank": 28, "score": 161117.9620933644 }, { "content": "fn map_headers((n, v): (&HeaderName, &HeaderValue)) -> Option<(String, String)> {\n\n let n = n.to_string();\n\n let v = v.to_str().ok()?;\n\n Some((n, v.to_string()))\n\n}\n\n\n\n#[get(\"/headers\")]\n\nasync fn headers(request: HttpRequest) -> impl Responder {\n\n let headers = request\n\n .headers()\n\n .iter()\n\n .filter_map(map_headers)\n\n .collect::<HashMap<_, _>>();\n\n Json(headers)\n\n}\n\n\n\npub struct ServerRunner {\n\n server: Server,\n\n handle: JoinHandle<io::Result<()>>,\n\n}\n", "file_path": "pretend/tests/server.rs", "rank": 29, "score": 159042.04912453835 }, { "content": "#[derive(Clone, Serialize)]\n\nstruct Data {\n\n first: String,\n\n second: i32,\n\n}\n\n\n", "file_path": "pretend/examples/bodies.rs", "rank": 30, "score": 159039.93061710979 }, { "content": "fn get_err_status<T>(result: Result<T>) -> Option<u16> {\n\n match result {\n\n Err(Error::Status(status)) => Some(status.as_u16()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "pretend/tests/test_output.rs", "rank": 31, "score": 158447.79646863346 }, { "content": "type TestDataResult = JsonResult<server::TestData, server::ErrorData>;\n\n\n", "file_path": "pretend/tests/test_output.rs", "rank": 32, "score": 153626.52521944983 }, { "content": "#[pretend]\n\ntrait HttpBin {\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n #[header(name = \"Content-Type\", value = \"text/plain\")]\n\n async fn post_string_ref(&self, body: &'static str) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n #[header(name = \"Content-Type\", value = \"text/plain\")]\n\n async fn post_string(&self, body: String) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n #[header(name = \"Content-Type\", value = 
\"application/octet-stream\")]\n\n async fn post_bytes_ref(&self, body: &'static [u8]) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n #[header(name = \"Content-Type\", value = \"application/octet-stream\")]\n\n async fn post_bytes(&self, body: Vec<u8>) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n async fn post_form_ref(&self, form: &Data) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n async fn post_form(&self, form: Data) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n async fn post_json_ref(&self, json: &Data) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/anything\")]\n\n async fn post_json(&self, json: Data) -> Result<String>;\n\n}\n\n\n", "file_path": "pretend/examples/bodies.rs", "rank": 33, "score": 151868.93770372728 }, { "content": "fn test_clients(url: Url) {\n\n let clients = vec![\n\n create_testable(RClient::default()),\n\n create_testable(IClient::new().unwrap()),\n\n ];\n\n let tester = ClientsTester::new(url, clients);\n\n tester.test();\n\n}\n\n\n", "file_path": "pretend/tests/test_clients.rs", "rank": 34, "score": 148183.0628289177 }, { "content": "fn test_local_clients(url: Url) {\n\n let clients: Vec<Box<dyn TestableClient>> = vec![\n\n create_testable_local(RClient::default()),\n\n create_testable_local(IClient::new().unwrap()),\n\n Box::new(TestableAwcClient),\n\n ];\n\n let tester = ClientsTester::new(url, clients);\n\n tester.test();\n\n}\n\n\n", "file_path": "pretend/tests/test_clients.rs", "rank": 35, "score": 146383.38328326587 }, { "content": "fn test_blocking_clients(url: Url) {\n\n let clients: Vec<Box<dyn TestableClient>> = vec![\n\n Box::new(RBlockingClient::default()),\n\n Box::new(UClient::new(AgentBuilder::new().build())),\n\n ];\n\n let tester = ClientsTester::new(url, clients);\n\n tester.test();\n\n}\n", "file_path": "pretend/tests/test_clients.rs", "rank": 36, 
"score": 146383.38328326587 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/attribute.rs", "rank": 37, "score": 144567.68203359068 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/requests.rs", "rank": 38, "score": 144567.68203359068 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/headers.rs", "rank": 39, "score": 144567.68203359068 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/receivers.rs", "rank": 40, "score": 144567.68203359068 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/generics.rs", "rank": 41, "score": 144567.68203359068 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/build-sources/generics.rs", "rank": 42, "score": 142301.48064209928 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/build-sources/receivers.rs", "rank": 43, "score": 142301.48064209928 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/build-sources/attribute.rs", "rank": 44, "score": 142301.48064209928 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/headers.rs", "rank": 45, "score": 142301.48064209925 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/generics.rs", "rank": 46, "score": 142301.48064209925 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/build-sources/requests.rs", "rank": 47, "score": 142301.48064209925 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds/non_method.rs", "rank": 48, "score": 142301.48064209925 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/receivers.rs", "rank": 49, "score": 142301.48064209925 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/attribute.rs", "rank": 50, "score": 142301.48064209925 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/requests.rs", "rank": 51, "score": 142301.48064209928 }, { "content": "fn main() {}\n", "file_path": 
"pretend/tests/build-sources/headers.rs", "rank": 52, "score": 142301.48064209928 }, { "content": "fn implement_pretend(attr: PretendAttr, item: ItemTrait) -> Result<TokenStream2> {\n\n let name = &item.ident;\n\n let vis = &item.vis;\n\n let items = &item.items;\n\n let trait_items = items\n\n .iter()\n\n .map(|item| trait_item(item))\n\n .collect::<Vec<_>>();\n\n\n\n let kind = parse_client_kind(name, attr, items)?;\n\n let methods = items\n\n .iter()\n\n .map(|item| trait_item_implem(item, &kind))\n\n .collect::<Report<_>>()\n\n .into_result(|| Error::new(Span::call_site(), CODEGEN_FAILURE))?;\n\n\n\n let attr = async_trait_attr(&kind);\n\n let client = client_implem(&kind);\n\n let send_sync = send_sync_traits_impl(&kind);\n\n let tokens = quote! {\n", "file_path": "pretend-codegen/src/lib.rs", "rank": 53, "score": 141906.61787649692 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/build-sources/non_method.rs", "rank": 54, "score": 140151.43425880774 }, { "content": "fn main() {}\n", "file_path": "pretend/tests/builds-pre-1.54/non_method.rs", "rank": 55, "score": 140151.43425880774 }, { "content": "fn new_pretend() -> impl TestApi {\n\n let url = Url::parse(server::URL).unwrap();\n\n let client = Client::default();\n\n Pretend::for_client(client).with_url(url)\n\n}\n\n\n", "file_path": "pretend/tests/test_pretend.rs", "rank": 56, "score": 135153.33553021628 }, { "content": "fn implement_method(method: &TraitItemMethod, kind: &ClientKind) -> Result<TokenStream> {\n\n check_no_generics(method)?;\n\n check_correct_receiver(method)?;\n\n\n\n let query = implement_query(method);\n\n let body = implement_body(method)?;\n\n let headers = implement_headers(method)?;\n\n\n\n let sig = &method.sig;\n\n let (method, path) = get_request(method)?;\n\n let method = Ident::new(&method, Span::call_site());\n\n let path = format(path, \"path\");\n\n\n\n let execute_request = match kind {\n\n ClientKind::Async => quote! 
{\n\n support.request(method, url, headers, body).await\n\n },\n\n ClientKind::AsyncLocal => quote! {\n\n support.request_local(method, url, headers, body).await\n\n },\n", "file_path": "pretend-codegen/src/method.rs", "rank": 57, "score": 133266.16771061916 }, { "content": "fn new_pretend() -> impl TestApi {\n\n let url = Url::parse(server::URL).unwrap();\n\n let client = Client::default();\n\n Pretend::for_client(client).with_url(url)\n\n}\n\n\n", "file_path": "pretend/tests/test_output.rs", "rank": 58, "score": 133199.09847034112 }, { "content": "fn parse_client_kind(name: &Ident, attr: PretendAttr, items: &[TraitItem]) -> Result<ClientKind> {\n\n let asyncs = items.iter().filter_map(is_method_async).collect::<Vec<_>>();\n\n let is_async = asyncs.iter().all(|item| item.value);\n\n let is_not_async = asyncs.iter().all(|item| !item.value);\n\n\n\n match (is_async, is_not_async) {\n\n (true, false) => {\n\n if attr.local {\n\n Ok(ClientKind::AsyncLocal)\n\n } else {\n\n Ok(ClientKind::Async)\n\n }\n\n }\n\n (false, true) => {\n\n if attr.local {\n\n Err(Error::new(Span::call_site(), UNSUPPORTED_ATTR_SYNC))\n\n } else {\n\n Ok(ClientKind::Blocking)\n\n }\n\n }\n", "file_path": "pretend-codegen/src/lib.rs", "rank": 59, "score": 130124.35947338471 }, { "content": "fn parse_nested_meta(meta: &NestedMeta) -> Option<(String, String)> {\n\n match meta {\n\n NestedMeta::Meta(Meta::NameValue(name_value)) => parse_meta_name_value(name_value),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/utils/attr.rs", "rank": 60, "score": 130091.1791971743 }, { "content": "fn new_client() -> impl TestApi {\n\n let url = Url::parse(server::URL).unwrap();\n\n Pretend::new(RClient::default(), UrlResolver::new(url))\n\n}\n\n\n", "file_path": "pretend/tests/test_visibility.rs", "rank": 61, "score": 129044.57068659406 }, { "content": "fn check_no_where_clause(sig: &Signature) -> Result<()> {\n\n if let Some(where_clause) = sig.generics.where_clause.as_ref() {\n\n 
Err(Error::new_spanned(where_clause, UNSUPPORTED_GENERICS))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\npub(crate) fn check_correct_receiver(method: &TraitItemMethod) -> Result<()> {\n\n let receiver = get_receiver(method);\n\n let receiver = receiver.ok_or_else(|| Error::new_spanned(&method.sig, UNSUPPORTED_RECEIVER))?;\n\n get_good_mutability(receiver).ok_or_else(|| Error::new_spanned(receiver, UNSUPPORTED_RECEIVER))\n\n}\n\n\n", "file_path": "pretend-codegen/src/method/checks.rs", "rank": 62, "score": 123748.24748426929 }, { "content": "fn async_trait_attr(kind: &ClientKind) -> TokenStream2 {\n\n match kind {\n\n ClientKind::Async => quote! {\n\n #[pretend::client::async_trait]\n\n },\n\n ClientKind::AsyncLocal => quote! {\n\n #[pretend::client::async_trait(?Send)]\n\n },\n\n ClientKind::Blocking => TokenStream2::new(),\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/lib.rs", "rank": 63, "score": 123707.76456389469 }, { "content": "#[derive(Clone, Serialize)]\n\nstruct Query {\n\n first: String,\n\n second: i32,\n\n}\n\n\n", "file_path": "pretend/examples/queries.rs", "rank": 64, "score": 122387.67294687674 }, { "content": "fn check_no_generic_params(sig: &Signature) -> Result<()> {\n\n if sig.generics.params.is_empty() {\n\n Ok(())\n\n } else {\n\n Err(Error::new_spanned(&sig.generics, UNSUPPORTED_GENERICS))\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/method/checks.rs", "rank": 65, "score": 121979.11296100638 }, { "content": "fn try_parse(input: ParseStream) -> Result<PretendAttr> {\n\n if input.peek(Token![?]) {\n\n input.parse::<Token![?]>()?;\n\n input.parse::<kw::Send>()?;\n\n Ok(PretendAttr { local: true })\n\n } else {\n\n Ok(PretendAttr { local: false })\n\n }\n\n}\n", "file_path": "pretend-codegen/src/attr.rs", "rank": 66, "score": 121717.55401988534 }, { "content": "fn send_sync_traits_impl(kind: &ClientKind) -> TokenStream2 {\n\n match kind {\n\n ClientKind::Async => quote! 
{\n\n + Send + Sync\n\n },\n\n ClientKind::AsyncLocal => TokenStream2::new(),\n\n ClientKind::Blocking => TokenStream2::new(),\n\n }\n\n}\n", "file_path": "pretend-codegen/src/lib.rs", "rank": 67, "score": 121545.90818230622 }, { "content": "fn main() {}", "file_path": "tests/default-features-build/tests/builds/unsupported_client.rs", "rank": 68, "score": 121308.50327472705 }, { "content": "fn get_receiver(method: &TraitItemMethod) -> Option<&Receiver> {\n\n let sig = &method.sig;\n\n let first_arg = sig.inputs.first()?;\n\n\n\n match first_arg {\n\n FnArg::Receiver(receiver) => Some(receiver),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/method/checks.rs", "rank": 69, "score": 120315.50030999802 }, { "content": "fn is_method_async(item: &TraitItem) -> Option<WithTokens<bool, Signature>> {\n\n match item {\n\n TraitItem::Method(method) => {\n\n let is_async = method.sig.asyncness.is_some();\n\n Some(WithTokens::new(is_async, &method.sig))\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/lib.rs", "rank": 70, "score": 117891.98430238524 }, { "content": "#[pretend]\n\ntrait Test {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn test_1(&self, body: String, json: String) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/builds/bodies.rs", "rank": 71, "score": 116458.13100465963 }, { "content": "#[pretend]\n\ntrait HttpBin {\n\n #[request(method = \"GET\", path = \"/get?first={first}&second={second}\")]\n\n async fn get(&self, first: String, second: i32) -> Result<String>;\n\n\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn get_query(&self, query: Query) -> Result<String>;\n\n}\n\n\n", "file_path": "pretend/examples/queries.rs", "rank": 72, "score": 116206.73850928238 }, { "content": "#[pretend]\n\ntrait HttpBin {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn get(&self) -> Result<String>;\n\n\n\n #[request(method = \"POST\", path = \"/post\")]\n\n async fn post(&self) -> 
Result<String>;\n\n\n\n #[request(method = \"PUT\", path = \"/put\")]\n\n async fn put(&self) -> Result<String>;\n\n\n\n #[request(method = \"DELETE\", path = \"/delete\")]\n\n async fn delete(&self) -> Result<String>;\n\n}\n\n\n", "file_path": "pretend/examples/methods.rs", "rank": 73, "score": 116206.73850928238 }, { "content": "#[pretend]\n\ntrait HttpBin {\n\n #[request(method = \"GET\", path = \"/{path}\")]\n\n #[header(name = \"X-{header}\", value = \"{first}-{second}\")]\n\n #[header(name = \"X-Test\", value = \"{value}\")]\n\n async fn get(\n\n &self,\n\n path: &str,\n\n header: &str,\n\n first: i32,\n\n second: i32,\n\n value: &str,\n\n ) -> Result<String>;\n\n}\n\n\n", "file_path": "pretend/examples/templating.rs", "rank": 74, "score": 116206.73850928238 }, { "content": "#[pretend]\n\ntrait HttpBin {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n #[header(name = \"X-Test\", value = \"Hello\")]\n\n #[header(name = \"X-Something-Nice\", value = \"Lovely\")]\n\n async fn get(&self) -> Result<String>;\n\n}\n\n\n", "file_path": "pretend/examples/headers.rs", "rank": 75, "score": 116206.73850928238 }, { "content": "#[pretend]\n\ntrait HttpBin {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n fn get(&self) -> Result<String>;\n\n}\n\n\n", "file_path": "pretend/examples/blocking.rs", "rank": 76, "score": 116206.73850928238 }, { "content": "#[pretend]\n\ntrait Test2 {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n fn test_1(&self) -> Result<()>;\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn test_2(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/builds/inconsistent_async.rs", "rank": 77, "score": 114233.93068565574 }, { "content": "#[pretend]\n\ntrait Test1 {}\n\n\n", "file_path": "pretend/tests/builds/inconsistent_async.rs", "rank": 78, "score": 114233.93068565574 }, { "content": "#[pretend(?Send)]\n\ntrait Test3 {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n fn test_1(&self) -> Result<()>;\n\n}\n\n\n", 
"file_path": "pretend/tests/builds/inconsistent_async.rs", "rank": 79, "score": 114233.8619604549 }, { "content": "#[pretend]\n\ntrait Test {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn test_1(&self, body: String, json: String) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/builds-pre-1.54/bodies.rs", "rank": 80, "score": 114174.20833586581 }, { "content": "#[pretend]\n\ntrait Test {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn test_1(&self, body: String, json: String) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/build-sources/bodies.rs", "rank": 81, "score": 114174.20833586581 }, { "content": "/// Describe an URL resolver\n\n///\n\n/// See module level documentation for more information.\n\npub trait ResolveUrl {\n\n /// Resolve an URL from a path\n\n fn resolve_url(&self, path: &str) -> Result<Url, ParseError>;\n\n}\n\n\n\n/// Default URL resolver\n\n///\n\n/// This resolver appends the path to a base URL.\n\n#[derive(Clone, Debug)]\n\npub struct UrlResolver {\n\n base: Url,\n\n}\n\n\n\nimpl UrlResolver {\n\n /// Constructor\n\n pub fn new(base: Url) -> Self {\n\n UrlResolver { base }\n\n }\n\n}\n\n\n", "file_path": "pretend/src/resolver.rs", "rank": 82, "score": 112271.48221690895 }, { "content": "#[pretend]\n\ntrait Test1 {}\n\n\n", "file_path": "pretend/tests/builds-pre-1.54/inconsistent_async.rs", "rank": 83, "score": 112065.54079002581 }, { "content": "#[pretend]\n\ntrait Test2 {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n fn test_1(&self) -> Result<()>;\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn test_2(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/builds-pre-1.54/inconsistent_async.rs", "rank": 84, "score": 112065.54079002581 }, { "content": "#[pretend]\n\ntrait Test1 {}\n\n\n", "file_path": "pretend/tests/build-sources/inconsistent_async.rs", "rank": 85, "score": 112065.54079002581 }, { "content": "#[pretend]\n\ntrait Test2 {\n\n #[request(method = 
\"GET\", path = \"/get\")]\n\n fn test_1(&self) -> Result<()>;\n\n #[request(method = \"GET\", path = \"/get\")]\n\n async fn test_2(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/build-sources/inconsistent_async.rs", "rank": 86, "score": 112065.54079002581 }, { "content": "#[pretend(?Send)]\n\ntrait Test3 {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n fn test_1(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/build-sources/inconsistent_async.rs", "rank": 87, "score": 112065.47206482498 }, { "content": "#[pretend(?Send)]\n\ntrait Test3 {\n\n #[request(method = \"GET\", path = \"/get\")]\n\n fn test_1(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "pretend/tests/builds-pre-1.54/inconsistent_async.rs", "rank": 88, "score": 112065.47206482498 }, { "content": "fn implement_header_result(\n\n item: WithTokens<Option<(String, String)>, Attribute>,\n\n) -> Result<TokenStream> {\n\n let value = item.value;\n\n let tokens = item.tokens;\n\n let (name, value) = value.ok_or_else(|| Error::new_spanned(tokens, INVALID_HEADER))?;\n\n Ok(implement_header(name, value))\n\n}\n\n\n", "file_path": "pretend-codegen/src/method/headers.rs", "rank": 89, "score": 107074.98413613923 }, { "content": "fn parse_body_kind(ident: &Ident) -> Option<WithTokens<BodyKind, Ident>> {\n\n if ident == \"body\" {\n\n Some(WithTokens::new(BodyKind::Body, ident))\n\n } else if ident == \"form\" {\n\n Some(WithTokens::new(BodyKind::Form, ident))\n\n } else if ident == \"json\" {\n\n Some(WithTokens::new(BodyKind::Json, ident))\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "pretend-codegen/src/method/body.rs", "rank": 90, "score": 100697.27330623874 }, { "content": "fn create_header((name, value): (&HeaderName, &HeaderValue)) -> (PHeaderName, PHeaderValue) {\n\n (PHeaderName::from(name), PHeaderValue::from(value))\n\n}\n", "file_path": "pretend-awc/src/lib.rs", "rank": 91, "score": 97307.06831414413 }, { "content": "fn get_good_mutability(receiver: &Receiver) -> 
Option<()> {\n\n let (_, lifetime) = receiver.reference.as_ref()?;\n\n if lifetime.is_none() && receiver.mutability.is_none() {\n\n Some(())\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "pretend-codegen/src/method/checks.rs", "rank": 92, "score": 96411.59290239899 }, { "content": "fn has_query(method: &TraitItemMethod) -> bool {\n\n let inputs = &method.sig.inputs;\n\n inputs\n\n .iter()\n\n .filter_map(parse_param_name)\n\n .any(|param| param == \"query\")\n\n}\n", "file_path": "pretend-codegen/src/method/query.rs", "rank": 93, "score": 95333.06046381031 }, { "content": "use pretend::{pretend, Pretend, Result, Url};\n\nuse pretend_reqwest::Client;\n\nuse serde::Serialize;\n\n\n\n// This example show how to send various body types to https://httpbin.org\n\n\n\n#[derive(Clone, Serialize)]\n", "file_path": "pretend/examples/bodies.rs", "rank": 94, "score": 92552.62902305435 }, { "content": "\n\n let data = Data {\n\n first: \"Hello\".to_string(),\n\n second: 123,\n\n };\n\n\n\n let result = pretend.post_form_ref(&data).await.unwrap();\n\n println!(\"{}\", result);\n\n\n\n let result = pretend.post_form(data.clone()).await.unwrap();\n\n println!(\"{}\", result);\n\n\n\n let result = pretend.post_json_ref(&data).await.unwrap();\n\n println!(\"{}\", result);\n\n\n\n let result = pretend.post_json(data.clone()).await.unwrap();\n\n println!(\"{}\", result);\n\n}\n", "file_path": "pretend/examples/bodies.rs", "rank": 95, "score": 92550.93184680156 }, { "content": "fn get_meta_list(attr: &Attribute) -> Option<MetaList> {\n\n let meta = attr.parse_meta().ok()?;\n\n match meta {\n\n Meta::List(list) => Some(list),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "pretend-codegen/src/utils/attr.rs", "rank": 96, "score": 91772.91124463674 }, { "content": "#[proc_macro_attribute]\n\npub fn pretend(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let attr = parse_macro_input!(attr as PretendAttr);\n\n let item = parse_macro_input!(item as ItemTrait);\n\n 
implement_pretend(attr, item)\n\n .unwrap_or_else(Error::into_compile_error)\n\n .into()\n\n}\n\n\n", "file_path": "pretend-codegen/src/lib.rs", "rank": 97, "score": 85362.90848793852 }, { "content": "#[pretend]\n\ntrait TestApi {\n\n #[request(method = \"GET\", path = \"/method\")]\n\n async fn get(&self) -> Result<String>;\n\n #[request(method = \"POST\", path = \"/method\")]\n\n async fn post(&self) -> Result<String>;\n\n #[request(method = \"PUT\", path = \"/method\")]\n\n async fn put(&self) -> Result<String>;\n\n #[request(method = \"PATCH\", path = \"/method\")]\n\n async fn patch(&self) -> Result<String>;\n\n #[request(method = \"DELETE\", path = \"/method\")]\n\n async fn delete(&self) -> Result<String>;\n\n #[request(method = \"GET\", path = \"/query\")]\n\n async fn query(&self, query: &server::TestData) -> Result<Json<HashMap<String, String>>>;\n\n #[request(method = \"GET\", path = \"/headers\")]\n\n #[header(name = \"X-Test-Header-1\", value = \"abc\")]\n\n #[header(name = \"X-Test-Header-2\", value = \"{value}\")]\n\n #[header(name = \"X-{custom}\", value = \"custom\")]\n\n async fn headers(&self, value: i32, custom: &str) -> Result<Json<HashMap<String, String>>>;\n\n #[request(method = \"POST\", path = \"/post/string\")]\n\n #[header(name = \"Content-Type\", value = \"text/plain\")]\n\n async fn post_string(&self, body: &'static str) -> Result<String>;\n\n #[request(method = \"POST\", path = \"/post/json\")]\n\n async fn post_json(&self, json: &server::TestData) -> Result<Json<server::TestData>>;\n\n #[request(method = \"POST\", path = \"/post/form\")]\n\n async fn post_form(&self, form: &server::TestData) -> Result<Json<server::TestData>>;\n\n}\n\n\n", "file_path": "pretend/tests/test_pretend.rs", "rank": 98, "score": 83365.47721810246 }, { "content": "#[test]\n\nfn test_pretend() {\n\n server::test(|| {\n\n runtimes::block_on(async {\n\n test_get().await;\n\n test_post().await;\n\n test_put().await;\n\n test_patch().await;\n\n 
test_delete().await;\n\n\n\n test_query().await;\n\n test_headers().await;\n\n test_post_string().await;\n\n test_post_json().await;\n\n test_post_form().await;\n\n })\n\n });\n\n}\n\n\n\nasync fn test_get() {\n\n let result = new_pretend().get().await;\n", "file_path": "pretend/tests/test_pretend.rs", "rank": 99, "score": 82138.59863103941 } ]
Rust
src/state.rs
virome/amethyst
f6cbdfe6e5c38838190e59a35b6e6d35115af0cb
use amethyst_input::is_close_requested; use ecs::prelude::World; use {GameData, StateEvent}; pub struct StateData<'a, T> where T: 'a, { pub world: &'a mut World, pub data: &'a mut T, } impl<'a, T> StateData<'a, T> where T: 'a, { pub fn new(world: &'a mut World, data: &'a mut T) -> Self { StateData { world, data } } } pub enum Trans<T, E> { None, Pop, Push(Box<State<T, E>>), Switch(Box<State<T, E>>), Quit, } pub type EmptyTrans = Trans<(), ()>; pub type SimpleTrans<'a, 'b> = Trans<GameData<'a, 'b>, ()>; pub trait State<T, E: Send + Sync + 'static> { fn on_start(&mut self, _data: StateData<T>) {} fn on_stop(&mut self, _data: StateData<T>) {} fn on_pause(&mut self, _data: StateData<T>) {} fn on_resume(&mut self, _data: StateData<T>) {} fn handle_event(&mut self, _data: StateData<T>, _event: StateEvent<E>) -> Trans<T, E> { Trans::None } fn fixed_update(&mut self, _data: StateData<T>) -> Trans<T, E> { Trans::None } fn update(&mut self, _data: StateData<T>) -> Trans<T, E> { Trans::None } } pub trait EmptyState { fn on_start(&mut self, _data: StateData<()>) {} fn on_stop(&mut self, _data: StateData<()>) {} fn on_pause(&mut self, _data: StateData<()>) {} fn on_resume(&mut self, _data: StateData<()>) {} fn handle_event(&mut self, _data: StateData<()>, event: StateEvent<()>) -> EmptyTrans { if let StateEvent::Window(event) = &event { if is_close_requested(&event) { Trans::Quit } else { Trans::None } } else { Trans::None } } fn fixed_update(&mut self, _data: StateData<()>) -> EmptyTrans { Trans::None } fn update(&mut self, _data: StateData<()>) -> EmptyTrans { Trans::None } } impl<T: EmptyState> State<(), ()> for T { fn on_start(&mut self, data: StateData<()>) { self.on_start(data) } fn on_stop(&mut self, data: StateData<()>) { self.on_stop(data) } fn on_pause(&mut self, data: StateData<()>) { self.on_pause(data) } fn on_resume(&mut self, data: StateData<()>) { self.on_resume(data) } fn handle_event(&mut self, data: StateData<()>, event: StateEvent<()>) -> EmptyTrans { 
self.handle_event(data, event) } fn fixed_update(&mut self, data: StateData<()>) -> EmptyTrans { self.fixed_update(data) } fn update(&mut self, data: StateData<()>) -> EmptyTrans { self.update(data) } } pub trait SimpleState<'a, 'b> { fn on_start(&mut self, _data: StateData<GameData>) {} fn on_stop(&mut self, _data: StateData<GameData>) {} fn on_pause(&mut self, _data: StateData<GameData>) {} fn on_resume(&mut self, _data: StateData<GameData>) {} fn handle_event( &mut self, _data: StateData<GameData>, event: StateEvent<()>, ) -> SimpleTrans<'a, 'b> { if let StateEvent::Window(event) = &event { if is_close_requested(&event) { Trans::Quit } else { Trans::None } } else { Trans::None } } fn fixed_update(&mut self, _data: StateData<GameData>) -> SimpleTrans<'a, 'b> { Trans::None } fn update(&mut self, _data: &mut StateData<GameData>) -> SimpleTrans<'a, 'b> { Trans::None } } impl<'a, 'b, T: SimpleState<'a, 'b>> State<GameData<'a, 'b>, ()> for T { fn on_start(&mut self, data: StateData<GameData>) { self.on_start(data) } fn on_stop(&mut self, data: StateData<GameData>) { self.on_stop(data) } fn on_pause(&mut self, data: StateData<GameData>) { self.on_pause(data) } fn on_resume(&mut self, data: StateData<GameData>) { self.on_resume(data) } fn handle_event( &mut self, data: StateData<GameData>, event: StateEvent<()>, ) -> SimpleTrans<'a, 'b> { self.handle_event(data, event) } fn fixed_update(&mut self, data: StateData<GameData>) -> SimpleTrans<'a, 'b> { self.fixed_update(data) } fn update(&mut self, mut data: StateData<GameData>) -> SimpleTrans<'a, 'b> { let r = self.update(&mut data); data.data.update(&data.world); r } } #[derive(Derivative)] #[derivative(Debug)] pub struct StateMachine<'a, T, E> { running: bool, #[derivative(Debug = "ignore")] state_stack: Vec<Box<State<T, E> + 'a>>, } impl<'a, T, E: Send + Sync + 'static> StateMachine<'a, T, E> { pub fn new<S: State<T, E> + 'a>(initial_state: S) -> StateMachine<'a, T, E> { StateMachine { running: false, state_stack: 
vec![Box::new(initial_state)], } } pub fn is_running(&self) -> bool { self.running } pub fn start(&mut self, data: StateData<T>) { if !self.running { let state = self.state_stack.last_mut().unwrap(); state.on_start(data); self.running = true; } } pub fn handle_event(&mut self, data: StateData<T>, event: StateEvent<E>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.handle_event(StateData { world, data }, event), None => Trans::None, }; self.transition(trans, StateData { world, data }); } } pub fn fixed_update(&mut self, data: StateData<T>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.fixed_update(StateData { world, data }), None => Trans::None, }; self.transition(trans, StateData { world, data }); } } pub fn update(&mut self, data: StateData<T>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.update(StateData { world, data }), None => Trans::None, }; self.transition(trans, StateData { world, data }); } } fn transition(&mut self, request: Trans<T, E>, data: StateData<T>) { if self.running { match request { Trans::None => (), Trans::Pop => self.pop(data), Trans::Push(state) => self.push(state, data), Trans::Switch(state) => self.switch(state, data), Trans::Quit => self.stop(data), } } } fn switch(&mut self, state: Box<State<T, E>>, data: StateData<T>) { if self.running { let StateData { world, data } = data; if let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } self.state_stack.push(state); let state = self.state_stack.last_mut().unwrap(); state.on_start(StateData { world, data }); } } fn push(&mut self, state: Box<State<T, E>>, data: StateData<T>) { if self.running { let StateData { world, data } = data; if let Some(state) = self.state_stack.last_mut() { state.on_pause(StateData { world, 
data }); } self.state_stack.push(state); let state = self.state_stack.last_mut().unwrap(); state.on_start(StateData { world, data }); } } fn pop(&mut self, data: StateData<T>) { if self.running { let StateData { world, data } = data; if let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } if let Some(state) = self.state_stack.last_mut() { state.on_resume(StateData { world, data }); } else { self.running = false; } } } pub(crate) fn stop(&mut self, data: StateData<T>) { if self.running { let StateData { world, data } = data; while let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } self.running = false; } } } #[cfg(test)] mod tests { use super::*; struct State1(u8); struct State2; impl State<(), ()> for State1 { fn update(&mut self, _: StateData<()>) -> Trans<(), ()> { if self.0 > 0 { self.0 -= 1; Trans::None } else { Trans::Switch(Box::new(State2)) } } } impl State<(), ()> for State2 { fn update(&mut self, _: StateData<()>) -> Trans<(), ()> { Trans::Pop } } #[test] fn switch_pop() { use ecs::prelude::World; let mut world = World::new(); let mut sm = StateMachine::new(State1(7)); sm.start(StateData::new(&mut world, &mut ())); for _ in 0..8 { sm.update(StateData::new(&mut world, &mut ())); assert!(sm.is_running()); } sm.update(StateData::new(&mut world, &mut ())); assert!(!sm.is_running()); } }
use amethyst_input::is_close_requested; use ecs::prelude::World; use {GameData, StateEvent}; pub struct StateData<'a, T> where T: 'a, { pub world: &'a mut World, pub data: &'a mut T, } impl<'a, T> StateData<'a, T> where T: 'a, { pub fn new(world: &'a mut World, data: &'a mut T) -> Self { StateData { world, data } } } pub enum Trans<T, E> { None, Pop, Push(Box<State<T, E>>), Switch(Box<State<T, E>>), Quit, } pub type EmptyTrans = Trans<(), ()>; pub type SimpleTrans<'a, 'b> = Trans<GameData<'a, 'b>, ()>; pub trait State<T, E: Send + Sync + 'static> { fn on_start(&mut self, _data: StateData<T>) {} fn on_stop(&mut self, _data: StateData<T>) {} fn on_pause(&mut self, _data: StateData<T>) {} fn on_resume(&mut self, _data: StateData<T>) {} fn handle_event(&mut self, _data: StateData<T>, _event: StateEvent<E>) -> Trans<T, E> { Trans::None } fn fixed_update(&mut self, _data: StateData<T>) -> Trans<T, E> { Trans::None } fn update(&mut self, _data: StateData<T>) -> Trans<T, E> { Trans::None } } pub trait EmptyState { fn on_start(&mut self, _data: StateData<()>) {} fn on_stop(&mut self, _data: StateData<()>) {} fn on_pause(&mut self, _data: StateData<()>) {} fn on_resume(&mut self, _data: StateData<()>) {} fn handle_event(&mut self, _data: StateData<()>, event: StateEvent<()>) -> EmptyTrans { if let StateEvent::Window(event) = &event { if is_close_requested(&event) { Trans::Quit } else { Trans::None } } else { Trans::None } } fn fixed_update(&mut self, _data: StateData<()>) -> EmptyTrans { Trans::None } fn update(&mut self, _data: StateData<()>) -> EmptyTrans { Trans::None } } impl<T: EmptyState> State<(), ()> for T { fn on_start(&mut self, data: StateData<()>) { self.on_start(data) } fn on_stop(&mut self, data: StateData<()>) { self.on_stop(data) } fn on_pause(&mut self, data: StateData<()>) { self.on_pause(data) } fn on_resume(&mut self, data: StateData<()>) { self.on_resume(data) } fn handle_event(&mut self, data: StateData<()>, event: StateEvent<()>) -> EmptyTrans { 
self.handle_event(data, event) } fn fixed_update(&mut self, data: StateData<()>) -> EmptyTrans { self.fixed_update(data) } fn update(&mut self, data: StateData<()>) -> EmptyTrans { self.update(data) } } pub trait SimpleState<'a, 'b> { fn on_start(&mut self, _data: StateData<GameData>) {} fn on_stop(&mut self, _data: StateData<GameData>) {} fn on_pause(&mut self, _data: StateData<GameData>) {} fn on_resume(&mut self, _data: StateData<GameData>) {} fn handle_event( &mut self, _data: StateData<GameData>, event: StateEvent<()>, ) -> SimpleTrans<'a, 'b> { if let StateEvent::Window(event) = &event { if is_close_requested(&event) { Trans::Quit } else { Trans::None } } else { Trans::None } } fn fixed_update(&mut self, _data: StateData<GameData>) -> SimpleTrans<'a, 'b> { Trans::None } fn update(&mut self, _data: &mut StateData<GameData>) -> SimpleTrans<'a, 'b> { Trans::None } } impl<'a, 'b, T: SimpleState<'a, 'b>> State<GameData<'a, 'b>, ()> for T { fn on_start(&mut self, data: StateData<GameData>) { self.on_start(data) } fn on_stop(&mut self, data: StateData<GameData>) { self.on_stop(data) } fn on_pause(&mut self, data: StateData<GameData>) { self.on_pause(data) } fn on_resume(&mut self, data: StateData<GameData>) { self.on_resume(data) } fn handle_event( &mut self, data: StateData<GameData>, event: StateEvent<()>, ) -> SimpleTrans<'a, 'b> { self.handle_event(data, event) } fn fixed_update(&mut self, data: StateData<GameData>) -> SimpleTrans<'a, 'b> { self.fixed_update(data) } fn update(&mut self, mut data: StateData<GameData>) -> SimpleTrans<'a, 'b> { let r = self.update(&mut data); data.data.update(&data.world); r } } #[derive(Derivative)] #[derivative(Debug)] pub struct StateMachine<'a, T, E> { running: bool, #[derivative(Debug = "ignore")] state_stack: Vec<Box<State<T, E> + 'a>>, } impl<'a, T, E: Send + Sync + 'static> StateMachine<'a, T, E> { pub fn new<S: State<T, E> + 'a>(initial_state: S) -> StateMachine<'a, T, E> { StateMachine { running: false, state_stack: 
vec![Box::new(initial_state)], } } pub fn is_running(&self) -> bool { self.running } pub fn start(&mut self, data: StateData<T>) { if !self.running { let state = self.state_stack.last_mut().unwrap(); state.on_start(data); self.running = true; } } pub fn handle_event(&mut self, data: StateData<T>, event: StateEvent<E>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.handle_event(StateData { world, data }, event), None => Trans::None, }; self.transition(trans, StateData { world, data }); } }
pub fn update(&mut self, data: StateData<T>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.update(StateData { world, data }), None => Trans::None, }; self.transition(trans, StateData { world, data }); } } fn transition(&mut self, request: Trans<T, E>, data: StateData<T>) { if self.running { match request { Trans::None => (), Trans::Pop => self.pop(data), Trans::Push(state) => self.push(state, data), Trans::Switch(state) => self.switch(state, data), Trans::Quit => self.stop(data), } } } fn switch(&mut self, state: Box<State<T, E>>, data: StateData<T>) { if self.running { let StateData { world, data } = data; if let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } self.state_stack.push(state); let state = self.state_stack.last_mut().unwrap(); state.on_start(StateData { world, data }); } } fn push(&mut self, state: Box<State<T, E>>, data: StateData<T>) { if self.running { let StateData { world, data } = data; if let Some(state) = self.state_stack.last_mut() { state.on_pause(StateData { world, data }); } self.state_stack.push(state); let state = self.state_stack.last_mut().unwrap(); state.on_start(StateData { world, data }); } } fn pop(&mut self, data: StateData<T>) { if self.running { let StateData { world, data } = data; if let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } if let Some(state) = self.state_stack.last_mut() { state.on_resume(StateData { world, data }); } else { self.running = false; } } } pub(crate) fn stop(&mut self, data: StateData<T>) { if self.running { let StateData { world, data } = data; while let Some(mut state) = self.state_stack.pop() { state.on_stop(StateData { world, data }); } self.running = false; } } } #[cfg(test)] mod tests { use super::*; struct State1(u8); struct State2; impl State<(), ()> for State1 { fn update(&mut self, _: StateData<()>) -> Trans<(), ()> { if self.0 > 0 { 
self.0 -= 1; Trans::None } else { Trans::Switch(Box::new(State2)) } } } impl State<(), ()> for State2 { fn update(&mut self, _: StateData<()>) -> Trans<(), ()> { Trans::Pop } } #[test] fn switch_pop() { use ecs::prelude::World; let mut world = World::new(); let mut sm = StateMachine::new(State1(7)); sm.start(StateData::new(&mut world, &mut ())); for _ in 0..8 { sm.update(StateData::new(&mut world, &mut ())); assert!(sm.is_running()); } sm.update(StateData::new(&mut world, &mut ())); assert!(!sm.is_running()); } }
pub fn fixed_update(&mut self, data: StateData<T>) { let StateData { world, data } = data; if self.running { let trans = match self.state_stack.last_mut() { Some(state) => state.fixed_update(StateData { world, data }), None => Trans::None, }; self.transition(trans, StateData { world, data }); } }
function_block-full_function
[ { "content": "/// Master trait used to define animation sampling on a component\n\npub trait AnimationSampling: Send + Sync + 'static + for<'b> ApplyData<'b> {\n\n /// The interpolation primitive\n\n type Primitive: InterpolationPrimitive + Clone + Copy + Send + Sync + 'static;\n\n /// An independent grouping or type of functions that operate on attributes of a component\n\n ///\n\n /// For example, `translation`, `scaling` and `rotation` are transformation channels independent\n\n /// of each other, even though they all mutate coordinates of a component.\n\n type Channel: Debug + Clone + Hash + Eq + Send + Sync + 'static;\n\n\n\n /// Apply a sample to a channel\n\n fn apply_sample<'a>(\n\n &mut self,\n\n channel: &Self::Channel,\n\n data: &Self::Primitive,\n\n extra: &<Self as ApplyData<'a>>::ApplyData,\n\n );\n\n\n\n /// Get the current sample for a channel\n\n fn current_sample<'a>(\n\n &self,\n", "file_path": "amethyst_animation/src/resources.rs", "rank": 1, "score": 428430.4963150886 }, { "content": "/// One of the three core traits of this crate.\n\n///\n\n/// You want to implement this for every type of asset like\n\n///\n\n/// * `Mesh`\n\n/// * `Texture`\n\n/// * `Terrain`\n\n///\n\n/// and so on. Now, an asset may be available in different formats.\n\n/// That's why we have the `Data` associated type here. You can specify\n\n/// an intermediate format here, like the vertex data for a mesh or the samples\n\n/// for audio data.\n\n///\n\n/// This data is then generated by the `Format` trait.\n\npub trait Asset: Send + Sync + 'static {\n\n /// An identifier for this asset used for debugging.\n\n const NAME: &'static str;\n\n\n\n /// The `Data` type the asset can be created from.\n\n type Data: Send + Sync + 'static;\n\n\n\n /// The ECS storage type to be used. 
You'll want to use `VecStorage` in most cases.\n\n type HandleStorage: UnprotectedStorage<Handle<Self>> + Send + Sync;\n\n}\n\n\n", "file_path": "amethyst_assets/src/asset.rs", "rank": 2, "score": 385353.2668401129 }, { "content": "/// A trait for asset sources, which provides\n\n/// methods for loading bytes.\n\npub trait Source: Send + Sync + 'static {\n\n /// This is called to check if an asset has been modified.\n\n ///\n\n /// Returns the modification time as seconds since `UNIX_EPOCH`.\n\n fn modified(&self, path: &str) -> Result<u64>;\n\n\n\n /// Loads the bytes given a path.\n\n ///\n\n /// The id should always use `/` as separator in paths.\n\n fn load(&self, path: &str) -> Result<Vec<u8>>;\n\n\n\n /// Returns both the result of `load` and `modified` as a tuple.\n\n /// There's a default implementation which just calls both methods,\n\n /// but you may be able to provide a more optimized version yourself.\n\n fn load_with_metadata(&self, path: &str) -> Result<(Vec<u8>, u64)> {\n\n #[cfg(feature = \"profiler\")]\n\n profile_scope!(\"source_load_asset_with_metadata\");\n\n\n\n let m = self.modified(path)?;\n\n let b = self.load(path)?;\n\n\n\n Ok((b, m))\n\n }\n\n}\n", "file_path": "amethyst_assets/src/source/mod.rs", "rank": 3, "score": 381571.6734286628 }, { "content": "/// Trait used by the asset processor to convert any user supplied mesh representation into an\n\n/// actual `Mesh`.\n\n///\n\n/// This allows the user to create their own vertex attributes, and have the amethyst asset and\n\n/// render systems be able to convert it into a `Mesh` that can be used from any applicable\n\n/// pass.\n\npub trait MeshCreator: Send + Sync + Debug + 'static {\n\n /// Build a mesh given a `Renderer`\n\n fn build(self: Box<Self>, renderer: &mut Renderer) -> ::error::Result<Mesh>;\n\n\n\n /// Clone a boxed version of this object\n\n fn box_clone(&self) -> Box<MeshCreator>;\n\n}\n\n\n\nimpl Clone for Box<MeshCreator> {\n\n fn clone(&self) -> Box<MeshCreator> {\n\n 
self.box_clone()\n\n }\n\n}\n\n\n\n/// Mesh creator for `VertexBufferCombination`.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct ComboMeshCreator {\n\n combo: VertexBufferCombination,\n\n}\n\n\n", "file_path": "amethyst_renderer/src/formats/mesh.rs", "rank": 4, "score": 366997.64418340405 }, { "content": "pub fn add_graphics_to_world(world: &mut World) {\n\n initialise_camera(world);\n\n\n\n let assets = world.read_resource::<Assets>().clone();\n\n\n\n // Add teapot and lid to scene\n\n for mesh in vec![assets.lid.clone(), assets.teapot.clone()] {\n\n let mut trans = Transform::default();\n\n trans.rotation = Quaternion::from(Euler::new(Deg(90.0), Deg(-90.0), Deg(0.0))).into();\n\n trans.translation = Vector3::new(5.0, 5.0, 0.0);\n\n\n\n world\n\n .create_entity()\n\n .with(mesh)\n\n .with(assets.red.clone())\n\n .with(trans)\n\n .with(GlobalTransform::default())\n\n .build();\n\n }\n\n\n", "file_path": "examples/custom_game_data/graphic.rs", "rank": 5, "score": 363416.1067753678 }, { "content": "pub fn load_assets(world: &mut World) -> ProgressCounter {\n\n let mut progress = ProgressCounter::default();\n\n let assets = {\n\n let mesh_storage = world.read_resource();\n\n let tex_storage = world.read_resource();\n\n let font_storage = world.read_resource();\n\n let mat_defaults = world.read_resource::<MaterialDefaults>();\n\n let loader = world.read_resource::<Loader>();\n\n\n\n let red = loader.load_from_data([1.0, 0.0, 0.0, 1.0].into(), &mut progress, &tex_storage);\n\n let red = Material {\n\n albedo: red,\n\n ..mat_defaults.0.clone()\n\n };\n\n\n\n let white = loader.load_from_data([1.0, 1.0, 1.0, 1.0].into(), &mut progress, &tex_storage);\n\n let white = Material {\n\n albedo: white,\n\n ..mat_defaults.0.clone()\n\n };\n", "file_path": "examples/custom_game_data/graphic.rs", "rank": 6, "score": 349644.5931241682 }, { "content": "/// The `Reload` trait provides a method which checks if an asset needs to be reloaded.\n\npub trait Reload<A: 
Asset>: ReloadClone<A> + Send + Sync + 'static {\n\n /// Checks if a reload is necessary.\n\n fn needs_reload(&self) -> bool;\n\n /// Returns the asset name.\n\n fn name(&self) -> String;\n\n /// Returns the format name.\n\n fn format(&self) -> &'static str;\n\n /// Reloads the asset.\n\n fn reload(self: Box<Self>) -> Result<FormatValue<A>>;\n\n}\n\n\n", "file_path": "amethyst_assets/src/reload.rs", "rank": 7, "score": 348694.11029828666 }, { "content": "/// Initialise audio in the world. This includes the background track and the\n\n/// sound effects.\n\npub fn initialise_audio(world: &mut World) {\n\n use {AUDIO_BOUNCE, AUDIO_MUSIC, AUDIO_SCORE};\n\n\n\n let (sound_effects, music) = {\n\n let loader = world.read_resource::<Loader>();\n\n\n\n let mut sink = world.write_resource::<AudioSink>();\n\n sink.set_volume(0.25); // Music is a bit loud, reduce the volume.\n\n\n\n let music = AUDIO_MUSIC\n\n .iter()\n\n .map(|file| load_audio_track(&loader, &world, file))\n\n .collect::<Vec<_>>()\n\n .into_iter()\n\n .cycle();\n\n let music = Music { music };\n\n\n\n let sound = Sounds {\n\n bounce_sfx: load_audio_track(&loader, &world, AUDIO_BOUNCE),\n\n score_sfx: load_audio_track(&loader, &world, AUDIO_SCORE),\n", "file_path": "examples/pong/audio.rs", "rank": 8, "score": 321361.3246311073 }, { "content": "/// Initialise audio in the world. 
This includes the background track and the\n\n/// sound effects.\n\npub fn initialise_audio(world: &mut World) {\n\n use {AUDIO_BOUNCE, AUDIO_MUSIC, AUDIO_SCORE};\n\n\n\n let (sound_effects, music) = {\n\n let loader = world.read_resource::<Loader>();\n\n\n\n let mut sink = world.write_resource::<AudioSink>();\n\n sink.set_volume(0.25); // Music is a bit loud, reduce the volume.\n\n\n\n let music = AUDIO_MUSIC\n\n .iter()\n\n .map(|file| load_audio_track(&loader, &world, file))\n\n .collect::<Vec<_>>()\n\n .into_iter()\n\n .cycle();\n\n let music = Music { music };\n\n\n\n let sound = Sounds {\n\n bounce_sfx: load_audio_track(&loader, &world, AUDIO_BOUNCE),\n\n score_sfx: load_audio_track(&loader, &world, AUDIO_SCORE),\n", "file_path": "examples/appendix_a/audio.rs", "rank": 9, "score": 321361.3246311073 }, { "content": "pub fn is_close_requested(event: &Event) -> bool {\n\n match *event {\n\n Event::WindowEvent { ref event, .. } => match *event {\n\n WindowEvent::CloseRequested => true,\n\n _ => false,\n\n },\n\n _ => false,\n\n }\n\n}\n", "file_path": "amethyst_input/src/util.rs", "rank": 10, "score": 320060.9268277331 }, { "content": "/// The `Tracker` trait which will be used by the loader to report\n\n/// back to `Progress`.\n\npub trait Tracker: Send + 'static {\n\n // TODO: maybe add handles as parameters?\n\n /// Called if the asset could be imported.\n\n fn success(self: Box<Self>);\n\n /// Called if the asset couldn't be imported to an error.\n\n fn fail(\n\n self: Box<Self>,\n\n handle_id: u32,\n\n asset_type_name: &'static str,\n\n asset_name: String,\n\n error: Error,\n\n );\n\n}\n\n\n\nimpl Tracker for () {\n\n fn success(self: Box<Self>) {}\n\n fn fail(\n\n self: Box<Self>,\n\n handle_id: u32,\n\n asset_type_name: &'static str,\n\n asset_name: String,\n\n error: Error,\n\n ) {\n\n show_error(handle_id, asset_type_name, &asset_name, &error);\n\n error!(\"Note: to handle the error, use a `Progress` other than `()`\");\n\n }\n\n}\n\n\n", "file_path": 
"amethyst_assets/src/progress.rs", "rank": 11, "score": 317421.0700706449 }, { "content": "/// Create mesh\n\npub fn create_mesh_asset(data: MeshData, renderer: &mut Renderer) -> Result<ProcessingState<Mesh>> {\n\n let data = match data {\n\n MeshData::PosColor(ref vertices) => {\n\n let mb = MeshBuilder::new(vertices);\n\n renderer.create_mesh(mb)\n\n }\n\n MeshData::PosTex(ref vertices) => {\n\n let mb = MeshBuilder::new(vertices);\n\n renderer.create_mesh(mb)\n\n }\n\n MeshData::PosNormTex(ref vertices) => {\n\n let mb = MeshBuilder::new(vertices);\n\n renderer.create_mesh(mb)\n\n }\n\n MeshData::PosNormTangTex(ref vertices) => {\n\n let mb = MeshBuilder::new(vertices);\n\n renderer.create_mesh(mb)\n\n }\n\n MeshData::Creator(creator) => creator.build(renderer),\n\n };\n\n\n\n data.map(|m| ProcessingState::Loaded(m))\n\n .chain_err(|| \"Failed to build mesh\")\n\n}\n\n\n", "file_path": "amethyst_renderer/src/formats/mesh.rs", "rank": 12, "score": 305166.79279351264 }, { "content": "/// A format, providing a conversion from bytes to asset data, which is then\n\n/// in turn accepted by `Asset::from_data`. Examples for formats are\n\n/// `Png`, `Obj` and `Wave`.\n\npub trait Format<A: Asset>: Send + 'static {\n\n /// A unique identifier for this format.\n\n const NAME: &'static str;\n\n /// Options specific to the format, which are passed to `import`.\n\n /// E.g. 
for textures this would be stuff like mipmap levels and\n\n /// sampler info.\n\n type Options: Send + 'static;\n\n\n\n /// Reads the given bytes and produces asset data.\n\n ///\n\n /// ## Reload\n\n ///\n\n /// The reload structure has metadata which allows the asset management\n\n /// to reload assets if necessary (for hot reloading).\n\n /// You should only create this if `create_reload` is `true`.\n\n /// Also, the parameter is just a request, which means you can also return `None`.\n\n fn import(\n\n &self,\n\n name: String,\n\n source: Arc<Source>,\n", "file_path": "amethyst_assets/src/asset.rs", "rank": 13, "score": 301872.18294708413 }, { "content": "pub fn is_key_down(event: &Event, key_code: VirtualKeyCode) -> bool {\n\n let op = get_key(event);\n\n if let Some((key, state)) = op {\n\n return key == key_code && state == ElementState::Pressed;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "amethyst_input/src/util.rs", "rank": 14, "score": 299071.2221922393 }, { "content": "/// Trait implemented by all valid vertex formats.\n\npub trait VertexFormat: Pod + Sized + Send + Sync {\n\n /// List of all attributes formats with name and offset.\n\n const ATTRIBUTES: Attributes<'static>;\n\n\n\n /// Returns the size of a single vertex in bytes.\n\n #[inline]\n\n fn size() -> usize {\n\n use std::mem;\n\n mem::size_of::<Self>()\n\n }\n\n\n\n /// Returns attribute of vertex by type\n\n #[inline]\n\n fn attribute<F>() -> AttributeFormat\n\n where\n\n F: Attribute,\n\n Self: With<F>,\n\n {\n\n <Self as With<F>>::FORMAT\n\n }\n\n}\n\n\n", "file_path": "amethyst_renderer/src/vertex.rs", "rank": 15, "score": 295997.9530636292 }, { "content": "fn initialise_camera(world: &mut World) {\n\n let mut local = Transform::default();\n\n local.translation = Vector3::new(0., -20., 10.);\n\n local.rotation = Quaternion::from_angle_x(Deg(75.)).into();\n\n world\n\n .create_entity()\n\n .with(Camera::from(Projection::perspective(1.3, Deg(60.0))))\n\n .with(local)\n\n 
.with(GlobalTransform::default())\n\n .build();\n\n}\n\n\n", "file_path": "examples/custom_game_data/graphic.rs", "rank": 16, "score": 294566.28384909185 }, { "content": "/// Input devices can sometimes generate a lot of motion events per frame, these are\n\n/// useless as the extra precision is wasted and these events tend to overflow our\n\n/// otherwise very adequate event buffers. So this function removes and compresses redundant\n\n/// events.\n\nfn compress_events(vec: &mut Vec<Event>, new_event: Event) {\n\n match new_event {\n\n Event::WindowEvent { ref event, .. } => match event {\n\n &WindowEvent::CursorMoved { .. } => {\n\n let mut iter = vec.iter_mut();\n\n while let Some(stored_event) = iter.next_back() {\n\n match stored_event {\n\n &mut Event::WindowEvent {\n\n event: WindowEvent::CursorMoved { .. },\n\n ..\n\n } => {\n\n mem::replace(stored_event, new_event.clone());\n\n return;\n\n }\n\n\n\n &mut Event::WindowEvent {\n\n event: WindowEvent::AxisMotion { .. },\n\n ..\n\n } => {}\n\n\n", "file_path": "amethyst_renderer/src/system.rs", "rank": 17, "score": 283690.97595657257 }, { "content": "pub fn brown_bat(world: &mut World, sprite_sheet_id: u64) -> Handle<Animation<SpriteRender>> {\n\n let sprite_indicies = (6..11)\n\n .into_iter()\n\n .map(|n| SpriteRenderPrimitive::SpriteIndex(n))\n\n .collect::<Vec<SpriteRenderPrimitive>>();\n\n\n\n let sprite_index_sampler = {\n\n Sampler {\n\n input: vec![0., 0.1, 0.2, 0.3, 0.4],\n\n function: InterpolationFunction::Step,\n\n output: sprite_indicies,\n\n }\n\n };\n\n\n\n let sprite_sheet_sampler = Sampler {\n\n input: vec![0.],\n\n function: InterpolationFunction::Step,\n\n output: vec![SpriteRenderPrimitive::SpriteSheet(sprite_sheet_id)],\n\n };\n\n\n", "file_path": "examples/sprites/animation.rs", "rank": 18, "score": 277974.3234702712 }, { "content": "pub fn grey_bat(world: &mut World, sprite_sheet_id: u64) -> Handle<Animation<SpriteRender>> {\n\n let sprite_indicies = [5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 4]\n\n 
.into_iter()\n\n .map(|&n| SpriteRenderPrimitive::SpriteIndex(n as usize))\n\n .collect::<Vec<SpriteRenderPrimitive>>();\n\n\n\n let sprite_index_sampler = {\n\n Sampler {\n\n input: vec![0., 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4],\n\n function: InterpolationFunction::Step,\n\n output: sprite_indicies,\n\n }\n\n };\n\n\n\n let sprite_sheet_sampler = Sampler {\n\n input: vec![0., 2.3],\n\n function: InterpolationFunction::Step,\n\n output: vec![SpriteRenderPrimitive::SpriteSheet(sprite_sheet_id)],\n\n };\n\n\n", "file_path": "examples/sprites/animation.rs", "rank": 19, "score": 277974.3234702712 }, { "content": "pub fn set_skinning_buffers(effect: &mut Effect, mesh: &Mesh) -> bool {\n\n set_attribute_buffers(effect, mesh, &ATTRIBUTES)\n\n}\n", "file_path": "amethyst_renderer/src/pass/skinning.rs", "rank": 20, "score": 270001.63251337735 }, { "content": "pub fn get_key(event: &Event) -> Option<(VirtualKeyCode, ElementState)> {\n\n match *event {\n\n Event::WindowEvent { ref event, .. } => match *event {\n\n WindowEvent::KeyboardInput {\n\n input:\n\n KeyboardInput {\n\n virtual_keycode: Some(ref virtual_keycode),\n\n state,\n\n ..\n\n },\n\n ..\n\n } => Some((*virtual_keycode, state)),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "amethyst_input/src/util.rs", "rank": 21, "score": 269567.39049747743 }, { "content": "/// Hide the cursor, so it's invisible while player. 
Can be used at the same time as grab_cursor.\n\npub fn set_mouse_cursor_none(msg: &mut WindowMessages) {\n\n set_mouse_cursor(msg, MouseCursor::NoneCursor);\n\n}\n\n\n", "file_path": "amethyst_renderer/src/mouse.rs", "rank": 22, "score": 260815.26171641765 }, { "content": "/// Initialise the camera.\n\nfn initialise_camera(world: &mut World) {\n\n use amethyst::core::cgmath::{Matrix4, Vector3};\n\n let (arena_height, arena_width) = {\n\n let config = &world.read_resource::<ArenaConfig>();\n\n (config.height, config.width)\n\n };\n\n world\n\n .create_entity()\n\n .with(Camera::from(Projection::orthographic(\n\n 0.0,\n\n arena_width,\n\n arena_height,\n\n 0.0,\n\n )))\n\n .with(GlobalTransform(\n\n Matrix4::from_translation(Vector3::new(0.0, 0.0, 1.0)).into(),\n\n ))\n\n .build();\n\n}\n\n\n", "file_path": "examples/appendix_a/pong.rs", "rank": 24, "score": 252129.0489819817 }, { "content": "fn initialise_score(world: &mut World) {\n\n let font = world.read_resource::<Loader>().load(\n\n \"font/square.ttf\",\n\n TtfFormat,\n\n Default::default(),\n\n (),\n\n &world.read_resource(),\n\n );\n\n let p1_transform = UiTransform::new(\n\n \"P1\".to_string(),\n\n Anchor::TopMiddle,\n\n -50.,\n\n 50.,\n\n 1.,\n\n 55.,\n\n 50.,\n\n 0,\n\n );\n\n\n\n let p2_transform = UiTransform::new(\n", "file_path": "examples/pong/pong.rs", "rank": 25, "score": 252129.0489819817 }, { "content": "/// Hide the cursor, so it's invisible while playing.\n\nfn hide_cursor(world: &mut World) {\n\n use amethyst::winit::CursorState;\n\n\n\n world\n\n .write_resource::<WindowMessages>()\n\n .send_command(|win| {\n\n if let Err(err) = win.set_cursor_state(CursorState::Hide) {\n\n eprintln!(\"Unable to make cursor hidden! 
Error: {:?}\", err);\n\n }\n\n });\n\n}\n\n\n", "file_path": "examples/pong/pong.rs", "rank": 26, "score": 252129.0489819817 }, { "content": "/// Initialise the camera.\n\nfn initialise_camera(world: &mut World) {\n\n use amethyst::core::cgmath::{Matrix4, Vector3};\n\n world\n\n .create_entity()\n\n .with(Camera::from(Projection::orthographic(\n\n 0.0,\n\n ARENA_WIDTH,\n\n ARENA_HEIGHT,\n\n 0.0,\n\n )))\n\n .with(GlobalTransform(\n\n Matrix4::from_translation(Vector3::new(0.0, 0.0, 1.0)).into(),\n\n ))\n\n .build();\n\n}\n\n\n", "file_path": "examples/pong/pong.rs", "rank": 27, "score": 252129.0489819817 }, { "content": "/// Initialises one ball in the middle-ish of the arena.\n\nfn initialise_balls(world: &mut World) {\n\n let (arena_width, arena_height) = {\n\n let config = world.read_resource::<ArenaConfig>();\n\n (config.width, config.height)\n\n };\n\n let (velocity_x, velocity_y, radius, colour) = {\n\n let config = world.read_resource::<BallConfig>();\n\n let c: [f32; 4] = [\n\n config.colour.0,\n\n config.colour.1,\n\n config.colour.2,\n\n config.colour.3,\n\n ];\n\n (config.velocity.x, config.velocity.y, config.radius, c)\n\n };\n\n // Create the mesh, material and translation.\n\n let mesh = create_mesh(world, generate_circle_vertices(radius, 16));\n\n let material = create_colour_material(world, colour);\n\n let mut local_transform = Transform::default();\n\n local_transform.translation = Vector3::new(arena_width / 2.0, arena_height / 2.0, 0.0);\n", "file_path": "examples/appendix_a/pong.rs", "rank": 28, "score": 252129.0489819817 }, { "content": "/// Hide the cursor, so it's invisible while playing.\n\nfn hide_cursor(world: &mut World) {\n\n use amethyst::winit::CursorState;\n\n\n\n world\n\n .write_resource::<WindowMessages>()\n\n .send_command(|win| {\n\n if let Err(err) = win.set_cursor_state(CursorState::Hide) {\n\n eprintln!(\"Unable to make cursor hidden! 
Error: {:?}\", err);\n\n }\n\n });\n\n}\n\n\n", "file_path": "examples/appendix_a/pong.rs", "rank": 29, "score": 252129.0489819817 }, { "content": "/// Initialises one paddle on the left, and one paddle on the right.\n\nfn initialise_paddles(world: &mut World) {\n\n let mut left_transform = Transform::default();\n\n let mut right_transform = Transform::default();\n\n\n\n let (arena_height, arena_width) = {\n\n let config = &world.read_resource::<ArenaConfig>();\n\n (config.height, config.width)\n\n };\n\n let (\n\n left_height,\n\n left_width,\n\n left_velocity,\n\n left_colour,\n\n right_height,\n\n right_width,\n\n right_velocity,\n\n right_colour,\n\n ) = {\n\n let config = &world.read_resource::<PaddlesConfig>();\n\n let cl: [f32; 4] = [\n", "file_path": "examples/appendix_a/pong.rs", "rank": 30, "score": 252129.0489819817 }, { "content": "fn initialise_score(world: &mut World) {\n\n let font = world.read_resource::<Loader>().load(\n\n \"font/square.ttf\",\n\n TtfFormat,\n\n Default::default(),\n\n (),\n\n &world.read_resource(),\n\n );\n\n let p1_transform = UiTransform::new(\n\n \"P1\".to_string(),\n\n Anchor::TopMiddle,\n\n -50.,\n\n 50.,\n\n 1.,\n\n 55.,\n\n 50.,\n\n 0,\n\n );\n\n\n\n let p2_transform = UiTransform::new(\n", "file_path": "examples/appendix_a/pong.rs", "rank": 31, "score": 252129.0489819817 }, { "content": "fn initialise_camera(world: &mut World) {\n\n use amethyst::core::cgmath::{Deg, Matrix4};\n\n let transform =\n\n Matrix4::from_translation([0., -20., 10.].into()) * Matrix4::from_angle_x(Deg(75.96));\n\n world\n\n .create_entity()\n\n .with(Camera::from(Projection::perspective(1.0, Deg(60.0))))\n\n .with(GlobalTransform(transform.into()))\n\n .build();\n\n}\n\n\n", "file_path": "examples/asset_loading/main.rs", "rank": 32, "score": 250002.15652954316 }, { "content": "/// Initialise the camera.\n\nfn initialise_camera(world: &mut World) {\n\n world\n\n .create_entity()\n\n .with(Camera::from(Projection::orthographic(\n\n 0.0,\n\n 
ARENA_WIDTH,\n\n ARENA_HEIGHT,\n\n 0.0,\n\n )))\n\n .with(GlobalTransform(\n\n Matrix4::from_translation(Vector3::new(0.0, 0.0, 1.0)).into(),\n\n ))\n\n .build();\n\n}\n\n\n", "file_path": "examples/pong_tutorial_02/pong.rs", "rank": 33, "score": 250002.15652954316 }, { "content": "/// Initialise the camera.\n\nfn initialise_camera(world: &mut World) {\n\n world\n\n .create_entity()\n\n .with(Camera::from(Projection::orthographic(\n\n 0.0,\n\n ARENA_WIDTH,\n\n ARENA_HEIGHT,\n\n 0.0,\n\n )))\n\n .with(GlobalTransform(\n\n Matrix4::from_translation(Vector3::new(0.0, 0.0, 1.0)).into(),\n\n ))\n\n .build();\n\n}\n\n\n", "file_path": "examples/pong_tutorial_03/pong.rs", "rank": 34, "score": 250002.15652954316 }, { "content": "/// Adds lights to the scene.\n\nfn initialise_lights(world: &mut World) {\n\n let light: Light = PointLight {\n\n intensity: 100.0,\n\n radius: 1.0,\n\n color: Rgba::white(),\n\n ..Default::default()\n\n }.into();\n\n\n\n let transform = Matrix4::from_translation([5.0, -20.0, 15.0].into());\n\n\n\n // Add point light.\n\n world\n\n .create_entity()\n\n .with(light)\n\n .with(GlobalTransform(transform.into()))\n\n .build();\n\n}\n", "file_path": "examples/asset_loading/main.rs", "rank": 35, "score": 250002.15652954322 }, { "content": "/// This method initialises a camera which will view our sprite.\n\nfn initialise_camera(world: &mut World) -> Entity {\n\n let (width, height) = {\n\n let dim = world.read_resource::<ScreenDimensions>();\n\n (dim.width(), dim.height())\n\n };\n\n world\n\n .create_entity()\n\n .with(Camera::from(Projection::orthographic(\n\n 0.0, width, height, 0.0,\n\n )))\n\n .with(GlobalTransform(Matrix4::from_translation(\n\n Vector3::new(0.0, 0.0, 1.0).into(),\n\n )))\n\n .build()\n\n}\n\n\n", "file_path": "examples/sprites/main.rs", "rank": 36, "score": 245598.7876726934 }, { "content": "fn load_sprite_sheet(world: &mut World) -> SpriteSheetHandle {\n\n use {BALL_RADIUS, PADDLE_HEIGHT, PADDLE_WIDTH};\n\n\n\n // Load the sprite 
sheet necessary to render the graphics.\n\n // The texture is the pixel data\n\n // `sprite_sheet` is the layout of the sprites on the image\n\n\n\n // `texture_handle` is a cloneable reference to the texture\n\n let texture_handle = {\n\n let loader = world.read_resource::<Loader>();\n\n let texture_storage = world.read_resource::<AssetStorage<Texture>>();\n\n loader.load(\n\n \"texture/pong_spritesheet.png\",\n\n PngFormat,\n\n Default::default(),\n\n (),\n\n &texture_storage,\n\n )\n\n };\n\n // `texture_id` is a application defined ID given to the texture to store in the `World`.\n", "file_path": "examples/pong/pong.rs", "rank": 37, "score": 239569.81153197537 }, { "content": "/// Loads and returns a handle to a sprite sheet.\n\n///\n\n/// The sprite sheet consists of two parts:\n\n///\n\n/// * texture: the pixel data\n\n/// * `SpriteSheet`: the layout information of the sprites on the image\n\nfn load_sprite_sheet(world: &mut World) -> LoadedSpriteSheet {\n\n let sprite_sheet_index = 0;\n\n\n\n // Store texture in the world's `MaterialTextureSet` resource (singleton hash map)\n\n // This is used by the `DrawSprite` pass to look up the texture from the `SpriteSheet`\n\n let texture = png_loader::load(\"texture/bat_semi_transparent.png\", world);\n\n world\n\n .write_resource::<MaterialTextureSet>()\n\n .insert(sprite_sheet_index, texture);\n\n\n\n let sprite_w = 32.;\n\n let sprite_h = 32.;\n\n let sprite_sheet_definition = SpriteSheetDefinition::new(sprite_w, sprite_h, 2, 6, false);\n\n\n\n let sprite_sheet = sprite_sheet_loader::load(sprite_sheet_index, &sprite_sheet_definition);\n\n let sprite_count = sprite_sheet.sprites.len();\n\n\n\n let sprite_sheet_handle = {\n\n let loader = world.read_resource::<Loader>();\n\n loader.load_from_data(\n", "file_path": "examples/sprites_ordered/main.rs", "rank": 38, "score": 237673.9483429533 }, { "content": "fn load_sprite_sheet(world: &mut World) -> SpriteSheetHandle {\n\n // Load the sprite sheet necessary to render 
the graphics.\n\n // The texture is the pixel data\n\n // `sprite_sheet` is the layout of the sprites on the image\n\n\n\n // `texture_handle` is a cloneable reference to the texture\n\n let texture_handle = {\n\n let loader = world.read_resource::<Loader>();\n\n let texture_storage = world.read_resource::<AssetStorage<Texture>>();\n\n loader.load(\n\n \"texture/pong_spritesheet.png\",\n\n PngFormat,\n\n Default::default(),\n\n (),\n\n &texture_storage,\n\n )\n\n };\n\n // `texture_id` is a application defined ID given to the texture to store in the `World`.\n\n // This is needed to link the texture to the sprite_sheet.\n\n let texture_id = 0;\n", "file_path": "examples/pong_tutorial_03/pong.rs", "rank": 39, "score": 237669.14770001083 }, { "content": "fn load_sprite_sheet(world: &mut World) -> SpriteSheetHandle {\n\n // Load the sprite sheet necessary to render the graphics.\n\n // The texture is the pixel data\n\n // `sprite_sheet` is the layout of the sprites on the image\n\n\n\n // `texture_handle` is a cloneable reference to the texture\n\n let texture_handle = {\n\n let loader = world.read_resource::<Loader>();\n\n let texture_storage = world.read_resource::<AssetStorage<Texture>>();\n\n loader.load(\n\n \"texture/pong_spritesheet.png\",\n\n PngFormat,\n\n Default::default(),\n\n (),\n\n &texture_storage,\n\n )\n\n };\n\n // `texture_id` is a application defined ID given to the texture to store in the `World`.\n\n // This is needed to link the texture to the sprite_sheet.\n\n let texture_id = 0;\n", "file_path": "examples/pong_tutorial_02/pong.rs", "rank": 40, "score": 237669.14770001083 }, { "content": "/// Initialises one paddle on the left, and one paddle on the right.\n\nfn initialise_paddles(world: &mut World, sprite_sheet_handle: SpriteSheetHandle) {\n\n use {PADDLE_HEIGHT, PADDLE_VELOCITY, PADDLE_WIDTH};\n\n\n\n let mut left_transform = Transform::default();\n\n let mut right_transform = Transform::default();\n\n\n\n // Correctly position the 
paddles.\n\n let y = (ARENA_HEIGHT - PADDLE_HEIGHT) / 2.0;\n\n left_transform.translation = Vector3::new(PADDLE_WIDTH * 0.5, y, 0.0);\n\n right_transform.translation = Vector3::new(ARENA_WIDTH - PADDLE_WIDTH * 0.5, y, 0.0);\n\n\n\n // Assign the sprites for the paddles\n\n let sprite_render_left = SpriteRender {\n\n sprite_sheet: sprite_sheet_handle.clone(),\n\n sprite_number: 0, // paddle is the first sprite in the sprite_sheet\n\n flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n\n let sprite_render_right = SpriteRender {\n", "file_path": "examples/pong/pong.rs", "rank": 41, "score": 231811.15830537543 }, { "content": "/// Initialises one ball in the middle-ish of the arena.\n\nfn initialise_ball(world: &mut World, sprite_sheet_handle: SpriteSheetHandle) {\n\n use {ARENA_HEIGHT, ARENA_WIDTH, BALL_RADIUS, BALL_VELOCITY_X, BALL_VELOCITY_Y};\n\n\n\n // Create the translation.\n\n let mut local_transform = Transform::default();\n\n local_transform.translation = Vector3::new(ARENA_WIDTH / 2.0, ARENA_HEIGHT / 2.0, 0.0);\n\n\n\n // Assign the sprite for the ball\n\n let sprite_render = SpriteRender {\n\n sprite_sheet: sprite_sheet_handle,\n\n sprite_number: 1, // ball is the second sprite on the sprite_sheet\n\n flip_horizontal: true,\n\n flip_vertical: false,\n\n };\n\n\n\n world\n\n .create_entity()\n\n .with(sprite_render)\n\n .with(Ball {\n\n radius: BALL_RADIUS,\n\n velocity: [BALL_VELOCITY_X, BALL_VELOCITY_Y],\n\n })\n\n .with(local_transform)\n\n .build();\n\n}\n\n\n", "file_path": "examples/pong/pong.rs", "rank": 42, "score": 231811.15830537543 }, { "content": "/// Initialises one paddle on the left, and one paddle on the right.\n\nfn initialise_paddles(world: &mut World, sprite_sheet_handle: SpriteSheetHandle) {\n\n let mut left_transform = Transform::default();\n\n let mut right_transform = Transform::default();\n\n\n\n // Correctly position the paddles.\n\n let y = ARENA_HEIGHT / 2.0;\n\n left_transform.translation = Vector3::new(PADDLE_WIDTH * 
0.5, y, 0.0);\n\n right_transform.translation = Vector3::new(ARENA_WIDTH - PADDLE_WIDTH * 0.5, y, 0.0);\n\n\n\n // Assign the sprites for the paddles\n\n let sprite_render_left = SpriteRender {\n\n sprite_sheet: sprite_sheet_handle.clone(),\n\n sprite_number: 0, // paddle is the first sprite in the sprite_sheet\n\n flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n\n let sprite_render_right = SpriteRender {\n\n sprite_sheet: sprite_sheet_handle,\n\n sprite_number: 0,\n", "file_path": "examples/pong_tutorial_03/pong.rs", "rank": 44, "score": 230009.88060155394 }, { "content": "/// Initialises one paddle on the left, and one paddle on the right.\n\nfn initialise_paddles(world: &mut World, sprite_sheet_handle: SpriteSheetHandle) {\n\n let mut left_transform = Transform::default();\n\n let mut right_transform = Transform::default();\n\n\n\n // Correctly position the paddles.\n\n let y = ARENA_HEIGHT / 2.0;\n\n left_transform.translation = Vector3::new(PADDLE_WIDTH * 0.5, y, 0.0);\n\n right_transform.translation = Vector3::new(ARENA_WIDTH - PADDLE_WIDTH * 0.5, y, 0.0);\n\n\n\n // Assign the sprites for the paddles\n\n let sprite_render_left = SpriteRender {\n\n sprite_sheet: sprite_sheet_handle.clone(),\n\n sprite_number: 0, // paddle is the first sprite in the sprite_sheet\n\n flip_horizontal: false,\n\n flip_vertical: false,\n\n };\n\n\n\n let sprite_render_right = SpriteRender {\n\n sprite_sheet: sprite_sheet_handle,\n\n sprite_number: 0,\n", "file_path": "examples/pong_tutorial_02/pong.rs", "rank": 45, "score": 230009.88060155394 }, { "content": "fn change_cursor_state(msg: &mut WindowMessages, state: CursorState) {\n\n msg.send_command(move |win| {\n\n if let Err(err) = win.set_cursor_state(state) {\n\n error!(\"Unable to change the cursor state! 
Error: {:?}\", err);\n\n }\n\n });\n\n}\n\n\n", "file_path": "amethyst_renderer/src/mouse.rs", "rank": 46, "score": 228367.529563006 }, { "content": "/// Returns a `TextureHandle` to the image.\n\n///\n\n/// # Parameters\n\n///\n\n/// * `name`: Path to the sprite sheet.\n\n/// * `world`: `World` that stores resources.\n\npub fn load<N>(name: N, world: &World) -> TextureHandle\n\nwhere\n\n N: Into<String>,\n\n{\n\n let loader = world.read_resource::<Loader>();\n\n loader.load(\n\n name,\n\n PngFormat,\n\n Default::default(),\n\n (),\n\n &world.read_resource::<AssetStorage<Texture>>(),\n\n )\n\n}\n", "file_path": "examples/sprites/png_loader.rs", "rank": 47, "score": 225788.2722465892 }, { "content": "/// Starts a basic logger outputting to stdout with color on supported platforms.\n\n///\n\n/// If you do not intend on using the logger builtin to Amethyst, it's highly recommended you\n\n/// initialise your own.\n\n///\n\n/// Configuration of the logger can also be controlled via environment variables:\n\n///\n\n/// * AMETHYST_LOG_DISABLE_COLORS - if set, disables colors for the log output\n\n/// * AMETHYST_LOG_LEVEL_FILTER - sets the log level\n\n/// \n\npub fn start_logger(mut config: LoggerConfig) {\n\n if let Ok(_) = env::var(\"AMETHYST_LOG_DISABLE_COLORS\") {\n\n config.use_colors = false;\n\n }\n\n if let Ok(lf) = env::var(\"AMETHYST_LOG_LEVEL_FILTER\") {\n\n use std::str::FromStr;\n\n config.level_filter = LevelFilter::from_str(&lf).unwrap_or(LevelFilter::Debug)\n\n }\n\n let color_config = fern::colors::ColoredLevelConfig::new();\n\n\n\n fern::Dispatch::new()\n\n .format(move |out, message, record| {\n\n out.finish(format_args!(\n\n \"{color}[{level}][{target}] {message}{color_reset}\",\n\n color = if config.use_colors {\n\n format!(\n\n \"\\x1B[{}m\",\n\n color_config.get_color(&record.level()).to_fg_str()\n\n )\n\n } else {\n", "file_path": "src/logger.rs", "rank": 48, "score": 224599.86200685674 }, { "content": "/// Returns a `TextureHandle` to the 
image.\n\n///\n\n/// # Parameters\n\n///\n\n/// * `name`: Path to the sprite sheet.\n\n/// * `world`: `World` that stores resources.\n\npub fn load<N>(name: N, world: &World) -> TextureHandle\n\nwhere\n\n N: Into<String>,\n\n{\n\n let loader = world.read_resource::<Loader>();\n\n loader.load(\n\n name,\n\n PngFormat,\n\n Default::default(),\n\n (),\n\n &world.read_resource::<AssetStorage<Texture>>(),\n\n )\n\n}\n", "file_path": "examples/sprites_ordered/png_loader.rs", "rank": 49, "score": 223940.20189785596 }, { "content": "/// Removes the highlighted text and returns true if anything was deleted..\n\nfn delete_highlighted(edit: &mut TextEditing, text: &mut UiText) -> bool {\n\n if edit.highlight_vector != 0 {\n\n let range = highlighted_bytes(edit, text);\n\n edit.cursor_position = range.start as isize;\n\n edit.highlight_vector = 0;\n\n text.text.drain(range);\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "amethyst_ui/src/text.rs", "rank": 50, "score": 223385.54298566512 }, { "content": "/// Initialize default output\n\npub fn init_output(res: &mut Resources) {\n\n if let Some(o) = default_output() {\n\n res.entry::<AudioSink>()\n\n .or_insert_with(|| AudioSink::new(&o));\n\n res.entry::<Output>().or_insert_with(|| o);\n\n } else {\n\n error!(\"Failed finding a default audio output to hook AudioSink to, audio will not work!\")\n\n }\n\n}\n", "file_path": "amethyst_audio/src/output.rs", "rank": 51, "score": 222346.46528949813 }, { "content": "/// Initialise trait for game data\n\npub trait DataInit<T> {\n\n /// Build game data\n\n fn build(self, world: &mut World) -> T;\n\n}\n\n\n\n/// Default game data\n\npub struct GameData<'a, 'b> {\n\n dispatcher: Dispatcher<'a, 'b>,\n\n}\n\n\n\nimpl<'a, 'b> GameData<'a, 'b> {\n\n /// Create new game data\n\n pub fn new(dispatcher: Dispatcher<'a, 'b>) -> Self {\n\n GameData { dispatcher }\n\n }\n\n\n\n /// Update game data\n\n pub fn update(&mut self, world: &World) {\n\n self.dispatcher.dispatch(&world.res);\n\n 
}\n", "file_path": "src/game_data.rs", "rank": 52, "score": 221873.39231115958 }, { "content": "/// Hide the cursor, so it's invisible while playing. Can't be used at the same time as grab_cursor.\n\npub fn hide_cursor(msg: &mut WindowMessages) {\n\n change_cursor_state(msg, CursorState::Hide);\n\n}\n\n\n", "file_path": "amethyst_renderer/src/mouse.rs", "rank": 53, "score": 220172.58672797304 }, { "content": "/// Grab the cursor to prevent it from going outside the screen.\n\npub fn grab_cursor(msg: &mut WindowMessages) {\n\n change_cursor_state(msg, CursorState::Grab);\n\n}\n\n\n", "file_path": "amethyst_renderer/src/mouse.rs", "rank": 54, "score": 220167.49800849642 }, { "content": "/// Set the cursor back to normal/visible.\n\npub fn release_cursor(msg: &mut WindowMessages) {\n\n change_cursor_state(msg, CursorState::Normal);\n\n}\n\n\n", "file_path": "amethyst_renderer/src/mouse.rs", "rank": 55, "score": 220167.49800849642 }, { "content": "/// Loads and returns a handle to a sprite sheet.\n\n///\n\n/// The sprite sheet consists of two parts:\n\n///\n\n/// * texture: the pixel data\n\n/// * `SpriteSheet`: the layout information of the sprites on the image\n\nfn load_sprite_sheet(world: &mut World) -> (SpriteSheetHandle, u64, usize, f32, f32) {\n\n let sprite_sheet_index = 0;\n\n\n\n // Store texture in the world's `MaterialTextureSet` resource (singleton hash map)\n\n // This is used by the `DrawSprite` pass to look up the texture from the `SpriteSheet`\n\n let texture = png_loader::load(\"texture/bat.32x32.png\", world);\n\n world\n\n .write_resource::<MaterialTextureSet>()\n\n .insert(sprite_sheet_index, texture);\n\n\n\n let sprite_w = 32.;\n\n let sprite_h = 32.;\n\n let sprite_sheet_definition = SpriteSheetDefinition::new(sprite_w, sprite_h, 2, 6, false);\n\n\n\n let sprite_sheet = sprite_sheet_loader::load(sprite_sheet_index, &sprite_sheet_definition);\n\n let sprite_count = sprite_sheet.sprites.len();\n\n\n\n let sprite_sheet_handle = {\n\n let loader = 
world.read_resource::<Loader>();\n\n loader.load_from_data(\n", "file_path": "examples/sprites/main.rs", "rank": 56, "score": 218127.63202376373 }, { "content": "#[doc(hidden)]\n\npub trait VertexData {\n\n const ATTRIBUTES: Attributes<'static>;\n\n\n\n /// Get vertex count in buffer\n\n fn len(&self) -> usize;\n\n\n\n /// Build `VertexBuffer`\n\n fn build(&self, factory: &mut Factory) -> Result<VertexBuffer>;\n\n}\n\n\n", "file_path": "amethyst_renderer/src/mesh.rs", "rank": 57, "score": 215669.75805031287 }, { "content": "/// Construct new vertex data from raw data and vertex format\n\npub fn vertex_data<D, V>(data: D) -> (D, PhantomData<V>)\n\nwhere\n\n D: AsRef<[V]>,\n\n V: VertexFormat,\n\n{\n\n (data, PhantomData)\n\n}\n\n\n\nimpl<D, V> VertexData for (D, PhantomData<V>)\n\nwhere\n\n D: AsRef<[V]>,\n\n V: VertexFormat,\n\n{\n\n const ATTRIBUTES: Attributes<'static> = V::ATTRIBUTES;\n\n\n\n fn len(&self) -> usize {\n\n self.0.as_ref().len()\n\n }\n\n\n\n fn build(&self, factory: &mut Factory) -> Result<VertexBuffer> {\n", "file_path": "amethyst_renderer/src/mesh.rs", "rank": 58, "score": 215158.82743473336 }, { "content": "#[doc(hidden)]\n\npub trait VertexDataSet {\n\n /// Iterator for `VertexBuffer`s built\n\n type VertexBufferIter: Iterator<Item = VertexBuffer>;\n\n\n\n /// Get smalles vertex count across buffers\n\n fn len(&self) -> usize;\n\n\n\n /// Build `VertexBuffer`s\n\n fn build(&self, factory: &mut Factory) -> Result<Self::VertexBufferIter>;\n\n}\n\n\n\nimpl<H> VertexDataSet for (H, ())\n\nwhere\n\n H: VertexData,\n\n{\n\n type VertexBufferIter = Once<VertexBuffer>;\n\n\n\n fn len(&self) -> usize {\n\n self.0.len()\n\n }\n", "file_path": "amethyst_renderer/src/mesh.rs", "rank": 59, "score": 213263.32243243247 }, { "content": "/// Extra data to extract from `World`, for use when applying or fetching a sample\n\npub trait ApplyData<'a> {\n\n /// The actual data, must implement `SystemData`\n\n type ApplyData: SystemData<'a>;\n\n}\n\n\n", "file_path": 
"amethyst_animation/src/resources.rs", "rank": 60, "score": 211639.64193355507 }, { "content": "/// Used to fetch data from the game world for rendering in the pass.\n\npub trait PassData<'a> {\n\n /// The data itself.\n\n type Data: SystemData<'a> + Send;\n\n}\n\n\n", "file_path": "amethyst_renderer/src/pipe/pass.rs", "rank": 61, "score": 209233.3027545859 }, { "content": "/// Trait for loading a prefabs data for a single entity\n\npub trait PrefabData<'a> {\n\n /// `SystemData` needed to perform the load\n\n type SystemData: SystemData<'a>;\n\n\n\n /// The result type returned by the load operation\n\n type Result;\n\n\n\n /// Load the data for this prefab onto the given `Entity`\n\n ///\n\n /// This can also be used to load resources, the recommended way of doing so is to put the\n\n /// resources on the main `Entity` of the `Prefab`\n\n ///\n\n /// ### Parameters:\n\n ///\n\n /// - `entity`: `Entity` to load components on, or the root `Entity` for the resource scenario\n\n /// - `system_data`: `SystemData` needed to do the loading\n\n /// - `entities`: Some components need access to the entities that was created as part of the\n\n /// full prefab, for linking purposes, so this contains all those `Entity`s.\n\n fn load_prefab(\n\n &self,\n", "file_path": "amethyst_assets/src/prefab/mod.rs", "rank": 62, "score": 209228.29132616904 }, { "content": "/// Data requested by the pass from the specs::World.\n\npub trait StageData<'a> {\n\n type Data: SystemData<'a> + Send;\n\n}\n\n\n", "file_path": "amethyst_renderer/src/pipe/stage.rs", "rank": 63, "score": 209228.28303597085 }, { "content": "/// The data requested from the `specs::World` by the Pipeline.\n\npub trait PipelineData<'a> {\n\n /// The data itself\n\n type Data: SystemData<'a> + Send;\n\n}\n\n\n", "file_path": "amethyst_renderer/src/pipe/pipe.rs", "rank": 64, "score": 209228.28303597085 }, { "content": "pub trait PassesData<'a> {\n\n type Data: SystemData<'a> + Send;\n\n}\n\n\n", "file_path": 
"amethyst_renderer/src/pipe/stage.rs", "rank": 65, "score": 209217.61752025195 }, { "content": "///\n\npub trait StagesData<'a> {\n\n ///\n\n type Data: SystemData<'a> + Send;\n\n}\n\n\n", "file_path": "amethyst_renderer/src/pipe/pipe.rs", "rank": 66, "score": 209217.61752025195 }, { "content": "pub fn get_image_data(\n\n image: &gltf::Image,\n\n buffers: &Buffers,\n\n source: Arc<AssetSource>,\n\n base_path: &Path,\n\n) -> Result<(Vec<u8>, ImageFormat), Error> {\n\n use gltf::image::Source;\n\n match image.source() {\n\n Source::View { view, mime_type } => {\n\n let data = buffers.view(&view).unwrap();\n\n Ok((data.to_vec(), ImageFormat::from_mime_type(mime_type)))\n\n }\n\n\n\n Source::Uri { uri, mime_type } => {\n\n if uri.starts_with(\"data:\") {\n\n let data = parse_data_uri(uri)?;\n\n if let Some(ty) = mime_type {\n\n Ok((data, ImageFormat::from_mime_type(ty)))\n\n } else {\n\n let mimetype = uri\n", "file_path": "amethyst_gltf/src/format/importer.rs", "rank": 67, "score": 205297.76826334308 }, { "content": "/// Sets the mouse cursor icon.\n\npub fn set_mouse_cursor(msg: &mut WindowMessages, cursor: MouseCursor) {\n\n msg.send_command(move |win| {\n\n win.set_cursor(cursor);\n\n });\n\n}\n", "file_path": "amethyst_renderer/src/mouse.rs", "rank": 68, "score": 204204.1881200045 }, { "content": "/// Returns if the command key is down on OSX, and the CTRL key for everything else.\n\nfn ctrl_or_cmd(modifiers: &ModifiersState) -> bool {\n\n (cfg!(target_os = \"macos\") && modifiers.logo)\n\n || (cfg!(not(target_os = \"macos\")) && modifiers.ctrl)\n\n}\n\n\n", "file_path": "amethyst_ui/src/text.rs", "rank": 69, "score": 197953.65194523195 }, { "content": "/// Structures implementing this provide a renderer pass.\n\npub trait Pass: for<'a> PassData<'a> {\n\n /// The pass is given an opportunity to compile shaders and store them in an `Effect`\n\n /// which is then passed to the pass in `apply`.\n\n fn compile(&mut self, effect: NewEffect) -> Result<Effect>;\n\n /// 
Called whenever the renderer is ready to apply the pass. Feed commands into the\n\n /// encoder here.\n\n fn apply<'a, 'b: 'a>(\n\n &'a mut self,\n\n encoder: &mut Encoder,\n\n effect: &mut Effect,\n\n factory: Factory,\n\n data: <Self as PassData<'b>>::Data,\n\n );\n\n}\n\n\n\n/// A compiled pass. These are created and managed by the `Renderer`. This should not be\n\n/// used directly outside of the renderer.\n\n#[derive(Clone, Debug)]\n\npub struct CompiledPass<P> {\n\n effect: Effect,\n", "file_path": "amethyst_renderer/src/pipe/pass.rs", "rank": 70, "score": 197184.7835678961 }, { "content": "pub trait Passes: for<'a> PassesData<'a> {\n\n fn apply<'a, 'b: 'a>(\n\n &'a mut self,\n\n encoder: &mut Encoder,\n\n factory: Factory,\n\n data: <Self as PassesData<'b>>::Data,\n\n );\n\n\n\n /// Distributes new targets\n\n fn new_target(&mut self, new_target: &Target);\n\n}\n\n\n\nimpl<'a, HP> PassesData<'a> for List<(CompiledPass<HP>, List<()>)>\n\nwhere\n\n HP: Pass,\n\n{\n\n type Data = <HP as PassData<'a>>::Data;\n\n}\n\n\n\nimpl<HP> Passes for List<(CompiledPass<HP>, List<()>)>\n", "file_path": "amethyst_renderer/src/pipe/stage.rs", "rank": 71, "score": 197184.7835678961 }, { "content": "/// Trait used for the pipeline.\n\npub trait PolyPipeline: for<'a> PipelineData<'a> {\n\n /// Retuns `ParallelIterator` which apply data to all stages\n\n fn apply<'a, 'b: 'a>(\n\n &'a mut self,\n\n encoder: &mut Encoder,\n\n factory: Factory,\n\n data: <Self as PipelineData<'b>>::Data,\n\n );\n\n\n\n /// Resizes the pipeline targets\n\n fn new_targets(&mut self, new_targets: HashMap<String, Target>);\n\n\n\n /// Returns an immutable reference to all targets and their name strings.\n\n fn targets(&self) -> &HashMap<String, Target>;\n\n}\n\n\n\nimpl<'a, L> PipelineData<'a> for Pipeline<L>\n\nwhere\n\n L: PolyStages,\n\n{\n", "file_path": "amethyst_renderer/src/pipe/pipe.rs", "rank": 72, "score": 194940.48615856853 }, { "content": "///\n\npub trait PolyStages: for<'a> StagesData<'a> 
{\n\n ///\n\n fn apply<'a, 'b: 'a>(\n\n &'a mut self,\n\n encoders: &mut Encoder,\n\n factory: Factory,\n\n data: <Self as StagesData<'b>>::Data,\n\n );\n\n\n\n /// Distributes new targets\n\n fn new_targets(&mut self, new_targets: &HashMap<String, Target>);\n\n}\n\n\n\nimpl<'a, HS> StagesData<'a> for List<(HS, List<()>)>\n\nwhere\n\n HS: PolyStage,\n\n{\n\n type Data = <HS as StageData<'a>>::Data;\n\n}\n\n\n", "file_path": "amethyst_renderer/src/pipe/pipe.rs", "rank": 73, "score": 194929.6147110841 }, { "content": "/// A stage in the rendering. Contains multiple passes.\n\npub trait PolyStage: for<'a> StageData<'a> {\n\n ///\n\n fn apply<'a, 'b: 'a>(\n\n &'a mut self,\n\n encoder: &mut Encoder,\n\n factory: Factory,\n\n data: <Self as StageData<'b>>::Data,\n\n );\n\n\n\n /// Distributes new targets\n\n fn new_targets(&mut self, new_targets: &HashMap<String, Target>);\n\n}\n\n\n\nimpl<'a, L> StageData<'a> for Stage<L>\n\nwhere\n\n L: Passes,\n\n{\n\n type Data = <L as PassesData<'a>>::Data;\n\n}\n\n\n", "file_path": "amethyst_renderer/src/pipe/stage.rs", "rank": 74, "score": 194929.6147110841 }, { "content": "/// A bundle of ECS components, resources and systems.\n\npub trait SystemBundle<'a, 'b> {\n\n /// Build and add ECS resources, register components, add systems etc to the Application.\n\n fn build(self, dispatcher: &mut DispatcherBuilder<'a, 'b>) -> Result<()>;\n\n}\n", "file_path": "amethyst_core/src/bundle.rs", "rank": 75, "score": 190729.4387422037 }, { "content": "type SdlEventsData<'a, AX, AC> = (\n\n Write<'a, InputHandler<AX, AC>>,\n\n Write<'a, EventChannel<InputEvent<AC>>>,\n\n);\n\n\n\nimpl<'a, AX, AC> RunNow<'a> for SdlEventsSystem<AX, AC>\n\nwhere\n\n AX: Hash + Eq + Clone + Send + Sync + 'static,\n\n AC: Hash + Eq + Clone + Send + Sync + 'static,\n\n{\n\n fn run_now(&mut self, res: &'a Resources) {\n\n let (mut handler, mut output) = SdlEventsData::fetch(res);\n\n\n\n let mut event_pump = self.event_pump.take().unwrap();\n\n for event in 
event_pump.poll_iter() {\n\n // handle appropriate events locally\n\n self.handle_sdl_event(&event, &mut handler, &mut output);\n\n }\n\n self.event_pump = Some(event_pump);\n\n }\n", "file_path": "amethyst_input/src/sdl_events_system.rs", "rank": 76, "score": 180372.2373157581 }, { "content": "enum State {\n\n Start,\n\n Loading(ProgressCounter),\n\n SomethingElse,\n\n}\n\n\n\nimpl State {\n\n /// Returns `Some` if the app should quit.\n\n fn update(self, world: &mut World) -> Option<Self> {\n\n match self {\n\n State::Start => {\n\n let (mesh, progress) = {\n\n let mut progress = ProgressCounter::new();\n\n let loader = world.read_resource::<Loader>();\n\n let a = loader.load(\"mesh.ron\", Ron, (), &mut progress, &world.read_resource());\n\n\n\n (a, progress)\n\n };\n\n\n\n world.create_entity().with(mesh).build();\n", "file_path": "amethyst_assets/examples/hl.rs", "rank": 77, "score": 177632.73521265676 }, { "content": "/// Represents a graphics backend for the renderer.\n\nstruct Backend(pub Device, pub Factory, pub Target, pub Window);\n\n\n\n/// Creates the Direct3D 11 backend.\n", "file_path": "amethyst_renderer/src/renderer.rs", "rank": 78, "score": 173406.17666244504 }, { "content": "/// Trait for vertex attributes to implement\n\npub trait Attribute {\n\n /// Name of the attribute\n\n /// It is used to bind to the attributes in shaders\n\n const NAME: &'static str;\n\n\n\n /// Format of the attribute defines arity and type\n\n const FORMAT: Format;\n\n\n\n /// Size of the attribue\n\n const SIZE: u32; // Has to be equal to `std::mem::size_of::<Self::Repr>() as u32`\n\n\n\n /// Representation of the attribute\n\n /// usually it is `[f32; N]`\n\n type Repr: Pod + Send + Sync;\n\n}\n\n\n\n/// Type for position attribute of vertex\n\n#[derive(Clone, Debug)]\n\npub enum Position {}\n\nimpl Attribute for Position {\n", "file_path": "amethyst_renderer/src/vertex.rs", "rank": 79, "score": 168377.5967452992 }, { "content": "/// Trait implemented by the `config!` 
macro.\n\npub trait Config\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Loads a configuration structure from a file.\n\n /// Defaults if the file fails in any way.\n\n fn load<P: AsRef<Path>>(path: P) -> Self;\n\n\n\n /// Loads a configuration structure from a file.\n\n fn load_no_fallback<P: AsRef<Path>>(path: P) -> Result<Self, ConfigError>;\n\n\n\n /// Writes a configuration structure to a file.\n\n fn write<P: AsRef<Path>>(&self, path: P) -> Result<(), ConfigError>;\n\n}\n\n\n\nimpl<T> Config for T\n\nwhere\n\n T: for<'a> Deserialize<'a> + Serialize + Default,\n\n{\n\n fn load<P: AsRef<Path>>(path: P) -> Self {\n", "file_path": "amethyst_config/src/lib.rs", "rank": 80, "score": 168377.5967452992 }, { "content": "/// The `Progress` trait, allowing to track which assets are\n\n/// imported already.\n\npub trait Progress {\n\n /// The tracker this progress can create.\n\n type Tracker: Tracker;\n\n\n\n /// Add `num` assets to the progress.\n\n /// This should be done whenever a new asset is\n\n /// put in the queue.\n\n fn add_assets(&mut self, num: usize);\n\n\n\n /// Creates a `Tracker`.\n\n fn create_tracker(self) -> Self::Tracker;\n\n}\n\n\n\nimpl Progress for () {\n\n type Tracker = ();\n\n\n\n fn add_assets(&mut self, _: usize) {}\n\n\n\n fn create_tracker(self) -> () {\n\n ()\n", "file_path": "amethyst_assets/src/progress.rs", "rank": 81, "score": 168377.49367044162 }, { "content": "/// An easy way to name an `Entity` and give it a `Named` `Component`.\n\npub trait WithNamed\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Adds a name to the entity being built.\n\n fn named<S>(self, name: S) -> Self\n\n where\n\n S: Into<Cow<'static, str>>;\n\n}\n\n\n\nimpl<'a> WithNamed for EntityBuilder<'a> {\n\n fn named<S>(self, name: S) -> Self\n\n where\n\n S: Into<Cow<'static, str>>,\n\n {\n\n // Unwrap: The only way this can fail is if the entity is invalid and this is used while creating the entity.\n\n self.world\n\n .system_data::<(WriteStorage<'a, Named>,)>()\n\n .0\n\n 
.insert(self.entity, Named::new(name))\n", "file_path": "amethyst_core/src/named.rs", "rank": 82, "score": 168372.04292596586 }, { "content": "fn show_error(handle_id: u32, asset_type_name: &'static str, asset_name: &String, error: &Error) {\n\n let mut err_out = format!(\n\n \"Error loading handle {}, {}, with name {}, caused by: {:?}\",\n\n handle_id, asset_type_name, asset_name, error\n\n );\n\n error\n\n .iter()\n\n .skip(1)\n\n .for_each(|e| err_out.push_str(&format!(\"\\r\\ncaused by: {:?}\", e)));\n\n error!(\"{}\", err_out);\n\n}\n", "file_path": "amethyst_assets/src/progress.rs", "rank": 83, "score": 166008.06239291088 }, { "content": "///\n\npub trait PipelineBuild {\n\n /// Resuling pipeline\n\n type Pipeline: PolyPipeline;\n\n\n\n /// Build pipeline\n\n fn build(self, fac: &mut Factory, out: &Target, multisampling: u16) -> Result<Self::Pipeline>;\n\n}\n\n\n\nimpl<L, Z, R, Q> PipelineBuild for PipelineBuilder<Q>\n\nwhere\n\n Q: IntoList<List = L>,\n\n L: for<'a> Functor<BuildStage<'a>, Output = Z>,\n\n Z: Try<Error, Ok = R>,\n\n R: PolyStages,\n\n{\n\n type Pipeline = Pipeline<R>;\n\n fn build(mut self, fac: &mut Factory, out: &Target, multisampling: u16) -> Result<Pipeline<R>> {\n\n let mut targets = self\n\n .targets\n\n .drain(..)\n", "file_path": "amethyst_renderer/src/pipe/pipe.rs", "rank": 84, "score": 165106.81988802092 }, { "content": "fn default_vsync() -> bool {\n\n true\n\n}\n", "file_path": "amethyst_renderer/src/config.rs", "rank": 85, "score": 163060.38056955487 }, { "content": "fn default_visibility() -> bool {\n\n true\n\n}\n\n\n\nimpl DisplayConfig {\n\n /// Creates a `winit::WindowBuilder` using the values set in the DisplayConfig\n\n ///\n\n /// The EventsLoop is needed to configure a fullscreen window\n\n pub fn to_windowbuilder(self, el: winit::EventsLoop) -> WindowBuilder {\n\n use winit::WindowAttributes;\n\n let attrs = WindowAttributes {\n\n dimensions: self.dimensions,\n\n max_dimensions: self.max_dimensions,\n\n min_dimensions: 
self.min_dimensions,\n\n title: self.title,\n\n visible: self.visibility,\n\n ..Default::default()\n\n };\n\n\n\n let mut builder = WindowBuilder::new();\n", "file_path": "amethyst_renderer/src/config.rs", "rank": 86, "score": 163060.38056955487 }, { "content": "pub trait ReloadClone<A> {\n\n fn cloned(&self) -> Box<Reload<A>>;\n\n}\n\n\n\nimpl<A, T> ReloadClone<A> for T\n\nwhere\n\n A: Asset,\n\n T: Clone + Reload<A>,\n\n{\n\n fn cloned(&self) -> Box<Reload<A>> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl<A: Asset> Clone for Box<Reload<A>> {\n\n fn clone(&self) -> Self {\n\n self.cloned()\n\n }\n\n}\n\n\n", "file_path": "amethyst_assets/src/reload.rs", "rank": 87, "score": 162666.57154045094 }, { "content": "/// Creates a solid material of the specified colour.\n\nfn create_colour_material(world: &World, colour: [f32; 4]) -> Material {\n\n // TODO: optimize\n\n\n\n use amethyst::renderer::MaterialDefaults;\n\n\n\n let mat_defaults = world.read_resource::<MaterialDefaults>();\n\n let loader = world.read_resource::<Loader>();\n\n\n\n let albedo = loader.load_from_data(colour.into(), (), &world.read_resource());\n\n\n\n Material {\n\n albedo,\n\n ..mat_defaults.0.clone()\n\n }\n\n}\n\n\n", "file_path": "examples/appendix_a/pong.rs", "rank": 88, "score": 161391.38746840192 }, { "content": "// Load a single material, and transform into a format usable by the engine\n\npub fn load_material(\n\n material: &gltf::Material,\n\n buffers: &Buffers,\n\n source: Arc<Source>,\n\n name: &str,\n\n) -> Result<MaterialPrefab<TextureFormat>, GltfError> {\n\n let mut prefab = MaterialPrefab::default();\n\n prefab.albedo = Some(\n\n load_texture_with_factor(\n\n material.pbr_metallic_roughness().base_color_texture(),\n\n material.pbr_metallic_roughness().base_color_factor(),\n\n buffers,\n\n source.clone(),\n\n name,\n\n ).map(|(texture, _)| TexturePrefab::Data(texture))?,\n\n );\n\n\n\n let (metallic, roughness) =\n\n load_texture_with_factor(\n\n material\n", "file_path": 
"amethyst_gltf/src/format/material.rs", "rank": 89, "score": 159461.45066254883 }, { "content": "pub fn load_animations(\n\n gltf: &gltf::Gltf,\n\n buffers: &Buffers,\n\n node_map: &HashMap<usize, usize>,\n\n) -> Result<AnimationSetPrefab<usize, Transform>, GltfError> {\n\n let mut prefab = AnimationSetPrefab::default();\n\n for animation in gltf.animations() {\n\n let anim = load_animation(&animation, buffers)?;\n\n if anim\n\n .samplers\n\n .iter()\n\n .any(|sampler| node_map.contains_key(&sampler.0))\n\n {\n\n prefab.animations.push((animation.index(), anim));\n\n }\n\n }\n\n Ok(prefab)\n\n}\n\n\n", "file_path": "amethyst_gltf/src/format/animation.rs", "rank": 90, "score": 159461.45066254883 }, { "content": "pub fn load_mesh(\n\n mesh: &gltf::Mesh,\n\n buffers: &Buffers,\n\n options: &GltfSceneOptions,\n\n) -> Result<Vec<(MeshData, Option<usize>, Range<[f32; 3]>)>, GltfError> {\n\n trace!(\"Loading mesh\");\n\n let mut primitives = vec![];\n\n\n\n for primitive in mesh.primitives() {\n\n trace!(\"Loading mesh primitive\");\n\n let reader = primitive.reader(|buffer| buffers.buffer(&buffer));\n\n\n\n trace!(\"Loading faces\");\n\n let faces = reader\n\n .read_indices()\n\n .map(|indices| indices.into_u32())\n\n .map(|mut indices| {\n\n let mut faces = vec![];\n\n while let (Some(a), Some(b), Some(c)) =\n\n (indices.next(), indices.next(), indices.next())\n", "file_path": "amethyst_gltf/src/format/mesh.rs", "rank": 91, "score": 159461.45066254883 }, { "content": "pub fn load_skin(\n\n skin: &gltf::Skin,\n\n buffers: &Buffers,\n\n skin_entity: usize,\n\n node_map: &HashMap<usize, usize>,\n\n meshes: Vec<usize>,\n\n prefab: &mut Prefab<GltfPrefab>,\n\n) -> Result<(), GltfError> {\n\n let joints = skin\n\n .joints()\n\n .map(|j| node_map.get(&j.index()).cloned().unwrap())\n\n .collect::<Vec<_>>();\n\n\n\n let reader = skin.reader(|buffer| buffers.buffer(&buffer));\n\n\n\n let inverse_bind_matrices = reader\n\n .read_inverse_bind_matrices()\n\n .map(|matrices| 
matrices.collect())\n\n .unwrap_or(vec![Matrix4::identity().into(); joints.len()]);\n\n\n", "file_path": "amethyst_gltf/src/format/skin.rs", "rank": 92, "score": 159461.45066254883 }, { "content": "/// Converts a vector of vertices into a mesh.\n\nfn create_mesh(world: &World, vertices: Vec<PosTex>) -> MeshHandle {\n\n let loader = world.read_resource::<Loader>();\n\n loader.load_from_data(vertices.into(), (), &world.read_resource())\n\n}\n\n\n", "file_path": "examples/appendix_a/pong.rs", "rank": 93, "score": 158930.11088155356 }, { "content": "pub fn set_view_args(\n\n effect: &mut Effect,\n\n encoder: &mut Encoder,\n\n camera: Option<(&Camera, &GlobalTransform)>,\n\n) {\n\n let view_args = camera\n\n .as_ref()\n\n .map(|&(ref cam, ref transform)| ViewArgs {\n\n proj: cam.proj.into(),\n\n view: transform.0.invert().unwrap().into(),\n\n })\n\n .unwrap_or_else(|| ViewArgs {\n\n proj: Matrix4::one().into(),\n\n view: Matrix4::one().into(),\n\n });\n\n effect.update_constant_buffer(\"ViewArgs\", &view_args.std140(), encoder);\n\n}\n\n\n\npub(crate) fn draw_mesh(\n\n encoder: &mut Encoder,\n", "file_path": "amethyst_renderer/src/pass/util.rs", "rank": 94, "score": 157916.45750163947 }, { "content": "/// Create a texture asset.\n\npub fn create_texture_asset(\n\n data: TextureData,\n\n renderer: &mut Renderer,\n\n) -> Result<ProcessingState<Texture>> {\n\n use self::TextureData::*;\n\n let t = match data {\n\n Image(image_data, options) => {\n\n create_texture_asset_from_image(image_data, options, renderer)\n\n }\n\n\n\n Rgba(color, options) => {\n\n let tb = apply_options(Texture::from_color_val(color), options);\n\n renderer\n\n .create_texture(tb)\n\n .chain_err(|| \"Failed to build texture\")\n\n }\n\n\n\n F32(data, options) => {\n\n let tb = apply_options(TextureBuilder::new(data), options);\n\n renderer\n", "file_path": "amethyst_renderer/src/formats/texture.rs", "rank": 95, "score": 157916.45750163947 }, { "content": "/// Build Mesh with vertex buffer 
combination\n\npub fn build_mesh_with_combo(\n\n combo: VertexBufferCombination,\n\n renderer: &mut Renderer,\n\n) -> ::error::Result<Mesh> {\n\n build_mesh_with_some!(\n\n MeshBuilder::new(combo.0),\n\n renderer,\n\n combo.1,\n\n combo.2,\n\n combo.3,\n\n combo.4\n\n )\n\n}\n\n\n", "file_path": "amethyst_renderer/src/formats/mesh.rs", "rank": 96, "score": 157916.45750163947 }, { "content": "/// Sets the vertex argument in the constant buffer.\n\npub fn set_vertex_args(\n\n effect: &mut Effect,\n\n encoder: &mut Encoder,\n\n camera: Option<(&Camera, &GlobalTransform)>,\n\n global: &GlobalTransform,\n\n) {\n\n let vertex_args = camera\n\n .as_ref()\n\n .map(|&(ref cam, ref transform)| VertexArgs {\n\n proj: cam.proj.into(),\n\n view: transform.0.invert().unwrap().into(),\n\n model: global.0.into(),\n\n })\n\n .unwrap_or_else(|| VertexArgs {\n\n proj: Matrix4::one().into(),\n\n view: Matrix4::one().into(),\n\n model: global.0.into(),\n\n });\n\n effect.update_constant_buffer(\"VertexArgs\", &vertex_args.std140(), encoder);\n\n}\n\n\n", "file_path": "amethyst_renderer/src/pass/util.rs", "rank": 97, "score": 157916.45750163947 }, { "content": "/// This is a simplified version of `Format`, which doesn't give you as much freedom,\n\n/// but in return is simpler to implement.\n\n/// All `SimpleFormat` types automatically implement `Format`.\n\n/// This format assumes that the asset name is the full path and the asset is only\n\n/// contained in one file.\n\npub trait SimpleFormat<A: Asset> {\n\n /// A unique identifier for this format.\n\n const NAME: &'static str;\n\n /// Options specific to the format, which are passed to `import`.\n\n /// E.g. 
for textures this would be stuff like mipmap levels and\n\n /// sampler info.\n\n type Options: Clone + Send + Sync + 'static;\n\n\n\n /// Produces asset data from given bytes.\n\n fn import(&self, bytes: Vec<u8>, options: Self::Options) -> Result<A::Data>;\n\n}\n\n\n\nimpl<A, T> Format<A> for T\n\nwhere\n\n A: Asset,\n\n T: SimpleFormat<A> + Clone + Send + Sync + 'static,\n\n{\n\n const NAME: &'static str = T::NAME;\n\n type Options = T::Options;\n\n\n", "file_path": "amethyst_assets/src/asset.rs", "rank": 98, "score": 157470.9696413671 }, { "content": "/// A color used to query a hashmap for a cached texture of that color.\n\nstruct KeyColor(pub [u8; 4]);\n\n\n\nimpl Eq for KeyColor {}\n\n\n\nimpl PartialEq for KeyColor {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.0[0] == other.0[0]\n\n && self.0[1] == other.0[1]\n\n && self.0[2] == other.0[2]\n\n && self.0[3] == other.0[3]\n\n }\n\n}\n\n\n\nimpl Hash for KeyColor {\n\n fn hash<H: Hasher>(&self, hasher: &mut H) {\n\n Hash::hash_slice(&self.0, hasher);\n\n }\n\n}\n\n\n\n/// Draw Ui elements. UI won't display without this. It's recommended this be your last pass.\n\npub struct DrawUi {\n\n mesh: Option<Mesh>,\n\n cached_draw_order: CachedDrawOrder,\n\n cached_color_textures: HashMap<KeyColor, TextureHandle>,\n\n glyph_brushes: GlyphBrushCache,\n\n next_brush_cache_id: u64,\n\n}\n\n\n", "file_path": "amethyst_ui/src/pass.rs", "rank": 99, "score": 157224.19031375152 } ]
Rust
src/scene/mesh/vertex.rs
jackos/Fyrox
4b293733bda8e1a0a774aaf82554ac8930afdd8b
use crate::core::visitor::{Visit, VisitResult, Visitor}; use crate::{ core::algebra::{Vector2, Vector3, Vector4}, scene::mesh::buffer::{ VertexAttributeDataType, VertexAttributeDescriptor, VertexAttributeUsage, }, }; use std::hash::{Hash, Hasher}; #[derive(Copy, Clone, Debug, Default)] #[repr(C)] pub struct StaticVertex { pub position: Vector3<f32>, pub tex_coord: Vector2<f32>, pub normal: Vector3<f32>, pub tangent: Vector4<f32>, } impl StaticVertex { pub fn from_pos_uv(position: Vector3<f32>, tex_coord: Vector2<f32>) -> Self { Self { position, tex_coord, normal: Vector3::new(0.0, 1.0, 0.0), tangent: Vector4::default(), } } pub fn from_pos_uv_normal( position: Vector3<f32>, tex_coord: Vector2<f32>, normal: Vector3<f32>, ) -> Self { Self { position, tex_coord, normal, tangent: Vector4::default(), } } pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 4] = [ VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord0, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 1, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Normal, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 2, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Tangent, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 3, }, ]; &LAYOUT } } impl PartialEq for StaticVertex { fn eq(&self, other: &Self) -> bool { self.position == other.position && self.tex_coord == other.tex_coord && self.normal == other.normal && self.tangent == other.tangent } } impl Hash for StaticVertex { fn hash<H: Hasher>(&self, state: &mut H) { #[allow(unsafe_code)] unsafe { let bytes = self as *const Self as *const u8; state.write(std::slice::from_raw_parts( bytes, std::mem::size_of::<Self>(), )) } } } #[derive(Copy, 
Clone, Debug, Default)] #[repr(C)] pub struct AnimatedVertex { pub position: Vector3<f32>, pub tex_coord: Vector2<f32>, pub normal: Vector3<f32>, pub tangent: Vector4<f32>, pub bone_weights: [f32; 4], pub bone_indices: [u8; 4], } impl AnimatedVertex { pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 6] = [ VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord0, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 1, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Normal, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 2, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Tangent, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 3, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneWeight, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 4, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneIndices, data_type: VertexAttributeDataType::U8, size: 4, divisor: 0, shader_location: 5, }, ]; &LAYOUT } } impl PartialEq for AnimatedVertex { fn eq(&self, other: &Self) -> bool { self.position == other.position && self.tex_coord == other.tex_coord && self.normal == other.normal && self.tangent == other.tangent && self.bone_weights == other.bone_weights && self.bone_indices == other.bone_indices } } impl Hash for AnimatedVertex { fn hash<H: Hasher>(&self, state: &mut H) { #[allow(unsafe_code)] unsafe { let bytes = self as *const Self as *const u8; state.write(std::slice::from_raw_parts( bytes, std::mem::size_of::<Self>(), )) } } } #[derive(Copy, Clone, Debug, Default)] #[repr(C)] pub struct SimpleVertex { pub position: Vector3<f32>, } impl SimpleVertex { pub fn new(x: f32, y: f32, z: f32) -> Self { Self { 
position: Vector3::new(x, y, z), } } pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 1] = [VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }]; &LAYOUT } } impl PartialEq for SimpleVertex { fn eq(&self, other: &Self) -> bool { self.position == other.position } } impl Hash for SimpleVertex { fn hash<H: Hasher>(&self, state: &mut H) { #[allow(unsafe_code)] unsafe { let bytes = self as *const Self as *const u8; state.write(std::slice::from_raw_parts( bytes, std::mem::size_of::<Self>(), )) } } } #[derive(Copy, Clone, Debug, Default)] #[repr(C)] pub struct OldVertex { pub position: Vector3<f32>, pub tex_coord: Vector2<f32>, pub normal: Vector3<f32>, pub tangent: Vector4<f32>, pub bone_weights: [f32; 4], pub bone_indices: [u8; 4], pub second_tex_coord: Vector2<f32>, } impl Visit for OldVertex { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.position.visit("Position", visitor)?; self.tex_coord.visit("TexCoord", visitor)?; self.second_tex_coord.visit("SecondTexCoord", visitor)?; self.normal.visit("Normal", visitor)?; self.tangent.visit("Tangent", visitor)?; self.bone_weights[0].visit("Weight0", visitor)?; self.bone_weights[1].visit("Weight1", visitor)?; self.bone_weights[2].visit("Weight2", visitor)?; self.bone_weights[3].visit("Weight3", visitor)?; self.bone_indices[0].visit("BoneIndex0", visitor)?; self.bone_indices[1].visit("BoneIndex1", visitor)?; self.bone_indices[2].visit("BoneIndex2", visitor)?; self.bone_indices[3].visit("BoneIndex3", visitor)?; visitor.leave_region() } } impl OldVertex { pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 7] = [ VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }, 
VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord0, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 1, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Normal, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 2, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Tangent, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 3, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneWeight, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 4, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneIndices, data_type: VertexAttributeDataType::U8, size: 4, divisor: 0, shader_location: 5, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord1, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 6, }, ]; &LAYOUT } }
use crate::core::visitor::{Visit, VisitResult, Visitor}; use crate::{ core::algebra::{Vector2, Vector3, Vector4}, scene::mesh::buffer::{ VertexAttributeDataType, VertexAttributeDescriptor, VertexAttributeUsage, }, }; use std::hash::{Hash, Hasher}; #[derive(Copy, Clone, Debug, Default)] #[repr(C)] pub struct StaticVertex { pub position: Vector3<f32>, pub tex_coord: Vector2<f32>, pub normal: Vector3<f32>, pub tangent: Vector4<f32>, } impl StaticVertex { pub fn from_pos_uv(position: Vector3<f32>, tex_coord: Vector2<f32>) -> Self { Self { position, tex_coord, normal: Vector3::new(0.0, 1.0, 0.0), tangent: Vector4::default(), } } pub fn from_pos_uv_normal( position: Vector3<f32>, tex_coord: Vector2<f32>, normal: Vector3<f32>, ) -> Self { Self { position, tex_coord, normal, tangent: Vector4::default(), } } pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 4] = [ VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord0, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 1, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Normal, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 2, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Tangent, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 3, }, ]; &LAYOUT } } impl PartialEq for StaticVertex { fn eq(&self, other: &Self) -> bool { self.position == other.position && self.tex_coord == other.tex_coord && self.normal == other.normal && self.tangent == other.tangent } } impl Hash for StaticVertex { fn hash<H: Hasher>(&self, state: &mut H) { #[allow(unsafe_code)] unsafe { let bytes = self as *const Self as *const u8; state.write(std::slice::from_raw_parts( bytes, std::mem::size_of::<Self>(), )) } } } #[derive(Copy, 
Clone, Debug, Default)] #[repr(C)] pub struct AnimatedVertex { pub position: Vector3<f32>, pub tex_coord: Vector2<f32>, pub normal: Vector3<f32>, pub tangent: Vector4<f32>, pub bone_weights: [f32; 4], pub bone_indices: [u8; 4], } impl AnimatedVertex { pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 6] = [ VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord0, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 1, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Normal, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 2, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Tangent, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 3, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneWeight, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 4, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneIndices, data_type: VertexAttributeDataType::U8, size: 4, divisor: 0, shader_location: 5, }, ]; &LAYOUT } } impl PartialEq for AnimatedVertex { fn eq(&self, other: &Self) -> bool { self.position == other.position && self.tex_coord == other.tex_coord && self.normal == other.normal && self.tangent == other.tangent && self.bone_weights == other.bone_weights && self.bone_indices == other.bone_indices } } impl Hash for AnimatedVertex { fn hash<H: Hasher>(&self, state: &mut H) { #[allow(unsafe_code)] unsafe { let bytes = self as *const Self as *const u8; state.write(std::slice::from_raw_parts( bytes, std::mem::size_of::<Self>(), )) } } } #[derive(Copy, Clone, Debug, Default)] #[repr(C)] pub struct SimpleVertex { pub position: Vector3<f32>, } impl SimpleVertex { pub fn new(x: f32, y: f32, z: f32) -> Self { Self { 
position: Vector3::new(x, y, z), } } pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 1] = [VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }]; &LAYOUT } } impl PartialEq for SimpleVertex { fn eq(&self, other: &Self) -> bool { self.position == other.position } } impl Hash for SimpleVertex { fn hash<H: Hasher>(&self, state: &mut H) { #[allow(unsafe_code)] unsafe { let bytes = self as *const Self as *const u8; state.write(std::slice::from_raw_parts( bytes, std::mem::size_of::<Self>(), )) } } } #[derive(Copy, Clone, Debug, Default)] #[repr(C)] pub struct OldVertex { pub position: Vector3<f32>, pub tex_coord: Vector2<f32>, pub normal: Vector3<f32>, pub tangent: Vector4<f32>, pub bone_weights: [f32; 4], pub bone_indices: [u8; 4], pub second_tex_coord: Vector2<f32>, } impl Visit for OldVertex { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.position.visit("Position", visitor)?; self.tex_coord.visit("TexCoord", visitor)?; self.second_tex_coord.visit("SecondTexCoord", visitor)?; self.normal.visit("Normal", visitor)?; self.tangent.visit("Tangent", visitor)?; self.bone_weights[0].visit("Weight0", visitor)?; self.bone_weights
visitor)?; self.bone_indices[1].visit("BoneIndex1", visitor)?; self.bone_indices[2].visit("BoneIndex2", visitor)?; self.bone_indices[3].visit("BoneIndex3", visitor)?; visitor.leave_region() } } impl OldVertex { pub fn layout() -> &'static [VertexAttributeDescriptor] { static LAYOUT: [VertexAttributeDescriptor; 7] = [ VertexAttributeDescriptor { usage: VertexAttributeUsage::Position, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 0, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord0, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 1, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Normal, data_type: VertexAttributeDataType::F32, size: 3, divisor: 0, shader_location: 2, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::Tangent, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 3, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneWeight, data_type: VertexAttributeDataType::F32, size: 4, divisor: 0, shader_location: 4, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::BoneIndices, data_type: VertexAttributeDataType::U8, size: 4, divisor: 0, shader_location: 5, }, VertexAttributeDescriptor { usage: VertexAttributeUsage::TexCoord1, data_type: VertexAttributeDataType::F32, size: 2, divisor: 0, shader_location: 6, }, ]; &LAYOUT } }
[1].visit("Weight1", visitor)?; self.bone_weights[2].visit("Weight2", visitor)?; self.bone_weights[3].visit("Weight3", visitor)?; self.bone_indices[0].visit("BoneIndex0",
function_block-random_span
[ { "content": "/// Performs hashing of a sized value by interpreting it as raw memory.\n\npub fn hash_as_bytes<T: Sized, H: Hasher>(value: &T, hasher: &mut H) {\n\n hasher.write(value_as_u8_slice(value))\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 0, "score": 527698.0130796023 }, { "content": "#[inline]\n\npub fn get_polygon_normal(polygon: &[Vector3<f32>]) -> Result<Vector3<f32>, &'static str> {\n\n let mut normal = Vector3::default();\n\n\n\n for (i, current) in polygon.iter().enumerate() {\n\n let next = polygon[(i + 1) % polygon.len()];\n\n normal.x += (current.y - next.y) * (current.z + next.z);\n\n normal.y += (current.z - next.z) * (current.x + next.x);\n\n normal.z += (current.x - next.x) * (current.y + next.y);\n\n }\n\n\n\n normal\n\n .try_normalize(f32::EPSILON)\n\n .ok_or(\"Unable to get normal of degenerated polygon!\")\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 1, "score": 507725.62472912564 }, { "content": "fn write_node(name: &str, node: &mut Node, visitor: &mut Visitor) -> VisitResult {\n\n visitor.enter_region(name)?;\n\n\n\n let mut id = node.id();\n\n id.visit(\"TypeUuid\", visitor)?;\n\n\n\n node.visit(\"NodeData\", visitor)?;\n\n\n\n visitor.leave_region()\n\n}\n\n\n\nimpl Visit for NodeContainer {\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {\n\n visitor.enter_region(name)?;\n\n\n\n let mut is_some = if self.is_some() { 1u8 } else { 0u8 };\n\n is_some.visit(\"IsSome\", visitor)?;\n\n\n\n if is_some != 0 {\n\n if visitor.is_reading() {\n", "file_path": "src/scene/node/container.rs", "rank": 2, "score": 473798.1150302753 }, { "content": "/// Saves given `data` and overwrites `data_default` with the saved data.\n\n///\n\n/// Test the equality after running this method!\n\npub fn save_load<T: Visit>(test_name: &str, data: &mut T, data_default: &mut T) {\n\n // Locate output path\n\n let (bin, txt) = {\n\n let manifest_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let root = 
PathBuf::from(manifest_dir).join(\"test_output\");\n\n let _ = std::fs::create_dir(&root);\n\n (\n\n root.join(format!(\"{}.bin\", test_name)),\n\n root.join(format!(\"{}.txt\", test_name)),\n\n )\n\n };\n\n\n\n // Save `data`\n\n {\n\n let mut visitor = Visitor::new();\n\n data.visit(\"Data\", &mut visitor).unwrap();\n\n\n\n visitor.save_binary(&bin).unwrap();\n\n let mut file = File::create(&txt).unwrap();\n\n file.write_all(visitor.save_text().as_bytes()).unwrap();\n", "file_path": "fyrox-core-derive/tests/it/visit.rs", "rank": 3, "score": 440860.475788104 }, { "content": "#[inline]\n\npub fn is_point_inside_triangle(p: &Vector3<f32>, vertices: &[Vector3<f32>; 3]) -> bool {\n\n let ba = vertices[1] - vertices[0];\n\n let ca = vertices[2] - vertices[0];\n\n let vp = *p - vertices[0];\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot02 = ca.dot(&vp);\n\n let dot12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // Calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot02 - ca_dot_ba * dot12) * inv_denom;\n\n let v = (ca_dot_ca * dot12 - ca_dot_ba * dot02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 4, "score": 435141.673369291 }, { "content": "fn read_node(name: &str, visitor: &mut Visitor) -> Result<Node, VisitError> {\n\n let node = {\n\n // Handle legacy nodes.\n\n let mut kind_id = 0u8;\n\n if kind_id.visit(\"KindId\", visitor).is_ok() {\n\n let mut node = match kind_id {\n\n 0 => Node::new(Pivot::default()),\n\n 1 => {\n\n visitor.enter_region(name)?;\n\n\n\n let mut light_id = 0u32;\n\n light_id.visit(\"KindId\", visitor)?;\n\n\n\n let mut light_node = match light_id {\n\n 0 => Node::new(SpotLight::default()),\n\n 1 => Node::new(PointLight::default()),\n\n 2 => Node::new(DirectionalLight::default()),\n\n _ => {\n\n return 
Err(VisitError::User(format!(\n\n \"Invalid legacy light kind {}\",\n", "file_path": "src/scene/node/container.rs", "rank": 5, "score": 428921.0574280795 }, { "content": "/// A trait for resource data.\n\npub trait ResourceData: 'static + Default + Debug + Visit + Send {\n\n /// Returns path of resource data.\n\n fn path(&self) -> Cow<Path>;\n\n\n\n /// Sets new path to resource data.\n\n fn set_path(&mut self, path: PathBuf);\n\n}\n\n\n", "file_path": "fyrox-resource/src/lib.rs", "rank": 6, "score": 392454.61260161735 }, { "content": "///\n\n/// Triangulates specified polygon.\n\n///\n\npub fn triangulate(vertices: &[Vector3<f32>], out_triangles: &mut Vec<[usize; 3]>) {\n\n out_triangles.clear();\n\n if vertices.len() == 3 {\n\n // Triangulating a triangle?\n\n out_triangles.push([0, 1, 2]);\n\n } else if vertices.len() == 4 {\n\n // Special case for quadrilaterals (much faster than generic)\n\n let mut start_vertex = 0;\n\n for i in 0..4 {\n\n let v = vertices[i];\n\n let v0 = vertices[(i + 3) % 4];\n\n if let Some(left) = (v0 - v).try_normalize(f32::EPSILON) {\n\n let v1 = vertices[(i + 2) % 4];\n\n if let Some(diag) = (v1 - v).try_normalize(f32::EPSILON) {\n\n let v2 = vertices[(i + 1) % 4];\n\n if let Some(right) = (v2 - v).try_normalize(f32::EPSILON) {\n\n // Check for concave vertex\n\n let angle = left.dot(&diag).acos() + right.dot(&diag).acos();\n\n if angle > std::f32::consts::PI {\n\n start_vertex = i;\n", "file_path": "fyrox-core/src/math/triangulator.rs", "rank": 7, "score": 388267.5025465507 }, { "content": "#[inline]\n\n#[allow(clippy::useless_let_if_seq)]\n\npub fn classify_plane(normal: Vector3<f32>) -> PlaneClass {\n\n let mut longest = 0.0f32;\n\n let mut class = PlaneClass::XY;\n\n\n\n if normal.x.abs() > longest {\n\n longest = normal.x.abs();\n\n class = PlaneClass::YZ;\n\n }\n\n\n\n if normal.y.abs() > longest {\n\n longest = normal.y.abs();\n\n class = PlaneClass::XZ;\n\n }\n\n\n\n if normal.z.abs() > longest {\n\n class = 
PlaneClass::XY;\n\n }\n\n\n\n class\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 8, "score": 388039.907777013 }, { "content": "/// https://en.wikipedia.org/wiki/Lambert%27s_cosine_law\n\nfn lambertian(light_vec: Vector3<f32>, normal: Vector3<f32>) -> f32 {\n\n normal.dot(&light_vec).max(0.0)\n\n}\n\n\n", "file_path": "src/utils/lightmap.rs", "rank": 9, "score": 366555.318029613 }, { "content": "#[inline]\n\npub fn triangle_area(a: Vector3<f32>, b: Vector3<f32>, c: Vector3<f32>) -> f32 {\n\n (b - a).cross(&(c - a)).norm() * 0.5\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 10, "score": 360257.65224334446 }, { "content": "#[inline]\n\npub fn type_name_of<T>(_: T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\n#[cfg(feature = \"enable_profiler\")]\n\n#[macro_export]\n\nmacro_rules! scope_profile {\n\n () => {\n\n let function_name = {\n\n fn scope() {}\n\n $crate::profiler::type_name_of(scope)\n\n };\n\n let _scope_guard = $crate::profiler::ScopeDefinition::new(function_name, line!());\n\n };\n\n}\n\n\n\n#[cfg(not(feature = \"enable_profiler\"))]\n\n#[macro_export]\n\nmacro_rules! 
scope_profile {\n\n () => {};\n", "file_path": "fyrox-core/src/profiler.rs", "rank": 11, "score": 358366.612646957 }, { "content": "#[inline]\n\npub fn get_farthest_point(points: &[Vector3<f32>], dir: Vector3<f32>) -> Vector3<f32> {\n\n let mut n_farthest = 0;\n\n let mut max_dot = -f32::MAX;\n\n for (i, point) in points.iter().enumerate() {\n\n let dot = dir.dot(point);\n\n if dot > max_dot {\n\n n_farthest = i;\n\n max_dot = dot\n\n }\n\n }\n\n points[n_farthest]\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 12, "score": 352883.60772605264 }, { "content": "/// A trait for user-defined actions for behavior tree.\n\npub trait Behavior<'a>: Visit + Default + PartialEq + Debug {\n\n /// A context in which the behavior will be performed.\n\n type Context;\n\n\n\n /// A function that will be called each frame depending on\n\n /// the current execution path of the behavior tree it belongs\n\n /// to.\n\n fn tick(&mut self, context: &mut Self::Context) -> Status;\n\n}\n\n\n\n/// Root node of the tree.\n\n#[derive(Debug, PartialEq, Visit)]\n\npub struct RootNode<B> {\n\n child: Handle<BehaviorNode<B>>,\n\n}\n\n\n\nimpl<B> Default for RootNode<B> {\n\n fn default() -> Self {\n\n Self {\n\n child: Default::default(),\n", "file_path": "src/utils/behavior/mod.rs", "rank": 13, "score": 351514.5362129933 }, { "content": "#[inline]\n\npub fn wrapf(mut n: f32, mut min_limit: f32, mut max_limit: f32) -> f32 {\n\n if n >= min_limit && n <= max_limit {\n\n return n;\n\n }\n\n\n\n if max_limit == 0.0 && min_limit == 0.0 {\n\n return 0.0;\n\n }\n\n\n\n max_limit -= min_limit;\n\n\n\n let offset = min_limit;\n\n min_limit = 0.0;\n\n n -= offset;\n\n\n\n let num_of_max = (n / max_limit).abs().floor();\n\n\n\n if n >= max_limit {\n\n n -= num_of_max * max_limit;\n\n } else if n < min_limit {\n\n n += (num_of_max + 1.0) * max_limit;\n\n }\n\n\n\n n + offset\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 14, "score": 350709.786706817 }, { "content": "pub 
fn make_dropdown_list_option(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_height(26.0).with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_horizontal_text_alignment(HorizontalAlignment::Center)\n\n .with_text(name)\n\n .build(ctx),\n\n ),\n\n ))\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/gui.rs", "rank": 15, "score": 350661.5756559416 }, { "content": "/// Maps key code to its name. Can be useful if you making adjustable key bindings in your\n\n/// game and you need quickly map key code to its name.\n\npub fn virtual_key_code_name(code: VirtualKeyCode) -> &'static str {\n\n match code {\n\n VirtualKeyCode::Key1 => \"1\",\n\n VirtualKeyCode::Key2 => \"2\",\n\n VirtualKeyCode::Key3 => \"3\",\n\n VirtualKeyCode::Key4 => \"4\",\n\n VirtualKeyCode::Key5 => \"5\",\n\n VirtualKeyCode::Key6 => \"6\",\n\n VirtualKeyCode::Key7 => \"7\",\n\n VirtualKeyCode::Key8 => \"8\",\n\n VirtualKeyCode::Key9 => \"9\",\n\n VirtualKeyCode::Key0 => \"0\",\n\n VirtualKeyCode::A => \"A\",\n\n VirtualKeyCode::B => \"B\",\n\n VirtualKeyCode::C => \"C\",\n\n VirtualKeyCode::D => \"D\",\n\n VirtualKeyCode::E => \"E\",\n\n VirtualKeyCode::F => \"F\",\n\n VirtualKeyCode::G => \"G\",\n\n VirtualKeyCode::H => \"H\",\n", "file_path": "src/utils/mod.rs", "rank": 16, "score": 348492.5182052003 }, { "content": "#[inline]\n\npub fn barycentric_is_inside(bary: (f32, f32, f32)) -> bool {\n\n (bary.0 >= 0.0) && (bary.1 >= 0.0) && (bary.0 + bary.1 < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 17, "score": 340545.57844677573 }, { "content": "pub fn create_ui(ui: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n 
.with_child({\n\n debug_text = TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0))\n\n .with_wrap(WrapMode::Word)\n\n .build(ui);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ui);\n\n progress_bar\n\n })\n\n .with_child({\n", "file_path": "examples/shared/mod.rs", "rank": 18, "score": 328017.79573836527 }, { "content": "#[inline]\n\npub fn spherical_to_cartesian(azimuth: f32, elevation: f32, radius: f32) -> Vector3<f32> {\n\n let x = radius * elevation.sin() * azimuth.sin();\n\n let y = radius * elevation.cos();\n\n let z = -radius * elevation.sin() * azimuth.cos();\n\n Vector3::new(x, y, z)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 19, "score": 325042.9152807397 }, { "content": "#[inline]\n\npub fn vector_to_quat(vec: Vector3<f32>) -> UnitQuaternion<f32> {\n\n let dot = vec.normalize().dot(&Vector3::y());\n\n\n\n if dot.abs() > 1.0 - 10.0 * f32::EPSILON {\n\n // Handle singularity when vector is collinear with Y axis.\n\n UnitQuaternion::from_axis_angle(&Vector3::x_axis(), -dot.signum() * 90.0f32.to_radians())\n\n } else {\n\n UnitQuaternion::face_towards(&vec, &Vector3::y())\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 20, "score": 324439.0373771655 }, { "content": "#[inline]\n\npub fn m4x4_approx_eq(a: &Matrix4<f32>, b: &Matrix4<f32>) -> bool {\n\n a.iter()\n\n .zip(b.iter())\n\n .all(|(a, b)| (*a - *b).abs() <= 0.001)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::algebra::Vector2;\n\n use crate::math::Rect;\n\n use crate::math::SmoothAngle;\n\n\n\n #[test]\n\n fn ray_rect_intersection() {\n\n let rect = Rect::new(0.0, 0.0, 10.0, 10.0);\n\n\n\n // Edge-case: Horizontal ray.\n\n assert!(super::ray_rect_intersection(\n\n rect,\n\n Vector2::new(-1.0, 5.0),\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 21, "score": 315157.21227815765 }, { "content": "#[inline]\n\npub fn get_closest_point<P: 
PositionProvider>(points: &[P], point: Vector3<f32>) -> Option<usize> {\n\n let mut closest_sqr_distance = f32::MAX;\n\n let mut closest_index = None;\n\n for (i, vertex) in points.iter().enumerate() {\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(i);\n\n }\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 22, "score": 303440.1392846963 }, { "content": "fn heuristic(a: Vector3<f32>, b: Vector3<f32>) -> f32 {\n\n (a - b).norm_squared()\n\n}\n\n\n\nimpl Default for PathFinder {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl PositionProvider for PathVertex {\n\n fn position(&self) -> Vector3<f32> {\n\n self.position\n\n }\n\n}\n\n\n\n/// Path search can be interrupted by errors, this enum stores all possible\n\n/// kinds of errors.\n\n#[derive(Clone, Debug, thiserror::Error)]\n\npub enum PathError {\n", "file_path": "src/utils/astar.rs", "rank": 23, "score": 297470.2903945899 }, { "content": "fn make_section(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n TreeBuilder::new(WidgetBuilder::new())\n\n .with_content(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_text(name)\n\n .build(ctx),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl SettingsWindow {\n\n pub fn new(engine: &mut GameEngine, sender: Sender<Message>, settings: &Settings) -> Self {\n\n let ok;\n\n let default;\n\n\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n let text =\n\n \"Here you can select graphics settings to improve performance and/or to understand how \\\n\n you scene will look like with different graphics settings. 
Please note that these settings won't be saved \\\n\n with scene!\";\n", "file_path": "editor/src/settings/mod.rs", "rank": 24, "score": 292335.34440191253 }, { "content": "fn make_folder(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n TreeBuilder::new(WidgetBuilder::new())\n\n .with_content(\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::left(5.0))\n\n .with_foreground(Brush::Solid(Color::opaque(153, 217, 234))),\n\n )\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(name)\n\n .build(ctx),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl WorldViewer {\n\n pub fn new(ctx: &mut BuildContext, sender: Sender<Message>) -> Self {\n\n let track_selection_state = true;\n\n let tree_root;\n\n let node_path;\n", "file_path": "editor/src/world/mod.rs", "rank": 25, "score": 292335.34440191253 }, { "content": "pub fn make_arrow_primitives(orientation: ArrowDirection, size: f32) -> Vec<Primitive> {\n\n vec![match orientation {\n\n ArrowDirection::Top => Primitive::Triangle {\n\n points: [\n\n Vector2::new(size * 0.5, 0.0),\n\n Vector2::new(size, size),\n\n Vector2::new(0.0, size),\n\n ],\n\n },\n\n ArrowDirection::Bottom => Primitive::Triangle {\n\n points: [\n\n Vector2::new(0.0, 0.0),\n\n Vector2::new(size, 0.0),\n\n Vector2::new(size * 0.5, size),\n\n ],\n\n },\n\n ArrowDirection::Right => Primitive::Triangle {\n\n points: [\n\n Vector2::new(0.0, 0.0),\n\n Vector2::new(size, size * 0.5),\n", "file_path": "fyrox-ui/src/utils.rs", "rank": 26, "score": 290813.84617813875 }, { "content": "/// \"Transmutes\" array of any sized type to a slice of bytes.\n\npub fn array_as_u8_slice<T: Sized>(v: &[T]) -> &'_ [u8] {\n\n // SAFETY: It is safe to reinterpret data to read it.\n\n unsafe {\n\n std::slice::from_raw_parts(v.as_ptr() as *const u8, std::mem::size_of::<T>() * v.len())\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 27, "score": 288983.9949742269 }, { "content": "/// \"Transmutes\" value of any sized type to 
a slice of bytes.\n\npub fn value_as_u8_slice<T: Sized>(v: &T) -> &'_ [u8] {\n\n // SAFETY: It is safe to reinterpret data to read it.\n\n unsafe { std::slice::from_raw_parts(v as *const T as *const u8, std::mem::size_of::<T>()) }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 28, "score": 288983.9949742269 }, { "content": "#[inline]\n\npub fn solve_quadratic(a: f32, b: f32, c: f32) -> Option<[f32; 2]> {\n\n let discriminant = b * b - 4.0 * a * c;\n\n if discriminant < 0.0 {\n\n // No real roots\n\n None\n\n } else {\n\n // Dont care if quadratic equation has only one root (discriminant == 0), this is edge-case\n\n // which requires additional branching instructions which is not good for branch-predictor in CPU.\n\n let _2a = 2.0 * a;\n\n let discr_root = discriminant.sqrt();\n\n let r1 = (-b + discr_root) / _2a;\n\n let r2 = (-b - discr_root) / _2a;\n\n Some([r1, r2])\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 29, "score": 287423.4827693925 }, { "content": "fn create_vec4_view(ctx: &mut BuildContext, value: Vector4<f32>) -> Handle<UiNode> {\n\n Vec4EditorBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 30, "score": 283847.2494495059 }, { "content": "fn create_vec3_view(ctx: &mut BuildContext, value: Vector3<f32>) -> Handle<UiNode> {\n\n Vec3EditorBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 31, "score": 283730.5278422582 }, { "content": "pub fn make_simple_tooltip(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(false)\n\n .with_foreground(Brush::Solid(Color::opaque(160, 160, 160)))\n\n .with_max_size(Vector2::new(250.0, f32::INFINITY))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_wrap(WrapMode::Word)\n\n 
.with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n .build(ctx)\n\n}\n", "file_path": "fyrox-ui/src/utils.rs", "rank": 32, "score": 279002.94411162497 }, { "content": "fn arrange_dims(dims: &mut [GridDimension], final_size: f32) {\n\n let mut preset_width = 0.0;\n\n for dim in dims.iter() {\n\n if dim.size_mode == SizeMode::Auto || dim.size_mode == SizeMode::Strict {\n\n preset_width += dim.actual_size;\n\n }\n\n }\n\n\n\n let stretch_count = count_stretch_dims(dims);\n\n let avg_size = if stretch_count > 0 {\n\n (final_size - preset_width) / stretch_count as f32\n\n } else {\n\n 0.0\n\n };\n\n\n\n let mut location = 0.0;\n\n for dim in dims.iter_mut() {\n\n dim.location = location;\n\n location += match dim.size_mode {\n\n SizeMode::Strict | SizeMode::Auto => dim.actual_size,\n", "file_path": "fyrox-ui/src/grid.rs", "rank": 33, "score": 275074.9030056685 }, { "content": "pub fn create_impl(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::create_impl_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! 
{\n\n impl #impl_generics Visit for #ty_ident #ty_generics #where_clause {\n\n fn visit(\n\n &mut self,\n\n name: &str,\n\n visitor: &mut Visitor,\n\n ) -> VisitResult {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit/utils.rs", "rank": 34, "score": 269570.3665165321 }, { "content": "#[inline(always)]\n\npub fn lerpf(a: f32, b: f32, t: f32) -> f32 {\n\n a + (b - a) * t\n\n}\n\n\n\n// https://en.wikipedia.org/wiki/Cubic_Hermite_spline\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 35, "score": 269348.8246297611 }, { "content": "fn map_to_local(v: Vector3<f32>) -> Vector2<f32> {\n\n // Terrain is a XZ oriented surface so we can map X -> X, Z -> Y\n\n Vector2::new(v.x, v.z)\n\n}\n\n\n\n/// Ray-terrain intersection result.\n\n#[derive(Debug)]\n\npub struct TerrainRayCastResult {\n\n /// World-space position of impact point.\n\n pub position: Vector3<f32>,\n\n /// World-space normal of triangle at impact point.\n\n pub normal: Vector3<f32>,\n\n /// Index of a chunk that was hit.\n\n pub chunk_index: usize,\n\n /// Time of impact. Usually in [0; 1] range where 0 - origin of a ray, 1 - its end.\n\n pub toi: f32,\n\n}\n\n\n\n/// Terrain is a height field where each point has fixed coordinates in XZ plane, but variable\n\n/// Y coordinate. It can be used to create landscapes. 
It supports multiple layers, where each\n", "file_path": "src/scene/terrain.rs", "rank": 36, "score": 268489.2489823677 }, { "content": "#[inline]\n\npub fn ieee_remainder(x: f32, y: f32) -> f32 {\n\n x - (x / y).round() * y\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 37, "score": 268399.31506727944 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let cancel;\n\n let progress_grid;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_grid = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(!Path::new(LIGHTMAP_SCENE_PATH).exists())\n\n .on_column(1)\n", "file_path": "examples/lightmap.rs", "rank": 38, "score": 267344.7834396668 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ctx);\n\n progress_bar\n\n })\n\n .with_child({\n\n progress_text = TextBuilder::new(\n", "file_path": "examples/async.rs", "rank": 39, "score": 267344.7834396668 }, { "content": "/// Translates library button state into fyrox-ui button state.\n\npub fn translate_state(state: ElementState) -> ButtonState {\n\n match state {\n\n ElementState::Pressed => ButtonState::Pressed,\n\n ElementState::Released => ButtonState::Released,\n\n 
}\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 40, "score": 265597.78654867026 }, { "content": "#[inline]\n\npub fn round_to_step(x: f32, step: f32) -> f32 {\n\n x - ieee_remainder(x, step)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 41, "score": 265512.20040779375 }, { "content": "#[inline]\n\npub fn wrap_angle(angle: f32) -> f32 {\n\n let two_pi = 2.0 * std::f32::consts::PI;\n\n\n\n if angle > 0.0 {\n\n angle % two_pi\n\n } else {\n\n (angle + two_pi) % two_pi\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 42, "score": 264234.36603147263 }, { "content": "#[inline]\n\npub fn clampf(v: f32, min: f32, max: f32) -> f32 {\n\n if v < min {\n\n min\n\n } else if v > max {\n\n max\n\n } else {\n\n v\n\n }\n\n}\n\n\n\n/// There are two versions of remainder, the standard `%` operator which does `x - (x/y).trunc()*y` and IEEE remainder which does `x - (x/y).round()*y`.\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 43, "score": 263918.4572658228 }, { "content": "/// Input angles in degrees\n\nfn quat_from_euler(euler: Vector3<f32>) -> UnitQuaternion<f32> {\n\n math::quat_from_euler(\n\n Vector3::new(\n\n euler.x.to_radians(),\n\n euler.y.to_radians(),\n\n euler.z.to_radians(),\n\n ),\n\n RotationOrder::XYZ,\n\n )\n\n}\n\n\n", "file_path": "src/resource/fbx/mod.rs", "rank": 44, "score": 258942.72994611936 }, { "content": "// impl `#[derive(Visit)]` for `struct` or `enum`\n\npub fn impl_visit(ast: DeriveInput) -> TokenStream2 {\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_visit_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variants) => self::impl_visit_enum(&ty_args, variants),\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit.rs", "rank": 45, "score": 256392.35298632225 }, { "content": "fn project(global_transform: Matrix4<f32>, p: Vector3<f32>) -> Option<Vector2<f32>> {\n\n // Transform point in 
coordinate system of the terrain.\n\n if let Some(inv_global_transform) = global_transform.try_inverse() {\n\n let local_p = inv_global_transform\n\n .transform_point(&Point3::from(p))\n\n .coords;\n\n Some(map_to_local(local_p))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl TypeUuidProvider for Terrain {\n\n fn type_uuid() -> Uuid {\n\n Uuid::from_str(\"4b0a7927-bcd8-41a3-949a-dd10fba8e16a\").unwrap()\n\n }\n\n}\n\n\n\nimpl Terrain {\n\n /// Returns width of the terrain in local coordinates.\n", "file_path": "src/scene/terrain.rs", "rank": 46, "score": 254971.23166471533 }, { "content": "fn is_binary(data: &[u8]) -> bool {\n\n let fbx_magic = b\"Kaydara FBX Binary\";\n\n &data[0..18] == fbx_magic\n\n}\n\n\n\nimpl FbxDocument {\n\n pub async fn new<P: AsRef<Path>>(path: P) -> Result<FbxDocument, FbxError> {\n\n let data = io::load_file(path).await?;\n\n\n\n let is_bin = is_binary(&data);\n\n\n\n let mut reader = Cursor::new(data);\n\n\n\n if is_bin {\n\n binary::read_binary(&mut reader)\n\n } else {\n\n ascii::read_ascii(&mut reader)\n\n }\n\n }\n\n\n\n pub fn root(&self) -> Handle<FbxNode> {\n\n self.root\n\n }\n\n\n\n pub fn nodes(&self) -> &FbxNodeContainer {\n\n &self.nodes\n\n }\n\n}\n", "file_path": "src/resource/fbx/document/mod.rs", "rank": 47, "score": 254150.39444639342 }, { "content": "#[inline]\n\npub fn cubicf(p0: f32, p1: f32, t: f32, m0: f32, m1: f32) -> f32 {\n\n let t2 = t * t;\n\n let t3 = t2 * t;\n\n let scale = (p1 - p0).abs();\n\n\n\n (2.0 * t3 - 3.0 * t2 + 1.0) * p0\n\n + (t3 - 2.0 * t2 + t) * m0 * scale\n\n + (-2.0 * t3 + 3.0 * t2) * p1\n\n + (t3 - t2) * m1 * scale\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 48, "score": 253883.40927934845 }, { "content": "pub trait MessageData: 'static + Debug + Any {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn compare(&self, other: &dyn MessageData) -> bool;\n\n}\n\n\n\nimpl<T> MessageData for T\n\nwhere\n\n T: 'static + Debug + PartialEq + Any,\n\n{\n\n fn as_any(&self) -> &dyn Any 
{\n\n self\n\n }\n\n\n\n fn compare(&self, other: &dyn MessageData) -> bool {\n\n other\n\n .as_any()\n\n .downcast_ref::<T>()\n\n .map(|other| other == self)\n\n .unwrap_or_default()\n", "file_path": "fyrox-ui/src/message.rs", "rank": 49, "score": 253600.93051269068 }, { "content": "#[inline]\n\npub fn inf_sup_cubicf(p0: f32, p1: f32, m0: f32, m1: f32) -> (f32, f32) {\n\n // Find two `t`s where derivative of cubicf is zero - these will be\n\n // extreme points of the spline. Then get the values at those `t`s\n\n let d = -(9.0 * p0 * p0 + 6.0 * p0 * (-3.0 * p1 + m1 + m0) + 9.0 * p1 * p1\n\n - 6.0 * p1 * (m1 + m0)\n\n + m1 * m1\n\n + m1 * m0\n\n + m0 * m0)\n\n .sqrt();\n\n let k = 3.0 * (2.0 * p0 - 2.0 * p1 + m1 + m0);\n\n let v = 3.0 * p0 - 3.0 * p1 + m1 + 2.0 * m0;\n\n let t0 = (-d + v) / k;\n\n let t1 = (d + v) / k;\n\n (cubicf(p0, p1, t0, m0, m1), cubicf(p0, p1, t1, m0, m1))\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 50, "score": 251978.15061383182 }, { "content": "#[inline]\n\npub fn cubicf_derivative(p0: f32, p1: f32, t: f32, m0: f32, m1: f32) -> f32 {\n\n let t2 = t * t;\n\n let scale = (p1 - p0).abs();\n\n\n\n (6.0 * t2 - 6.0 * t) * p0\n\n + (3.0 * t2 - 4.0 * t + 1.0) * m0 * scale\n\n + (6.0 * t - 6.0 * t2) * p1\n\n + (3.0 * t2 - 2.0 * t) * m1 * scale\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 51, "score": 251797.01658238276 }, { "content": "/// impl `Visit` for `struct`\n\nfn impl_visit_struct(\n\n ty_args: &args::TypeArgs,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n let visit_fn_body = if field_args.style == ast::Style::Unit {\n\n quote! { Ok(()) }\n\n } else {\n\n // `field.visit(..);` parts\n\n let field_visits =\n\n utils::create_field_visits(None, field_args.fields.iter(), field_args.style);\n\n\n\n quote! 
{\n\n visitor.enter_region(name)?;\n\n #(self.#field_visits)*\n\n visitor.leave_region()\n\n }\n\n };\n\n\n\n utils::create_impl(ty_args, field_args.iter().cloned(), visit_fn_body)\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit.rs", "rank": 52, "score": 249879.00818866113 }, { "content": "pub fn is_slice_equal_permutation<T: PartialEq>(a: &[T], b: &[T]) -> bool {\n\n if a.is_empty() && !b.is_empty() {\n\n false\n\n } else {\n\n // TODO: Find a way to do this faster.\n\n for source in a.iter() {\n\n let mut found = false;\n\n for other in b.iter() {\n\n if other == source {\n\n found = true;\n\n break;\n\n }\n\n }\n\n if !found {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n", "file_path": "editor/src/utils/mod.rs", "rank": 53, "score": 247068.72210556277 }, { "content": "pub trait InspectableEnum: Debug + Inspect + 'static {}\n\n\n\nimpl<T: Debug + Inspect + 'static> InspectableEnum for T {}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum EnumPropertyEditorMessage {\n\n Variant(usize),\n\n PropertyChanged(PropertyChanged),\n\n}\n\n\n\nimpl EnumPropertyEditorMessage {\n\n define_constructor!(EnumPropertyEditorMessage:Variant => fn variant(usize), layout: false);\n\n define_constructor!(EnumPropertyEditorMessage:PropertyChanged => fn property_changed(PropertyChanged), layout: false);\n\n}\n\n\n\npub struct EnumPropertyEditor<T: InspectableEnum> {\n\n widget: Widget,\n\n variant_selector: Handle<UiNode>,\n\n inspector: Handle<UiNode>,\n\n definition: EnumPropertyEditorDefinition<T>,\n", "file_path": "fyrox-ui/src/inspector/editors/enumeration.rs", "rank": 54, "score": 243511.2489910147 }, { "content": "/// These are only used for creating this directories and checking inside\n\n/// TEST_EXISTENCE constant. 
Because normal ones is using TEST_EXISTENCE constant value,\n\n/// but the constant haven't returned the value to yet, so it crashes.\n\npub fn config_dir(filename: &str) -> PathBuf {\n\n project_dir().config_dir().join(filename)\n\n}\n\n\n", "file_path": "editor/src/project_dirs.rs", "rank": 55, "score": 240438.5894676009 }, { "content": "pub fn data_dir(filename: &str) -> PathBuf {\n\n project_dir().data_dir().join(filename)\n\n}\n\n\n", "file_path": "editor/src/project_dirs.rs", "rank": 56, "score": 240426.31351168102 }, { "content": "pub fn resources_dir(filename: &str) -> PathBuf {\n\n PathBuf::from(\"/usr/lib/Fyroxed/\").join(filename)\n\n}\n", "file_path": "editor/src/project_dirs.rs", "rank": 57, "score": 240426.31351168102 }, { "content": "#[inline]\n\npub fn get_signed_triangle_area(v1: Vector2<f32>, v2: Vector2<f32>, v3: Vector2<f32>) -> f32 {\n\n 0.5 * (v1.x * (v3.y - v2.y) + v2.x * (v1.y - v3.y) + v3.x * (v2.y - v1.y))\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 58, "score": 239907.630237167 }, { "content": "pub trait Visit {\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult;\n\n}\n\n\n\nimpl Default for Visitor {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Visitor {\n\n const MAGIC: &'static str = \"RG3D\";\n\n\n\n pub fn new() -> Self {\n\n let mut nodes = Pool::new();\n\n let root = nodes.spawn(Node::new(\"__ROOT__\", Handle::NONE));\n\n Self {\n\n nodes,\n\n rc_map: FxHashMap::default(),\n\n arc_map: FxHashMap::default(),\n", "file_path": "fyrox-core/src/visitor.rs", "rank": 59, "score": 239721.01624400454 }, { "content": "/// Creates new window using specified window function.\n\n/// <https://en.wikipedia.org/wiki/Window_function>\n\npub fn make_window<W: Fn(usize, usize) -> f32>(sample_count: usize, func: W) -> Vec<f32> {\n\n (0..sample_count).map(|i| func(i, sample_count)).collect()\n\n}\n", "file_path": "fyrox-sound/src/dsp/mod.rs", "rank": 60, "score": 239089.28193120201 }, { 
"content": "/// A trait for resource load error.\n\npub trait ResourceLoadError: 'static + Debug + Send + Sync {}\n\n\n\nimpl<T> ResourceLoadError for T where T: 'static + Debug + Send + Sync {}\n\n\n\n/// Resource could be in three possible states:\n\n/// 1. Pending - it is loading.\n\n/// 2. LoadError - an error has occurred during the load.\n\n/// 3. Ok - resource is fully loaded and ready to use.\n\n///\n\n/// Why it is so complex?\n\n/// Short answer: asynchronous loading.\n\n/// Long answer: when you loading a scene you expect it to be loaded as fast as\n\n/// possible, use all available power of the CPU. To achieve that each resource\n\n/// ideally should be loaded on separate core of the CPU, but since this is\n\n/// asynchronous, we must have the ability to track the state of the resource.\n\n#[derive(Debug)]\n\npub enum ResourceState<T, E>\n\nwhere\n\n T: ResourceData,\n\n E: ResourceLoadError,\n", "file_path": "fyrox-resource/src/lib.rs", "rank": 61, "score": 238918.442335308 }, { "content": "fn is_vorbis_ogg(source: &mut DataSource) -> bool {\n\n let pos = source.seek(SeekFrom::Current(0)).unwrap();\n\n\n\n let is_vorbis = OggStreamReader::new(source.by_ref()).is_ok();\n\n\n\n source.seek(SeekFrom::Start(pos)).unwrap();\n\n\n\n is_vorbis\n\n}\n\n\n\nimpl OggDecoder {\n\n pub fn new(mut source: DataSource) -> Result<Self, DataSource> {\n\n if is_vorbis_ogg(&mut source) {\n\n let mut reader = OggStreamReader::new(source).unwrap();\n\n\n\n let samples = if let Ok(Some(samples)) =\n\n reader.read_dec_packet_generic::<InterleavedSamples<f32>>()\n\n {\n\n samples.samples.into_iter()\n\n } else {\n", "file_path": "fyrox-sound/src/decoder/vorbis.rs", "rank": 62, "score": 238744.4579825287 }, { "content": "#[proc_macro_derive(Visit, attributes(visit))]\n\npub fn visit(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(visit::impl_visit(ast))\n\n}\n\n\n\n/// Implements `Inspect` trait\n\n///\n\n/// 
User has to import `Inspect` and `PropertyInfo` to use this macro.\n", "file_path": "fyrox-core-derive/src/lib.rs", "rank": 63, "score": 237379.58062648948 }, { "content": "pub fn working_config_dir(filename: &str) -> PathBuf {\n\n if *TEST_EXISTENCE {\n\n config_dir(filename)\n\n } else {\n\n debug_dir(filename)\n\n }\n\n}\n\n\n", "file_path": "editor/src/project_dirs.rs", "rank": 64, "score": 237229.97644285476 }, { "content": "pub fn make_vec3_container<P: AsRef<str>>(\n\n nodes: &FbxNodeContainer,\n\n container_node: Handle<FbxNode>,\n\n data_name: P,\n\n) -> Result<FbxContainer<Vector3<f32>>, FbxError> {\n\n FbxContainer::new(nodes, container_node, data_name, |attributes| {\n\n let mut normals = Vec::with_capacity(attributes.len() / 3);\n\n for normal in attributes.chunks_exact(3) {\n\n normals.push(Vector3::new(\n\n normal[0].as_f32()?,\n\n normal[1].as_f32()?,\n\n normal[2].as_f32()?,\n\n ));\n\n }\n\n Ok(normals)\n\n })\n\n}\n", "file_path": "src/resource/fbx/scene/mod.rs", "rank": 65, "score": 237229.97644285476 }, { "content": "pub fn working_data_dir(filename: &str) -> PathBuf {\n\n if *TEST_EXISTENCE {\n\n data_dir(filename)\n\n } else {\n\n debug_dir(filename)\n\n }\n\n}\n\n\n", "file_path": "editor/src/project_dirs.rs", "rank": 66, "score": 237229.97644285476 }, { "content": "/// A trait for resource import options. It provides generic functionality shared over all types of import options.\n\npub trait ImportOptions: Serialize + DeserializeOwned + Default + Clone {\n\n /// Saves import options into a specified file.\n\n fn save(&self, path: &Path) -> bool {\n\n if let Ok(file) = File::create(path) {\n\n if ron::ser::to_writer_pretty(file, self, PrettyConfig::default()).is_ok() {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n\n/// Tries to load import settings for a resource. 
It is not part of ImportOptions trait because\n\n/// `async fn` is not yet supported for traits.\n\npub async fn try_get_import_settings<T>(resource_path: &Path) -> Option<T>\n\nwhere\n\n T: ImportOptions,\n\n{\n\n let settings_path = append_extension(resource_path, \"options\");\n\n\n", "file_path": "src/engine/resource_manager/options.rs", "rank": 67, "score": 236037.09279235784 }, { "content": "pub fn make_default_anchor(ctx: &mut BuildContext, row: usize, column: usize) -> Handle<UiNode> {\n\n let default_anchor_size = 30.0;\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(default_anchor_size)\n\n .with_height(default_anchor_size)\n\n .with_visibility(false)\n\n .on_row(row)\n\n .on_column(column)\n\n .with_draw_on_top(true)\n\n .with_background(Brush::Solid(DEFAULT_ANCHOR_COLOR)),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl TileBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n\n content: TileContent::Empty,\n", "file_path": "fyrox-ui/src/dock.rs", "rank": 68, "score": 235604.50642912177 }, { "content": "#[derive(Debug, Clone, Default, PartialEq, Visit)]\n\nstruct TupleStruct(f32, u32);\n\n\n", "file_path": "fyrox-core-derive/tests/it/visit/basic.rs", "rank": 69, "score": 234758.15227310936 }, { "content": "/// Translates window mouse button into fyrox-ui mouse button.\n\npub fn translate_button(button: crate::event::MouseButton) -> crate::gui::message::MouseButton {\n\n match button {\n\n crate::event::MouseButton::Left => crate::gui::message::MouseButton::Left,\n\n crate::event::MouseButton::Right => crate::gui::message::MouseButton::Right,\n\n crate::event::MouseButton::Middle => crate::gui::message::MouseButton::Middle,\n\n crate::event::MouseButton::Other(i) => crate::gui::message::MouseButton::Other(i),\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 70, "score": 233172.908976011 }, { "content": "#[derive(Default, Clone)]\n\nstruct TextureBytes(Vec<u8>);\n\n\n\nimpl Visit for 
TextureBytes {\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {\n\n self.0.visit(name, visitor)\n\n }\n\n}\n\n\n\nimpl Debug for TextureBytes {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"Texture has {} bytes\", self.0.len())\n\n }\n\n}\n\n\n\nimpl From<Vec<u8>> for TextureBytes {\n\n fn from(bytes: Vec<u8>) -> Self {\n\n Self(bytes)\n\n }\n\n}\n\n\n", "file_path": "src/resource/texture.rs", "rank": 71, "score": 232508.54855859175 }, { "content": "/// A trait for your game state, it contains all possible methods which will be called in\n\n/// various situations. Every method, except `init` is optional.\n\npub trait GameState: 'static {\n\n /// An initializer function that will be called once after engine's initialization\n\n /// allowing you to initialize the state your game.\n\n fn init(engine: &mut Engine) -> Self\n\n where\n\n Self: Sized;\n\n\n\n /// Defines a function that will contain game logic. It has stabilized update rate of\n\n /// 60 Hz. 
Callee can alter control flow of the game by modifying _control_flow parameter.\n\n fn on_tick(&mut self, _engine: &mut Engine, _dt: f32, _control_flow: &mut ControlFlow) {}\n\n\n\n /// Defines a function that will be called when there is any message from user interface.\n\n fn on_ui_message(&mut self, _engine: &mut Engine, _message: UiMessage) {}\n\n\n\n /// Defines a function that will be called when a device event has occurred.\n\n fn on_device_event(&mut self, _engine: &mut Engine, _device_id: DeviceId, _event: DeviceEvent) {\n\n }\n\n\n\n /// Defines a function that will be called when a window event has occurred.\n\n fn on_window_event(&mut self, _engine: &mut Engine, _event: WindowEvent) {}\n", "file_path": "src/engine/framework.rs", "rank": 72, "score": 231643.90524415474 }, { "content": "/// Translates cursor icon from fyrox-ui library to glutin format.\n\npub fn translate_cursor_icon(icon: crate::gui::message::CursorIcon) -> crate::window::CursorIcon {\n\n match icon {\n\n crate::gui::message::CursorIcon::Default => crate::window::CursorIcon::Default,\n\n crate::gui::message::CursorIcon::Crosshair => crate::window::CursorIcon::Crosshair,\n\n crate::gui::message::CursorIcon::Hand => crate::window::CursorIcon::Hand,\n\n crate::gui::message::CursorIcon::Arrow => crate::window::CursorIcon::Arrow,\n\n crate::gui::message::CursorIcon::Move => crate::window::CursorIcon::Move,\n\n crate::gui::message::CursorIcon::Text => crate::window::CursorIcon::Text,\n\n crate::gui::message::CursorIcon::Wait => crate::window::CursorIcon::Wait,\n\n crate::gui::message::CursorIcon::Help => crate::window::CursorIcon::Help,\n\n crate::gui::message::CursorIcon::Progress => crate::window::CursorIcon::Progress,\n\n crate::gui::message::CursorIcon::NotAllowed => crate::window::CursorIcon::NotAllowed,\n\n crate::gui::message::CursorIcon::ContextMenu => crate::window::CursorIcon::ContextMenu,\n\n crate::gui::message::CursorIcon::Cell => crate::window::CursorIcon::Cell,\n\n 
crate::gui::message::CursorIcon::VerticalText => crate::window::CursorIcon::VerticalText,\n\n crate::gui::message::CursorIcon::Alias => crate::window::CursorIcon::Alias,\n\n crate::gui::message::CursorIcon::Copy => crate::window::CursorIcon::Copy,\n\n crate::gui::message::CursorIcon::NoDrop => crate::window::CursorIcon::NoDrop,\n\n crate::gui::message::CursorIcon::Grab => crate::window::CursorIcon::Grab,\n\n crate::gui::message::CursorIcon::Grabbing => crate::window::CursorIcon::Grabbing,\n", "file_path": "src/utils/mod.rs", "rank": 73, "score": 230675.3910409195 }, { "content": "pub fn fix_shadows_distance(mut quality: QualitySettings) -> QualitySettings {\n\n // Scale distance because game world has different scale.\n\n quality.spot_shadows_distance *= 2.0;\n\n quality.point_shadows_distance *= 2.0;\n\n quality\n\n}\n", "file_path": "examples/shared/mod.rs", "rank": 74, "score": 229451.52428068052 }, { "content": "pub fn load_image(data: &[u8]) -> Option<draw::SharedTexture> {\n\n Some(into_gui_texture(\n\n Texture::load_from_memory(data, CompressionOptions::NoCompression, false).ok()?,\n\n ))\n\n}\n\n\n\nlazy_static! {\n\n static ref GIZMO_SHADER: Shader = {\n\n Shader::from_str(\n\n include_str!(\"../resources/embed/shaders/gizmo.shader\",),\n\n PathBuf::default(),\n\n )\n\n .unwrap()\n\n };\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 75, "score": 228994.22200989706 }, { "content": "/// `<prefix>field.visit(\"name\", visitor);`\n\npub fn create_field_visits<'a>(\n\n // None or `f` when bindings tuple variants. 
NOTE: We can't use `prefix: Ident`\n\n prefix: Option<Ident>,\n\n fields: impl Iterator<Item = &'a args::FieldArgs>,\n\n field_style: ast::Style,\n\n) -> Vec<TokenStream2> {\n\n if field_style == ast::Style::Unit {\n\n // `Unit` (struct/enum variant) has no field to visit.\n\n // We won't even enter this region:\n\n return vec![];\n\n }\n\n\n\n let visit_args = fields\n\n .filter(|field| !field.skip)\n\n .enumerate()\n\n .map(|(field_index, field)| {\n\n let (ident, name) = match field_style {\n\n // `NamedFields { a: f32, .. }`\n\n ast::Style::Struct => {\n\n let ident = field.ident.as_ref().unwrap_or_else(|| unreachable!());\n", "file_path": "fyrox-core-derive/src/visit/utils.rs", "rank": 76, "score": 227673.87932174955 }, { "content": "/// Calculates single coefficient of Hann window.\n\n/// <https://en.wikipedia.org/wiki/Hann_function>\n\npub fn hann_window(i: usize, sample_count: usize) -> f32 {\n\n 0.5 - 0.5 * (2.0 * std::f32::consts::PI * i as f32 / (sample_count - 1) as f32).cos()\n\n}\n\n\n", "file_path": "fyrox-sound/src/dsp/mod.rs", "rank": 77, "score": 225520.1207828524 }, { "content": "/// Calculates single coefficient of Hamming window.\n\n/// <https://en.wikipedia.org/wiki/Window_function#Hamming_window>\n\npub fn hamming_window(i: usize, sample_count: usize) -> f32 {\n\n 0.54 - 0.46 * (2.0 * std::f32::consts::PI * i as f32 / (sample_count - 1) as f32).cos()\n\n}\n\n\n", "file_path": "fyrox-sound/src/dsp/mod.rs", "rank": 78, "score": 225520.1207828524 }, { "content": "pub fn set_mesh_diffuse_color(mesh: &mut Mesh, color: Color) {\n\n for surface in mesh.surfaces() {\n\n surface\n\n .material()\n\n .lock()\n\n .set_property(\n\n &ImmutableString::new(\"diffuseColor\"),\n\n PropertyValue::Color(color),\n\n )\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 79, "score": 225480.43185715337 }, { "content": "fn filtered_out(filter: &mut Option<Filter>, path: &Path) -> bool {\n\n match filter.as_mut() {\n\n Some(filter) => 
!filter.0.borrow_mut().deref_mut().lock().unwrap()(path),\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/file_browser.rs", "rank": 80, "score": 223527.08376273618 }, { "content": "pub fn send_sync_message(ui: &UserInterface, mut msg: UiMessage) {\n\n msg.flags = MSG_SYNC_FLAG;\n\n ui.send_message(msg);\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 81, "score": 222563.8087690862 }, { "content": "#[must_use]\n\npub fn append_extension<P: AsRef<Path>, E: AsRef<str>>(\n\n path: P,\n\n additional_extension: E,\n\n) -> PathBuf {\n\n let mut final_path = path.as_ref().to_path_buf();\n\n let new_extension = final_path\n\n .extension()\n\n .map(|e| {\n\n let mut ext = e.to_owned();\n\n ext.push(\".\");\n\n ext.push(additional_extension.as_ref());\n\n ext\n\n })\n\n .unwrap_or_else(|| OsString::from(additional_extension.as_ref()));\n\n final_path.set_extension(new_extension);\n\n final_path\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct BiDirHashMap<K, V> {\n", "file_path": "fyrox-core/src/lib.rs", "rank": 82, "score": 222186.38534027647 }, { "content": "pub fn make_save_file_selector(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n FileSelectorBuilder::new(\n\n WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0))\n\n .with_title(WindowTitle::Text(\"Save Scene As\".into()))\n\n .open(false),\n\n )\n\n .with_mode(FileBrowserMode::Save {\n\n default_file_name: PathBuf::from(\"unnamed.rgs\"),\n\n })\n\n .with_path(\"./\")\n\n .with_filter(make_scene_file_filter())\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 83, "score": 219773.10652604254 }, { "content": "fn data_hash(data: &[u8]) -> u64 {\n\n let mut hasher = FxHasher::default();\n\n data.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "src/resource/texture.rs", "rank": 84, "score": 219617.79097310116 }, { "content": "/// A samples generator.\n\n///\n\n/// # Notes\n\n///\n\n/// Iterator implementation (the `next()` method) must 
produce samples in interleaved format, this\n\n/// means that samples emitted by the method should be in `LRLRLR..` order, where `L` and `R` are\n\n/// samples from left and right channels respectively. The sound engine supports both mono and\n\n/// stereo sample sources.\n\npub trait RawStreamingDataSource: Iterator<Item = f32> + Send + Sync + Debug {\n\n /// Should return sample rate of the source.\n\n fn sample_rate(&self) -> usize;\n\n\n\n /// Should return total channel count.\n\n fn channel_count(&self) -> usize;\n\n\n\n /// Tells whether the provider should restart.\n\n fn rewind(&mut self) -> Result<(), SoundError> {\n\n Ok(())\n\n }\n\n\n\n /// Allows you to start playback from given duration.\n\n fn time_seek(&mut self, _duration: Duration) {}\n\n\n\n /// Returns total duration of data. Can be `None` if internal decoder does not supports seeking.\n\n fn duration(&self) -> Option<Duration> {\n\n None\n\n }\n\n}\n", "file_path": "fyrox-sound/src/buffer/mod.rs", "rank": 85, "score": 218904.1970551667 }, { "content": "#[inline]\n\npub fn quat_from_euler<T: SimdRealField + RealField + Copy + Clone>(\n\n euler_radians: Vector3<T>,\n\n order: RotationOrder,\n\n) -> UnitQuaternion<T> {\n\n let qx = UnitQuaternion::from_axis_angle(&Vector3::x_axis(), euler_radians.x);\n\n let qy = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), euler_radians.y);\n\n let qz = UnitQuaternion::from_axis_angle(&Vector3::z_axis(), euler_radians.z);\n\n match order {\n\n RotationOrder::XYZ => qz * qy * qx,\n\n RotationOrder::XZY => qy * qz * qx,\n\n RotationOrder::YZX => qx * qz * qy,\n\n RotationOrder::YXZ => qz * qx * qy,\n\n RotationOrder::ZXY => qy * qx * qz,\n\n RotationOrder::ZYX => qx * qy * qz,\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 86, "score": 217663.68145404154 }, { "content": "fn create_float_view(ctx: &mut BuildContext, value: f32) -> Handle<UiNode> {\n\n NumericUpDownBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n 
.with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 87, "score": 217386.0635404431 }, { "content": "fn make_bool_input_field(ctx: &mut BuildContext, row: usize, value: bool) -> Handle<UiNode> {\n\n CheckBoxBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(row)\n\n .with_margin(Thickness::uniform(1.0))\n\n .on_column(1),\n\n )\n\n .checked(Some(value))\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/settings/mod.rs", "rank": 88, "score": 215963.73934901005 }, { "content": "fn default_prop() -> PropertyInfo<'static> {\n\n PropertyInfo {\n\n owner_type_id: TypeId::of::<()>(),\n\n name: \"\",\n\n display_name: \"\",\n\n value: &(),\n\n read_only: false,\n\n min_value: None,\n\n max_value: None,\n\n step: None,\n\n precision: None,\n\n description: \"\".to_string(),\n\n is_modified: false,\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/tests/it/inspect.rs", "rank": 89, "score": 215883.87371516827 }, { "content": "fn choose_constraint(dimension: &GridDimension, available_size: f32) -> Option<f32> {\n\n match dimension.size_mode {\n\n SizeMode::Strict => Some(dimension.desired_size),\n\n SizeMode::Auto => Some(available_size),\n\n SizeMode::Stretch => None,\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/grid.rs", "rank": 90, "score": 215837.6347513601 }, { "content": "fn make_tooltip(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n if text.is_empty() {\n\n Handle::NONE\n\n } else {\n\n make_simple_tooltip(ctx, text)\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 91, "score": 215029.89507407605 }, { "content": "fn make_text_title(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::uniform(5.0))\n\n .on_row(0)\n\n .on_column(0),\n\n )\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/window.rs", "rank": 92, "score": 215029.89507407605 }, { "content": "/// Creates `Inspect` trait 
impl and field prop keys\n\npub fn create_inspect_impl<'f>(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = &'f args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let prop_keys_impl = self::prop_keys_impl(ty_args);\n\n let trait_impl = self::inspect_trait_impl(ty_args, field_args, impl_body);\n\n\n\n quote! {\n\n #prop_keys_impl\n\n #trait_impl\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 93, "score": 213762.7517073428 }, { "content": "fn calculate_data_hash(data: &[u8]) -> u64 {\n\n let mut hasher = FxHasher::default();\n\n data.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n\n/// See VertexBuffer::modify for more info.\n\npub struct VertexBufferRefMut<'a> {\n\n vertex_buffer: &'a mut VertexBuffer,\n\n}\n\n\n\nimpl<'a> Drop for VertexBufferRefMut<'a> {\n\n fn drop(&mut self) {\n\n // Recalculate data hash.\n\n self.vertex_buffer.data_hash = calculate_data_hash(&self.vertex_buffer.data);\n\n }\n\n}\n\n\n\nimpl<'a> Deref for VertexBufferRefMut<'a> {\n\n type Target = VertexBuffer;\n", "file_path": "src/scene/mesh/buffer.rs", "rank": 94, "score": 212395.54081092816 }, { "content": "fn debug_dir(filename: &str) -> PathBuf {\n\n std::env::current_dir().unwrap().join(filename)\n\n}\n\n\n", "file_path": "editor/src/project_dirs.rs", "rank": 95, "score": 212234.7882339161 }, { "content": "fn split_rect(rect: &Rect<f32>) -> [Rect<f32>; 4] {\n\n let half_size = rect.size.scale(0.5);\n\n [\n\n Rect {\n\n position: rect.position,\n\n size: half_size,\n\n },\n\n Rect {\n\n position: Vector2::new(rect.position.x + half_size.x, rect.position.y),\n\n size: half_size,\n\n },\n\n Rect {\n\n position: rect.position + half_size,\n\n size: half_size,\n\n },\n\n Rect {\n\n position: Vector2::new(rect.position.x, rect.position.y + half_size.y),\n\n size: half_size,\n\n },\n\n ]\n", "file_path": "fyrox-core/src/quadtree.rs", "rank": 96, "score": 211924.39886356518 }, { "content": "fn create_vec2_view(ctx: 
&mut BuildContext, value: Vector2<f32>) -> Handle<UiNode> {\n\n Vec2EditorBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 97, "score": 211283.74791972962 }, { "content": "fn transmute_slice<T>(bytes: &[u8]) -> &'_ [T] {\n\n // This is absolutely safe because `image` crate's Rgb8/Rgba8/etc. and `tbc`s Rgb8/Rgba8/etc.\n\n // have exactly the same memory layout.\n\n unsafe {\n\n std::slice::from_raw_parts(\n\n bytes.as_ptr() as *const T,\n\n bytes.len() / std::mem::size_of::<T>(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/resource/texture.rs", "rank": 98, "score": 209922.40285372577 }, { "content": "pub fn read_ascii<R>(reader: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let mut nodes: Pool<FbxNode> = Pool::new();\n\n let root_handle = nodes.spawn(FbxNode {\n\n name: String::from(\"__ROOT__\"),\n\n children: Vec::new(),\n\n parent: Handle::NONE,\n\n attributes: Vec::new(),\n\n });\n\n let mut parent_handle: Handle<FbxNode> = root_handle;\n\n let mut node_handle: Handle<FbxNode> = Handle::NONE;\n\n let mut buffer: Vec<u8> = Vec::new();\n\n let mut name: Vec<u8> = Vec::new();\n\n let mut value: Vec<u8> = Vec::new();\n\n\n\n let buf_len = reader.seek(SeekFrom::End(0))?;\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n", "file_path": "src/resource/fbx/document/ascii.rs", "rank": 99, "score": 207495.42822539737 } ]
Rust
flare-agent/src/profile/tree.rs
kylixs/flare-profiler
dd27371476b1326a50b1d753a5a6e9bc4cf7c67b
use std::collections::HashMap; use std::rc::*; use std::borrow::Cow; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Mutex; use std::sync::RwLock; use std::sync::Arc; use time::Duration; use thread::*; use log::{debug, info, warn}; use native::{JavaLong, JavaMethod}; use std::collections::hash_map::IterMut; static CALL_COUNT: AtomicUsize = AtomicUsize::new(0); fn get_next_nodeid() { CALL_COUNT.fetch_add(1, Ordering::SeqCst); } pub struct TreeArena { thread_trees: HashMap<JavaLong, CallStackTree>, } impl TreeArena { pub fn new() -> TreeArena { TreeArena { thread_trees: HashMap::new(), } } pub fn get_all_call_trees(&self) -> &HashMap<JavaLong, CallStackTree>{ &self.thread_trees } pub fn get_call_tree(&mut self, thread: &Thread) -> &mut CallStackTree { self.thread_trees.entry(thread.thread_id).or_insert_with(||{ CallStackTree::new(thread.thread_id, &thread.name) }); self.thread_trees.get_mut(&thread.thread_id).unwrap() } pub fn format_call_tree(&mut self, thread: &Thread, compact: bool) -> String { match self.thread_trees.get(&thread.thread_id) { Some(thread_data) => { println!("call tree of thread: [{}] [{}]", thread.thread_id, thread.name); thread_data.format_call_tree(compact) }, None => { println!("call tree not found of thread: [{}] [{}]", thread.thread_id, thread.name); String::from("[call tree not found]") } } } pub fn print_all(&self) { for (thread_id,thread_data) in self.thread_trees.iter() { println!("call tree of thread: [{}]", thread_id); println!("{}", thread_data.format_call_tree(false)); } } pub fn clear(&mut self) { self.thread_trees.clear(); println!("clear trace data"); } } pub struct CallStackTree { nodes: Vec<TreeNode>, root_node: NodeId, top_call_stack_node: NodeId, pub total_duration: i64, pub thread_id: JavaLong } impl CallStackTree { pub fn new(thread_id: JavaLong, thread_name: &str) -> CallStackTree { CallStackTree { nodes: vec![TreeNode::newRootNode(thread_name)], root_node: NodeId { index: 0 }, top_call_stack_node: NodeId { 
index: 0 }, total_duration: 0, thread_id: thread_id } } pub fn reset_top_call_stack_node(&mut self) { self.top_call_stack_node = self.root_node; } pub fn begin_call(&mut self, method_id: &JavaMethod) -> bool { let topNode = self.get_top_node(); match topNode.find_child(method_id) { Some(child_id) => { let node = self.get_node(child_id); self.top_call_stack_node = node.data.node_id.clone(); true }, None => { let next_index = self.nodes.len(); let topNode = self.get_mut_top_node(); let node_data = TreeNode::newCallNode(topNode, next_index, method_id); self.top_call_stack_node = node_data.data.node_id.clone(); self.nodes.push(node_data); false } } } pub fn end_call(&mut self, method_id: JavaMethod, call_name: &String, duration: i64) { let top_node = self.get_mut_top_node(); if top_node.data.name == *call_name { top_node.data.call_duration += duration; top_node.data.call_count += 1; debug!("end_call: {} {}, call_count:{}", call_name, duration, top_node.data.call_count); match &top_node.parent { Some(nodeid) => { self.top_call_stack_node = nodeid.clone(); }, None => { println!("parent node not found, pop call stack failed, call_name: {}, stack: {}, depth: {}", call_name, top_node.data.name, top_node.data.depth) } } } else { println!("call name mismatch, pop call stack failed, call_name: {}, top_node:{}, stack:{}, depth: {} ", call_name, top_node.data.name, top_node.data.name, top_node.data.depth); } } pub fn end_last_call(&mut self, total_duration: i64) { let last_duration = self.total_duration; let top_node = self.get_mut_top_node(); if(last_duration > 0){ top_node.data.call_duration += (total_duration - last_duration); } top_node.data.call_count += 1; self.total_duration = total_duration; } pub fn format_call_tree(&self, compact: bool) -> String { let mut result = String::with_capacity(8192); self.format_tree_node(&mut result,&self.root_node, compact); result } pub fn format_tree_node(&self, result: &mut String, nodeid: &NodeId, compact: bool) { let node = 
self.get_node(&nodeid); if compact { result.push_str(&node.data.depth.to_string()); result.push_str(","); } else { for x in 0..node.data.depth { result.push_str(" "); } } let mut call_duration = node.data.call_duration; if nodeid.index == 0 { for child in node.children.values() { call_duration += self.get_node(&child).data.call_duration; } }else { } let duration = call_duration/1000_000; result.push_str(&node.data.name); result.push_str(","); result.push_str(&node.data.call_count.to_string()); result.push_str(","); result.push_str(&duration.to_string()); result.push_str("\n"); for child in node.children.values() { self.format_tree_node(result,&child, compact); } } pub fn get_top_node(&self) -> &TreeNode { &self.nodes[self.top_call_stack_node.index] } pub fn get_mut_top_node(&mut self) -> &mut TreeNode { self.nodes.get_mut(self.top_call_stack_node.index).unwrap() } pub fn get_node(&self, node_id: &NodeId) -> &TreeNode { &self.nodes[node_id.index] } pub fn get_mut_node(&mut self, node_id: &NodeId) -> &mut TreeNode { &mut self.nodes[node_id.index] } pub fn get_root_node(&self) -> &TreeNode { &self.nodes[self.root_node.index] } } #[derive(Clone)] pub struct NodeData { pub node_id: NodeId, pub depth: u32, pub name: String, pub call_count: u32, pub call_duration: i64, pub children_size: u32 } #[derive(Clone, Copy)] pub struct NodeId { index: usize, } #[derive( Clone)] pub struct TreeNode { id: u64, pub data: NodeData, parent: Option<NodeId>, children: HashMap<u64, NodeId> } impl TreeNode { pub fn newRootNode(name: &str) -> TreeNode { TreeNode{ id: 0, data : NodeData { node_id: NodeId{index:0}, depth: 0, name: name.to_string(), call_count: 0, call_duration: 0, children_size: 0, }, parent: None, children: HashMap::new() } } pub fn newCallNode(parentNode: &mut TreeNode, next_index: usize, method_id: &JavaMethod) -> TreeNode { let node_id = NodeId{index:next_index}; parentNode.children.insert(*method_id as u64, node_id.clone()); parentNode.data.children_size += 1; TreeNode{ 
id: *method_id as u64, data : NodeData { node_id: node_id, name: String::new(), depth: parentNode.data.depth + 1, call_count: 0, call_duration: 0, children_size: 0, }, parent: Some(parentNode.data.node_id.clone()), children: HashMap::new(), } } fn find_child(&self, method_id: &JavaMethod) -> Option<&NodeId> { let key = *method_id as u64; self.children.get(&key) } }
use std::collections::HashMap; use std::rc::*; use std::borrow::Cow; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Mutex; use std::sync::RwLock; use std::sync::Arc; use time::Duration; use thread::*; use log::{debug, info, warn}; use native::{JavaLong, JavaMethod}; use std::collections::hash_map::IterMut; static CALL_COUNT: AtomicUsize = AtomicUsize::new(0); fn get_next_nodeid() { CALL_COUNT.fetch_add(1, Ordering::SeqCst); } pub struct TreeArena { thread_trees: HashMap<JavaLong, CallStackTree>, } impl TreeArena { pub fn new() -> TreeArena { TreeArena { thread_trees: HashMap::new(), } } pub fn get_all_call_trees(&self) -> &HashMap<JavaLong, CallStackTree>{ &self.thread_trees } pub fn get_call_tree(&mut self, thread: &Thread) -> &mut CallStackTree { self.thread_trees.entry(thread.thread_id).or_insert_with(||{ CallStackTree::new(thread.thread_id, &thread.name) }); self.thread_trees.get_mut(&thread.thread_id).unwrap() } pub fn format_call_tree(&mut self, thread: &Thread, compact: bool) -> String { match self.thread_trees.get(&thread.thread_id) { Some(thread_data) => { println!("call tree of thread: [{}] [{}]", thread.thread_id, thread.name); thread_data.format_call_tree(compact) }, None => { println!("call tree not found of thread: [{}] [{}]", thread.thread_id, thread.name); String::from("[call tree not found]") } } } pub fn print_all(&self) { for (thread_id,thread_data) in self.thread_trees.iter() { println!("call tree of thread: [{}]", thread_id); println!("{}", thread_data.format_call_tree(false)); } } pub fn clear(&mut self) { self.thread_trees.clear(); println!("clear trace data"); } } pub struct CallStackTree { nodes: Vec<TreeNode>, root_node: NodeId, top_call_stack_node: NodeId, pub total_duration: i64, pub thread_id: JavaLong } impl CallStackTree { pub fn new(thread_id: JavaLong, thread_name: &st
pub fn reset_top_call_stack_node(&mut self) { self.top_call_stack_node = self.root_node; } pub fn begin_call(&mut self, method_id: &JavaMethod) -> bool { let topNode = self.get_top_node(); match topNode.find_child(method_id) { Some(child_id) => { let node = self.get_node(child_id); self.top_call_stack_node = node.data.node_id.clone(); true }, None => { let next_index = self.nodes.len(); let topNode = self.get_mut_top_node(); let node_data = TreeNode::newCallNode(topNode, next_index, method_id); self.top_call_stack_node = node_data.data.node_id.clone(); self.nodes.push(node_data); false } } } pub fn end_call(&mut self, method_id: JavaMethod, call_name: &String, duration: i64) { let top_node = self.get_mut_top_node(); if top_node.data.name == *call_name { top_node.data.call_duration += duration; top_node.data.call_count += 1; debug!("end_call: {} {}, call_count:{}", call_name, duration, top_node.data.call_count); match &top_node.parent { Some(nodeid) => { self.top_call_stack_node = nodeid.clone(); }, None => { println!("parent node not found, pop call stack failed, call_name: {}, stack: {}, depth: {}", call_name, top_node.data.name, top_node.data.depth) } } } else { println!("call name mismatch, pop call stack failed, call_name: {}, top_node:{}, stack:{}, depth: {} ", call_name, top_node.data.name, top_node.data.name, top_node.data.depth); } } pub fn end_last_call(&mut self, total_duration: i64) { let last_duration = self.total_duration; let top_node = self.get_mut_top_node(); if(last_duration > 0){ top_node.data.call_duration += (total_duration - last_duration); } top_node.data.call_count += 1; self.total_duration = total_duration; } pub fn format_call_tree(&self, compact: bool) -> String { let mut result = String::with_capacity(8192); self.format_tree_node(&mut result,&self.root_node, compact); result } pub fn format_tree_node(&self, result: &mut String, nodeid: &NodeId, compact: bool) { let node = self.get_node(&nodeid); if compact { 
result.push_str(&node.data.depth.to_string()); result.push_str(","); } else { for x in 0..node.data.depth { result.push_str(" "); } } let mut call_duration = node.data.call_duration; if nodeid.index == 0 { for child in node.children.values() { call_duration += self.get_node(&child).data.call_duration; } }else { } let duration = call_duration/1000_000; result.push_str(&node.data.name); result.push_str(","); result.push_str(&node.data.call_count.to_string()); result.push_str(","); result.push_str(&duration.to_string()); result.push_str("\n"); for child in node.children.values() { self.format_tree_node(result,&child, compact); } } pub fn get_top_node(&self) -> &TreeNode { &self.nodes[self.top_call_stack_node.index] } pub fn get_mut_top_node(&mut self) -> &mut TreeNode { self.nodes.get_mut(self.top_call_stack_node.index).unwrap() } pub fn get_node(&self, node_id: &NodeId) -> &TreeNode { &self.nodes[node_id.index] } pub fn get_mut_node(&mut self, node_id: &NodeId) -> &mut TreeNode { &mut self.nodes[node_id.index] } pub fn get_root_node(&self) -> &TreeNode { &self.nodes[self.root_node.index] } } #[derive(Clone)] pub struct NodeData { pub node_id: NodeId, pub depth: u32, pub name: String, pub call_count: u32, pub call_duration: i64, pub children_size: u32 } #[derive(Clone, Copy)] pub struct NodeId { index: usize, } #[derive( Clone)] pub struct TreeNode { id: u64, pub data: NodeData, parent: Option<NodeId>, children: HashMap<u64, NodeId> } impl TreeNode { pub fn newRootNode(name: &str) -> TreeNode { TreeNode{ id: 0, data : NodeData { node_id: NodeId{index:0}, depth: 0, name: name.to_string(), call_count: 0, call_duration: 0, children_size: 0, }, parent: None, children: HashMap::new() } } pub fn newCallNode(parentNode: &mut TreeNode, next_index: usize, method_id: &JavaMethod) -> TreeNode { let node_id = NodeId{index:next_index}; parentNode.children.insert(*method_id as u64, node_id.clone()); parentNode.data.children_size += 1; TreeNode{ id: *method_id as u64, data : 
NodeData { node_id: node_id, name: String::new(), depth: parentNode.data.depth + 1, call_count: 0, call_duration: 0, children_size: 0, }, parent: Some(parentNode.data.node_id.clone()), children: HashMap::new(), } } fn find_child(&self, method_id: &JavaMethod) -> Option<&NodeId> { let key = *method_id as u64; self.children.get(&key) } }
r) -> CallStackTree { CallStackTree { nodes: vec![TreeNode::newRootNode(thread_name)], root_node: NodeId { index: 0 }, top_call_stack_node: NodeId { index: 0 }, total_duration: 0, thread_id: thread_id } }
function_block-function_prefixed
[ { "content": "fn get_stack_traces(jvmenv: &Box<Environment>, thread_info_map: &mut HashMap<JavaLong, ThreadInfo>, update_cpu_time: bool) -> Result<Vec<JavaStackTrace>, NativeError> {\n\n let mut stack_traces = vec![];\n\n match jvmenv.get_all_threads() {\n\n Err(e) => {\n\n println!(\"get_all_threads failed: {:?}\", e);\n\n Err(e)\n\n }\n\n Ok(threads) => {\n\n //println!(\"get_all_threads: {:?}\", threads);\n\n for thread in threads {\n\n let java_thread_id = jvmenv.get_thread_id(&thread.native_id);\n\n let mut thread_info = thread_info_map.get_mut(&java_thread_id);\n\n let mut is_new_thread = false;\n\n if thread_info.is_none() {\n\n is_new_thread = true;\n\n let mut new_thread_info;\n\n match jvmenv.get_thread_info_ex(&thread.native_id) {\n\n Ok(v) => {\n\n new_thread_info = v;\n\n },\n", "file_path": "flare-agent/src/lib.rs", "rank": 0, "score": 383200.8586386804 }, { "content": "pub fn read_header_info(file: &mut File, header_map: &mut HashMap<String, String>, header_segment_flag: &str, data_segment_flag: &str) -> Result<u64, io::Error> {\n\n //read file header\n\n let flag = read_file_flag(file);\n\n if flag != header_segment_flag {\n\n println!(\"Invalid file, header segment flag not match, expect '{}' but '{}'\", header_segment_flag, flag);\n\n return Err(io::Error::new(ErrorKind::InvalidInput, \"Invalid file, header segment not match\"));\n\n }\n\n\n\n //header len (2 bytes)\n\n let header_len = file.read_u16::<FileEndian>().unwrap() as u64;\n\n let header_offset = 4 + 2;\n\n let header_count = file.read_u8().unwrap();\n\n\n\n let mut buf_reader = BufReader::new(file as &mut Read);\n\n let reader = &mut buf_reader;\n\n for i in 00..header_count {\n\n let name = read_utf8(reader);\n\n let value = read_utf8(reader);\n\n header_map.insert(name.to_string(), value.to_string());\n\n }\n", "file_path": "flare-utils/src/file_utils.rs", "rank": 1, "score": 375087.7371505354 }, { "content": "//write common file header\n\npub fn write_header_info(file: &mut File, 
header_map: &mut HashMap<&str, String>, header_segment_flag: &str, data_segment_flag: &str) -> Result<u64, io::Error> {\n\n //file version\n\n header_map.insert(\"ver\", \"0.1.0\".to_string());\n\n\n\n //encode header\n\n let mut header_vec = vec![];\n\n //property size (1 byte)\n\n header_vec.write_i8(header_map.len() as i8);\n\n for (name, value) in header_map.iter() {\n\n header_vec.write_all(name.as_bytes());\n\n header_vec.write_u8(0);\n\n header_vec.write_all(value.as_bytes());\n\n header_vec.write_u8(0);\n\n }\n\n let max_len = 192;\n\n if header_vec.len() > max_len {\n\n return Err(io::Error::new(ErrorKind::InvalidInput, \"header len is too large!\"));\n\n }\n\n //添加一个空闲空间,避免后面重写头部覆盖数据\n\n let pad_len = max_len - header_vec.len();\n", "file_path": "flare-utils/src/file_utils.rs", "rank": 2, "score": 353077.38869223185 }, { "content": "pub fn thread_rng() -> impl Fn() -> f32 {\n\n || RNG.with(|rng| rng.borrow_mut().next_f64() as f32)\n\n}\n\n\n", "file_path": "thirty-libs/inferno/src/flamegraph/rand.rs", "rank": 3, "score": 329923.7968069714 }, { "content": "pub fn resp_encode_thread_data(thread_data: &ThreadData) -> Value {\n\n Value::Array(vec![\n\n Value::String(\"thread\".to_string()),\n\n Value::String(\"time\".to_string()),\n\n Value::Integer(thread_data.sample_time),\n\n Value::String(\"id\".to_string()),\n\n Value::Integer(thread_data.id),\n\n Value::String(\"name\".to_string()),\n\n Value::String(thread_data.name.clone()),\n\n Value::String(\"cpu_time\".to_string()),\n\n Value::Integer(thread_data.cpu_time),\n\n Value::String(\"cpu_time_delta\".to_string()),\n\n Value::Integer(thread_data.cpu_time_delta),\n\n Value::String(\"state\".to_string()),\n\n Value::String(thread_data.state.clone()),\n\n Value::String(\"stacktrace\".to_string()),\n\n resp_encode_stacktrace(thread_data),\n\n ])\n\n}\n\n\n\n\n", "file_path": "flare-server/src/sample_encoder.rs", "rank": 4, "score": 316370.286176553 }, { "content": "pub fn resp_encode_thread_data(thread_data: 
&ThreadData) -> Value {\n\n Value::Array(vec![\n\n Value::String(\"thread\".to_string()),\n\n Value::String(\"time\".to_string()),\n\n Value::Integer(thread_data.sample_time),\n\n Value::String(\"id\".to_string()),\n\n Value::Integer(thread_data.id),\n\n Value::String(\"name\".to_string()),\n\n Value::String(thread_data.name.clone()),\n\n Value::String(\"cpu_time\".to_string()),\n\n Value::Integer(thread_data.cpu_time),\n\n Value::String(\"cpu_time_delta\".to_string()),\n\n Value::Integer(thread_data.cpu_time_delta),\n\n Value::String(\"state\".to_string()),\n\n Value::String(thread_data.state.clone()),\n\n Value::String(\"stacktrace\".to_string()),\n\n resp_encode_stacktrace(thread_data),\n\n ])\n\n}\n\n\n\n\n", "file_path": "flare-agent/src/profile/encoder.rs", "rank": 5, "score": 316370.286176553 }, { "content": "pub fn resp_decode_thread_data(data_vec: &Vec<resp::Value>) -> ThreadData {\n\n// let sample_time = get_resp_property_as_int(data_vec, \"time\", 1, 0);\n\n// let thread_id = get_resp_property_as_int(data_vec, \"id\", 1, 0);\n\n// let cpu_time = get_resp_property_as_int(data_vec, \"cpu_time\", 1, 0);\n\n// let cpu_time_delta = get_resp_property_as_int(data_vec, \"cpu_time_delta\", 1, 0);\n\n// let name = get_resp_property_as_str(data_vec, \"name\", 1, \"\");\n\n// let state = get_resp_property_as_str(data_vec, \"state\", 1, \"\");\n\n// let mut stacktrace = vec![];\n\n// let data = get_resp_property(data_vec, \"stacktrace\", 1);\n\n// if let Some(resp::Value::BufBulk(vec)) = data {\n\n// stacktrace = convert_to_vec64(vec.clone());\n\n// }\n\n\n\n let mut stacktrace = vec![];\n\n let mut sample_time= 0;\n\n let mut thread_id= 0;\n\n let mut cpu_time= 0;\n\n let mut cpu_time_delta= 0;\n\n let mut name= \"\";\n\n let mut state= \"\";\n", "file_path": "flare-server/src/sample_encoder.rs", "rank": 6, "score": 298096.2544354438 }, { "content": "//填充空的数据,使得返回的时序数据范围的一致的\n\nfn fill_null_data(mut data_vec: Vec<i64>, start_time: i64, end_time: i64, 
origin_start_time: i64, origin_end_time: i64, unit_time_ms: i32) -> Vec<i64> {\n\n let fill_steps_before = (start_time - origin_start_time)/unit_time_ms as i64;\n\n let fill_steps_after = (origin_end_time - end_time)/unit_time_ms as i64;\n\n if fill_steps_before == 0 && fill_steps_after == 0 {\n\n return data_vec;\n\n }\n\n\n\n let mut new_data_vec = Vec::with_capacity(data_vec.len()+(fill_steps_before+fill_steps_after) as usize);\n\n for i in 0..fill_steps_before {\n\n new_data_vec.push(0);\n\n }\n\n\n\n new_data_vec.append(&mut data_vec);\n\n\n\n for i in 0..fill_steps_after {\n\n new_data_vec.push(0);\n\n }\n\n new_data_vec\n\n}\n\n\n", "file_path": "flare-utils/src/timeseries.rs", "rank": 7, "score": 296588.93970822584 }, { "content": "pub fn resp_encode_sample_info(start_time: i64, sample_interval:u64, last_sample_time: i64) -> Value {\n\n Value::Array(vec![\n\n Value::String(\"sample_info\".to_string()),\n\n Value::String(\"start_time\".to_string()),\n\n Value::Integer(start_time),\n\n Value::String(\"sample_interval\".to_string()),\n\n Value::Integer(sample_interval as i64),\n\n Value::String(\"last_sample_time\".to_string()),\n\n Value::Integer(last_sample_time),\n\n ])\n\n}", "file_path": "flare-agent/src/profile/encoder.rs", "rank": 8, "score": 289789.3333750019 }, { "content": " nodeForSelfOrAncestor() {\n\n for (let layer = this; layer; layer = layer._parent) {\n\n if (layer._node)\n\n return layer._node;\n\n }\n\n return null;\n", "file_path": "flare-server/static/simpleui/devtools/timeline_model/TracingLayerTree.js", "rank": 9, "score": 286997.67400733865 }, { "content": " node() {\n\n return this._node;\n", "file_path": "flare-server/static/simpleui/devtools/timeline_model/TracingLayerTree.js", "rank": 10, "score": 283784.3484909808 }, { "content": "pub fn get_option_as_int(options: &serde_json::Map<String, serde_json::Value>, key: &str, default_value: i64) -> i64 {\n\n match options.get(key) {\n\n Some(val) => {\n\n match val.as_i64() {\n\n Some(s) 
=> s,\n\n None => default_value\n\n }\n\n },\n\n None => default_value\n\n }\n\n}\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 11, "score": 281644.73950795253 }, { "content": "pub fn get_resp_property_as_int(data_vec: &Vec<resp::Value>, key: &str, start: i32, default_value: i64) -> i64 {\n\n for x in (start as usize..data_vec.len()).step_by(2) {\n\n if let resp::Value::String(name) = &data_vec[x] {\n\n if name == key {\n\n if let resp::Value::Integer(x) = &data_vec[x+1] {\n\n return *x\n\n }\n\n }\n\n }\n\n }\n\n default_value\n\n}\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 12, "score": 271846.7120034448 }, { "content": "pub fn nowTime() -> String {\n\n let date = Local::now();\n\n return date.format(\"%Y-%m-%d %H:%M:%S.%6f\").to_string();\n\n //println!(\"{:?} {}\", date, date.format(\"[%Y-%m-%d %H:%M:%S.%3f]\"));\n\n}\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 13, "score": 269999.0191120303 }, { "content": "fn get_next_nodeid() {\n\n CALL_COUNT.fetch_add(1, Ordering::SeqCst);\n\n}\n\n\n\n// assume thread safe, get lock outside\n\npub struct TreeArena {\n\n thread_trees: HashMap<JavaLong, CallStackTree>,\n\n// lock: RwLock<u32>\n\n}\n\n\n\nimpl TreeArena {\n\n pub fn new() -> TreeArena {\n\n TreeArena {\n\n thread_trees: HashMap::new(),\n\n //lock: RwLock::new(0)\n\n }\n\n }\n\n\n\n pub fn get_all_call_trees(&self) -> &HashMap<JavaLong, CallStackTree>{\n\n &self.thread_trees\n", "file_path": "flare-server/src/call_tree.rs", "rank": 15, "score": 269080.3772098258 }, { "content": "fn tidy_generic(mut func: String) -> String {\n\n func = func.replace(';', \":\");\n\n // remove argument list from function name, but _don't_ remove:\n\n //\n\n // - Go method names like \"net/http.(*Client).Do\".\n\n // see https://github.com/brendangregg/FlameGraph/pull/72\n\n // - C++ anonymous namespace annotations.\n\n // see https://github.com/brendangregg/FlameGraph/pull/93\n\n if let Some(first_paren) = func.find('(') {\n\n if 
func[first_paren..].starts_with(\"anonymous namespace)\") {\n\n // C++ anonymous namespace\n\n } else {\n\n let mut is_go = false;\n\n if let Some(c) = func.get((first_paren - 1)..first_paren) {\n\n // if .get(-1) is None, can't be a dot\n\n if c == \".\" {\n\n // assume it's a Go method name, so do nothing\n\n is_go = true;\n\n }\n\n }\n", "file_path": "thirty-libs/inferno/src/collapse/perf.rs", "rank": 16, "score": 268276.1497835113 }, { "content": "fn tidy_java(mut func: String) -> String {\n\n // along with tidy_generic converts the following:\n\n // Lorg/mozilla/javascript/ContextFactory;.call(Lorg/mozilla/javascript/ContextAction;)Ljava/lang/Object;\n\n // Lorg/mozilla/javascript/ContextFactory;.call(Lorg/mozilla/javascript/C\n\n // Lorg/mozilla/javascript/MemberBox;.<init>(Ljava/lang/reflect/Method;)V\n\n // into:\n\n // org/mozilla/javascript/ContextFactory:.call\n\n // org/mozilla/javascript/ContextFactory:.call\n\n // org/mozilla/javascript/MemberBox:.init\n\n if func.starts_with('L') && func.contains('/') {\n\n func.remove(0);\n\n }\n\n\n\n func\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::fs;\n\n use std::io::Read;\n", "file_path": "thirty-libs/inferno/src/collapse/perf.rs", "rank": 17, "score": 268276.1497835113 }, { "content": "pub fn is_server_running() -> bool {\n\n SAMPLE_SERVER.lock().unwrap().is_running()\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 18, "score": 267662.7643946144 }, { "content": "///\n\n/// Public accessor that provides an abstraction to the global mutable agent state.\n\n///\n\npub fn static_context() -> &'static AgentContext {\n\n &STATIC_CONTEXT\n\n}\n\n\n\npub struct AgentContext {\n\n context: Arc<RwLock<Context>>,\n\n pub config: Arc<RwLock<Config>>\n\n}\n\n\n\nimpl AgentContext {\n\n pub fn new() -> AgentContext {\n\n AgentContext {\n\n context: Arc::new(RwLock::new(Context::new())),\n\n config: Arc::new(RwLock::new(Config::default()))\n\n }\n\n }\n\n\n\n pub fn set_config(&self, config: 
Config) {\n\n match self.config.write() {\n\n Ok(mut cfg) => {\n", "file_path": "flare-agent/src/context.rs", "rank": 19, "score": 262909.8131735038 }, { "content": "///\n\n/// Turns a C-style string pointer into a String instance. If the string pointer points to NULL,\n\n/// then a \"(NULL)\" string will be returned.\n\n///\n\npub fn stringify(input: RawString) -> String {\n\n unsafe {\n\n if input != ptr::null_mut() {\n\n match CStr::from_ptr(input).to_str() {\n\n Ok(string) => string.to_string(),\n\n Err(_) => \"(UTF8-ERROR)\".to_string()\n\n }\n\n } else {\n\n \"(NULL)\".to_string()\n\n }\n\n }\n\n}\n", "file_path": "flare-agent/src/util.rs", "rank": 20, "score": 259625.35931020862 }, { "content": "pub fn compare_results<R, E>(result: R, mut expected: E, expected_file: &str, strip_quotes: bool)\n\nwhere\n\n R: BufRead,\n\n E: BufRead,\n\n{\n\n let mut buf = String::new();\n\n let mut line_num = 1;\n\n for line in result.lines() {\n\n let line = if strip_quotes {\n\n line.unwrap().replace(\"\\\"\", \"\").replace(\"'\", \"\")\n\n } else {\n\n line.unwrap()\n\n };\n\n if expected.read_line(&mut buf).unwrap() == 0 {\n\n panic!(\n\n \"\\noutput has more lines than expected result file: {}\",\n\n expected_file\n\n );\n\n }\n\n assert_eq!(line, buf.trim_end(), \"\\n{}:{}\", expected_file, line_num);\n", "file_path": "thirty-libs/inferno/tests/common/collapse.rs", "rank": 21, "score": 258882.93738748485 }, { "content": "fn is_trace_running() -> bool {\n\n //avoid dead lock in gc event callback function\n\n //SAMPLER.lock().unwrap().is_running()\n\n unsafe { TRACE_RUNNING }\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 22, "score": 256933.9431497173 }, { "content": " nodeSelfHeight() {\n\n return 40;\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/ShowMoreDataGridNode.js", "rank": 23, "score": 255190.18357367994 }, { "content": " dataGridNodeForTreeNode(treeNode) {\n\n return 
treeNode[Flamechart.TimelineTreeView.TreeGridNode._gridNodeSymbol] || null;\n", "file_path": "flare-server/static/simpleui/devtools/flamechart/TimelineTreeView.js", "rank": 24, "score": 254929.31810649528 }, { "content": "pub fn get_option_as_int_array(options: &serde_json::Map<String, serde_json::Value>, key: &str) -> io::Result<Vec<i64>> {\n\n let val = options.get(key);\n\n if val.is_none() {\n\n return Err(new_invalid_input_error(&format!(\"missing option: {}\", key)));\n\n }\n\n let val = val.unwrap().as_array();\n\n if val.is_none() {\n\n return Err(new_invalid_input_error(&format!(\"option '{}' is not int array \", key)));\n\n }\n\n let vals = val.unwrap();\n\n let mut data = vec![];\n\n for v in vals {\n\n match v.as_i64() {\n\n Some(x) => {\n\n data.push(x);\n\n },\n\n None => {\n\n return Err(new_invalid_input_error(&format!(\"option '{}' contains none int value: {} \", key, v)));\n\n },\n\n }\n\n }\n\n Ok(data)\n\n}\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 25, "score": 254301.57140665877 }, { "content": "fn read_utf8(buf_reader: &mut BufReader<&mut Read>) -> String {\n\n let mut buf = vec![];\n\n let num_bytes = buf_reader.read_until(b'\\0', &mut buf)\n\n .expect(\"expect delimiter '\\0'\");\n\n let s = std::str::from_utf8(buf.as_slice()).unwrap();\n\n s.trim_matches(|x|x=='\\0' ).to_string()\n\n}\n\n\n", "file_path": "flare-utils/src/file_utils.rs", "rank": 26, "score": 249497.17105230794 }, { "content": "pub fn parse_resp_properties<'a>(data_vec: &'a Vec<resp::Value>, start: i32) -> HashMap<&'a String, &'a resp::Value> {\n\n let mut map = HashMap::new();\n\n for x in (start as usize..data_vec.len()).step_by(2) {\n\n if let resp::Value::String(name) = &data_vec[x] {\n\n map.insert(name, &data_vec[x+1]);\n\n }\n\n }\n\n map\n\n}\n\n\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 27, "score": 249263.15247266984 }, { "content": " nodeSelfHeight() {\n\n return 20;\n", "file_path": 
"flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 28, "score": 245606.24654728867 }, { "content": "pub fn register_thread_end_callback(callback: Option<FnThreadEnd>) {\n\n unsafe { CALLBACK_TABLE.thread_end = callback; }\n\n}\n\n\n", "file_path": "flare-agent/src/event_handler.rs", "rank": 29, "score": 244606.3250587284 }, { "content": "pub fn register_thread_start_callback(callback: Option<FnThreadStart>) {\n\n unsafe { CALLBACK_TABLE.thread_start = callback; }\n\n}\n\n\n", "file_path": "flare-agent/src/event_handler.rs", "rank": 30, "score": 244606.3250587284 }, { "content": "/// Turn native error codes into meaningful and user-readable strings\n\npub fn translate_error(code: &NativeError) -> String {\n\n match code {\n\n &NativeError::NoError => \"No error has occurred.\",\n\n &NativeError::NotAvailable => \"The functionality is not available in this virtual machine.\",\n\n &NativeError::MustPossessCapability => \"The capability being used is false in this environment.\",\n\n &NativeError::NullPointer => \"Pointer is unexpectedly NULL.\",\n\n &NativeError::OutOfMemory => \"The function attempted to allocate memory and no more memory was available for allocation.\",\n\n &NativeError::NotEnabled => \"The desired functionality has not been enabled in this virtual machine.\",\n\n &NativeError::WrongPhase => \"The desired functionality is not available in the current phase. Always returned if the virtual machine has completed running.\",\n\n &NativeError::UnexpectedInternalError => \"An unexpected internal error has occurred.\",\n\n &NativeError::ThreadNotAttached => \"The thread being used to call this function is not attached to the virtual machine. 
Calls must be made from attached threads.\",\n\n &NativeError::Disconnected => \"The JVM TI environment provided is no longer connected or is not an environment.\",\n\n &NativeError::NotImplemented => \"This function is not implemented yet\",\n\n &NativeError::UnknownError => \"Unknown error.\"\n\n }.to_string()\n\n}\n", "file_path": "flare-agent/src/error.rs", "rank": 31, "score": 244299.8499306472 }, { "content": "fn read_file_flag(file: &mut Read) -> String {\n\n let mut flag_buf = [0 as u8; 4];\n\n//TS file header segment: TSHS (4 bytes)\n\n file.read_exact(&mut flag_buf[..]);\n\n let flag = std::str::from_utf8(&flag_buf[..]).unwrap();\n\n flag.to_string()\n\n}\n\n\n", "file_path": "flare-utils/src/file_utils.rs", "rank": 32, "score": 242100.8534400467 }, { "content": "// Replace all hex strings like \"0x45ef2173\" with \"0x...\".\n\nfn strip_hex_address(mut stack: &str) -> String {\n\n let mut stripped = String::with_capacity(stack.len());\n\n while let Some(idx) = stack.find(\"0x\") {\n\n stripped.push_str(&stack[..idx + 2]);\n\n let ndigits = stack[idx + 2..]\n\n .chars()\n\n .take_while(|c| c.is_digit(16))\n\n .count();\n\n if ndigits > 0 {\n\n stripped.push_str(\"...\");\n\n }\n\n stack = &stack[idx + 2 + ndigits..];\n\n }\n\n stripped.push_str(stack);\n\n stripped\n\n}\n", "file_path": "thirty-libs/inferno/src/differential/mod.rs", "rank": 33, "score": 239944.06753040454 }, { "content": "pub fn get_server() -> &'static Mutex<SampleServer> {\n\n &SAMPLE_SERVER\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 34, "score": 238231.67417081975 }, { "content": "pub fn resp_encode_method_data(method_data: &MethodData) -> Value {\n\n Value::Array(vec![\n\n Value::String(\"method\".to_string()),\n\n Value::String(\"id\".to_string()),\n\n Value::Integer(method_data.method_id),\n\n Value::String(\"name\".to_string()),\n\n Value::String(method_data.full_name.clone()),\n\n ])\n\n}\n\n\n", "file_path": "flare-agent/src/profile/encoder.rs", "rank": 
35, "score": 236593.93376098614 }, { "content": "// Parse and remove the number of samples from the end of a line.\n\nfn parse_nsamples(line: &mut &str, stripped_fractional_samples: &mut bool) -> Option<usize> {\n\n if let Some((samplesi, doti)) = rfind_samples(line) {\n\n let mut samples = &line[samplesi..];\n\n // Strip fractional part (if any);\n\n // foobar 1.klwdjlakdj\n\n //\n\n // The Perl version keeps the fractional part but this can be problematic\n\n // because of cumulative floating point errors. Instead we recommend to\n\n // use the --factor option. See https://github.com/brendangregg/FlameGraph/pull/18\n\n //\n\n // Warn if we're stripping a non-zero fractional part, but only the first time.\n\n if !*stripped_fractional_samples\n\n && doti < samples.len() - 1\n\n && !samples[doti + 1..].chars().all(|c| c == '0')\n\n {\n\n *stripped_fractional_samples = true;\n\n warn!(\n\n \"The input data has fractional sample counts that will be truncated to integers. \\\n\n If you need to retain the extra precision you can scale up the sample data and \\\n\n use the --factor option to scale it back down.\"\n", "file_path": "thirty-libs/inferno/src/flamegraph/merge.rs", "rank": 36, "score": 235170.1906427477 }, { "content": " data() {\n\n return this._data;\n", "file_path": "flare-server/static/simpleui/devtools/common/StringOutputStream.js", "rank": 37, "score": 234834.11103421322 }, { "content": "pub fn add_sample_data(sample_data: Box<SampleData + Send>) {\n\n let mut data_queue = DATA_QUEUE.lock().unwrap();\n\n let mut queue = &mut data_queue.queue;\n\n queue.push_back(sample_data);\n\n while(queue.len() > 10000){\n\n queue.pop_front();\n\n }\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 38, "score": 232407.84064669488 }, { "content": " nodeForSelfOrAncestor() {},\n", "file_path": "flare-server/static/simpleui/devtools/sdk/LayerTreeBase.js", "rank": 39, "score": 231489.6451804802 }, { "content": "fn resp_encode_stacktrace(thread_data: 
&ThreadData) -> Value {\n\n let mut vec = vec![];\n\n for call_id in &thread_data.stacktrace {\n\n vec.push(Value::Integer(call_id.clone()));\n\n }\n\n Value::Array(vec)\n\n}\n\n\n", "file_path": "flare-agent/src/profile/encoder.rs", "rank": 40, "score": 231453.7748097754 }, { "content": "fn resp_encode_stacktrace(thread_data: &ThreadData) -> Value {\n\n// let mut vec = Vec::with_capacity(thread_data.stacktrace.len());\n\n// for call_id in &thread_data.stacktrace {\n\n// vec.push(Value::Integer(*call_id));\n\n// }\n\n// Value::Array(vec)\n\n\n\n let vec64 = thread_data.stacktrace.clone();\n\n\n\n // I copy-pasted this code from StackOverflow without reading the answer\n\n // surrounding it that told me to write a comment explaining why this code\n\n // is actually safe for my own use case.\n\n let vec8 = unsafe {\n\n let ratio = mem::size_of::<i64>() / mem::size_of::<u8>();\n\n\n\n let length = vec64.len() * ratio;\n\n let capacity = vec64.capacity() * ratio;\n\n let ptr = vec64.as_ptr() as *mut u8;\n\n\n\n // Don't run the destructor for vec32\n\n mem::forget(vec64);\n\n\n\n // Construct new Vec\n\n Vec::from_raw_parts(ptr, length, capacity)\n\n };\n\n Value::BufBulk(vec8)\n\n}\n\n\n\n\n", "file_path": "flare-server/src/sample_encoder.rs", "rank": 41, "score": 231453.7748097754 }, { "content": " _setNode(node) {\n\n this._node = node;\n", "file_path": "flare-server/static/simpleui/devtools/timeline_model/TracingLayerTree.js", "rank": 42, "score": 231351.2896801313 }, { "content": " nodeForSelfOrAncestor() {\n\n for (let layer = this; layer; layer = layer._parent) {\n\n if (layer._node)\n\n return layer._node;\n\n }\n\n return null;\n", "file_path": "flare-ui/renderer/public/plugins/devtools/timeline_model/TracingLayerTree.js", "rank": 43, "score": 229868.85277451645 }, { "content": " _displayInfoForGroupNode(node) {\n\n const categories = Flamechart.TimelineUIUtils.categories();\n\n let color = node.id ? 
Flamechart.TimelineUIUtils.eventColor(/** @type {!SDK.TracingModel.Event} */ (node.event)) :\n\n categories['other'].color;\n\n const unattributed = Common.UIString('[unattributed]');\n\n\n\n const id = typeof node.id === 'symbol' ? undefined : node.id;\n\n\n\n switch (this._groupBySetting.get()) {\n\n case Flamechart.AggregatedTimelineTreeView.GroupBy.Category: {\n\n const category = id ? categories[id] || categories['other'] : unattributed;\n\n return {name: category.title, color: category.color};\n\n }\n\n\n\n case Flamechart.AggregatedTimelineTreeView.GroupBy.Domain:\n\n case Flamechart.AggregatedTimelineTreeView.GroupBy.Subdomain: {\n\n let domainName = id ? this._beautifyDomainName(id) : undefined;\n\n if (domainName) {\n\n const productName = this._productByEvent(/** @type {!SDK.TracingModel.Event} */ (node.event));\n\n if (productName)\n\n domainName += ' \\u2014 ' + productName;\n\n }\n\n return {name: domainName || unattributed, color: color};\n\n }\n\n\n\n case Flamechart.AggregatedTimelineTreeView.GroupBy.EventName: {\n\n const name = node.event.name === TimelineModel.TimelineModel.RecordType.JSFrame ?\n\n Common.UIString('JavaScript') :\n\n Flamechart.TimelineUIUtils.eventTitle(node.event);\n\n return {\n\n name: name,\n\n color: node.event.name === TimelineModel.TimelineModel.RecordType.JSFrame ?\n\n Flamechart.TimelineUIUtils.eventStyle(node.event).category.color :\n\n color\n\n };\n\n }\n\n case Flamechart.AggregatedTimelineTreeView.GroupBy.Product: {\n\n const event = /** @type {!SDK.TracingModel.Event} */ (node.event);\n\n const info = this._productAndBadgeByEvent(event);\n\n const name = info && info.name || unattributed;\n\n color = Flamechart.TimelineUIUtils.eventColorByProduct(\n\n this._productRegistry, this._model.timelineModel(), this._colorByURLCache, event);\n\n return {name: name, color: color, icon: info && info.badge || undefined};\n\n }\n\n\n\n case Flamechart.AggregatedTimelineTreeView.GroupBy.URL:\n\n break;\n\n\n\n case 
Flamechart.AggregatedTimelineTreeView.GroupBy.Frame: {\n\n const frame = id ? this._model.timelineModel().pageFrameById(id) : undefined;\n\n const frameName = frame ? Flamechart.TimelineUIUtils.displayNameForFrame(frame, 80) : Common.UIString('Page');\n\n return {name: frameName, color: color};\n\n }\n\n\n\n default:\n\n console.assert(false, 'Unexpected grouping type');\n\n }\n\n return {name: id || unattributed, color: color};\n", "file_path": "flare-server/static/simpleui/devtools/flamechart/TimelineTreeView.js", "rank": 44, "score": 228893.36156718765 }, { "content": "fn get_match_count(primary_stacks: &[i64], call_stack_ids: &HashSet<i64>) -> u64 {\n\n let mut count = 0;\n\n for method_id in primary_stacks {\n\n if call_stack_ids.contains(method_id) {\n\n count +=1;\n\n }\n\n }\n\n count\n\n}", "file_path": "flare-server/src/method_analysis.rs", "rank": 45, "score": 228711.24084242957 }, { "content": "//itoa is faster than int.to_string()\n\nfn write_int(buf: &mut Vec<u8>, val: &i64) {\n\n //itoa::write(buf, *val);\n\n let mut itoa_buf = itoa::Buffer::new();\n\n let bytes = itoa_buf.format(*val).as_bytes();\n\n buf.extend_from_slice(bytes);\n\n}\n\n\n\n/// A streaming RESP Decoder.\n\n#[derive(Debug)]\n\npub struct Decoder<R> {\n\n buf_bulk: bool,\n\n reader: BufReader<R>,\n\n}\n\n\n\nimpl<R: Read> Decoder<R> {\n\n /// Creates a Decoder instance with given BufReader for decoding the RESP buffers.\n\n /// # Examples\n\n /// ```\n\n /// # use std::io::BufReader;\n\n /// # use self::resp::{Decoder, Value};\n", "file_path": "thirty-libs/resp/src/serialize.rs", "rank": 46, "score": 228660.96146841702 }, { "content": "pub fn convert_to_vec64(vec8: Vec<u8>) -> Vec<i64> {\n\n let vec64 = unsafe {\n\n let ratio = mem::size_of::<i64>() / mem::size_of::<u8>();\n\n\n\n let length = vec8.len() / ratio;\n\n let capacity = vec8.capacity() / ratio;\n\n let ptr = vec8.as_ptr() as *mut i64;\n\n\n\n // Don't run the destructor for vec32\n\n mem::forget(vec8);\n\n\n\n // 
Construct new Vec\n\n Vec::from_raw_parts(ptr, length, capacity)\n\n };\n\n vec64\n\n}", "file_path": "flare-server/src/sample_encoder.rs", "rank": 47, "score": 228634.59877491847 }, { "content": " node() {\n\n return this._node;\n", "file_path": "flare-server/static/simpleui/devtools/sdk/CSSMatchedStyles.js", "rank": 48, "score": 226603.5373822776 }, { "content": " node() {},\n", "file_path": "flare-server/static/simpleui/devtools/sdk/LayerTreeBase.js", "rank": 49, "score": 226379.4226273857 }, { "content": " _extractNodeIdsToResolve(nodeIdsToResolve, seenNodeIds, payload) {\n\n const backendNodeId = payload.owner_node;\n\n if (backendNodeId && !this.backendNodeIdToNode().has(backendNodeId))\n\n nodeIdsToResolve.add(backendNodeId);\n\n for (let i = 0; payload.children && i < payload.children.length; ++i)\n\n this._extractNodeIdsToResolve(nodeIdsToResolve, seenNodeIds, payload.children[i]);\n", "file_path": "flare-server/static/simpleui/devtools/timeline_model/TracingLayerTree.js", "rank": 50, "score": 226240.50321038227 }, { "content": " node() {\n\n return this._node;\n", "file_path": "flare-ui/renderer/public/plugins/devtools/timeline_model/TracingLayerTree.js", "rank": 51, "score": 225963.1229242494 }, { "content": "pub fn add_sample_data_batch(data_vec: Vec<Box<SampleData + Send>>) {\n\n let mut data_queue = DATA_QUEUE.lock().unwrap();\n\n data_queue.push_back(data_vec);\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 52, "score": 225586.77709688738 }, { "content": "#[test]\n\nfn flamegraph_should_warn_about_no_sort_when_reversing_stack_ordering() {\n\n let options = Options {\n\n no_sort: true,\n\n reverse_stack_order: true,\n\n ..Default::default()\n\n };\n\n test_flamegraph_logs_with_options(\n\n \"./flamegraph/test/results/perf-funcab-cmd-01-collapsed-all.txt\",\n\n |captured_logs| {\n\n let nwarnings = captured_logs\n\n .into_iter()\n\n .filter(|log| log.body == \"Input lines are always sorted when `reverse_stack_order` is `true`. 
The `no_sort` option is being ignored.\" && log.level == Level::Warn)\n\n .count();\n\n assert_eq!(\n\n nwarnings, 1,\n\n \"no-sort warning logged {} times, but should be logged exactly once\",\n\n nwarnings\n\n );\n\n },\n\n options,\n\n );\n\n}\n\n\n", "file_path": "thirty-libs/inferno/tests/flamegraph.rs", "rank": 53, "score": 224706.36769305478 }, { "content": "fn flamegraph_benchmark(c: &mut Criterion, id: &str, infile: &str, mut opt: Options<'static>) {\n\n let mut f = File::open(infile).expect(\"file not found\");\n\n\n\n let mut bytes = Vec::new();\n\n f.read_to_end(&mut bytes).expect(\"Could not read file\");\n\n\n\n c.bench(\n\n \"flamegraph\",\n\n ParameterizedBenchmark::new(\n\n id,\n\n move |b, data| {\n\n b.iter(|| {\n\n let reader = BufReader::new(data.as_slice());\n\n let _folder = flamegraph::from_reader(&mut opt, reader, io::sink());\n\n })\n\n },\n\n vec![bytes],\n\n )\n\n .throughput(|bytes| Throughput::Bytes(bytes.len() as u64)),\n\n );\n", "file_path": "thirty-libs/inferno/benches/flamegraph.rs", "rank": 54, "score": 224168.54734553208 }, { "content": "#[test]\n\nfn flamegraph_should_warn_about_bad_input_lines_with_reversed_stack_ordering() {\n\n let options = Options {\n\n reverse_stack_order: true,\n\n ..Default::default()\n\n };\n\n test_flamegraph_logs_with_options(\n\n \"./tests/data/flamegraph/bad-lines/bad-lines.txt\",\n\n |captured_logs| {\n\n let nwarnings = captured_logs\n\n .into_iter()\n\n .filter(|log| {\n\n log.body.starts_with(\"Ignored\")\n\n && log.body.ends_with(\" lines with invalid format\")\n\n && log.level == Level::Warn\n\n })\n\n .count();\n\n assert_eq!(\n\n nwarnings, 1,\n\n \"bad lines warning logged {} times, but should be logged exactly once\",\n\n nwarnings\n\n );\n\n },\n\n options,\n\n );\n\n}\n\n\n", "file_path": "thirty-libs/inferno/tests/flamegraph.rs", "rank": 55, "score": 220327.70103328538 }, { "content": "pub fn parse_request_options(request: &Vec<Value>) -> HashMap<String, Value> {\n\n let mut result = 
HashMap::new();\n\n let mut i = 1;\n\n while i < request.len()-1 {\n\n let key = &request[i];\n\n let value = &request[i+1];\n\n match key {\n\n Value::String(str) => {\n\n result.insert(str.to_string(), value.clone());\n\n },\n\n _ => {\n\n println!(\"invalid cmd option key, expect String but get: {:?}\", key);\n\n }\n\n }\n\n i+=2;\n\n }\n\n result\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 56, "score": 218268.59539522004 }, { "content": "pub fn new_invalid_input_error(msg: &str) -> io::Error {\n\n io::Error::new(ErrorKind::InvalidInput, msg)\n\n}", "file_path": "flare-server/src/utils.rs", "rank": 57, "score": 216669.9844323083 }, { "content": "fn main() -> io::Result<()> {\n\n\n\n //TODO 注意: 此修复方法只针对旧的文件格式,不适用于新版\n\n\n\n let samples_dir = \"D:\\\\projects\\\\arch\\\\flare-profiler\\\\flare-server\\\\flare-samples\\\\\";\n\n let dirs = std::fs::read_dir(samples_dir)?;\n\n for dir in dirs {\n\n //sample\n\n let path_buf = dir.unwrap().path();\n\n if !std::fs::metadata(&path_buf).unwrap().is_dir() {\n\n continue;\n\n }\n\n let paths = std::fs::read_dir(path_buf)?;\n\n for path in paths {\n\n let path_str = path.unwrap().path().to_str().unwrap().to_owned();\n\n if path_str.ends_with(\".fidx\") {\n\n let idx_path = &path_str[0..path_str.len()-5];\n\n let mut tuple_file = TupleIndexedFile::new_reader(idx_path)?;\n\n if tuple_file.amount > 0 {\n\n println!(\"fidx file: {}, amount: {}, data: {:?}\", path_str, tuple_file.amount, tuple_file.get_index_pairs(0,3));\n", "file_path": "flare-server/examples/test_fix_thread_data.rs", "rank": 58, "score": 214418.3184390882 }, { "content": "fn on_thread_end(thread: Thread) {\n\n if !is_trace_running() {\n\n return;\n\n }\n\n println!(\"[{}] thread end [{}] [{}]\", nowTime(), thread.id, thread.name);\n\n\n\n match static_context().thread_end(&thread.id) {\n\n Some(duration) => {\n\n println!(\"[{}] Thread {} lived {}\", nowTime(), thread.name, duration);\n\n 
//TREE_ARENA.lock().unwrap().print_call_tree(&thread);\n\n },\n\n None => println!(\"[{}] Thread {} has no start\", nowTime(), thread.name)\n\n }\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 59, "score": 212684.1388879401 }, { "content": "fn on_thread_start(thread: Thread) {\n\n if !is_trace_running() {\n\n return;\n\n }\n\n println!(\"[{}] thread start [{}] [{}]\", nowTime(), thread.id, thread.name);\n\n\n\n static_context().thread_start(&thread.id);\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 60, "score": 212684.1388879401 }, { "content": "fn handle_subscribe_events_cmd(stream: &mut TcpStream, cmd_options: &HashMap<String, Value>) {\n\n println!(\"subscribe event loop start\");\n\n\n\n //send sample info\n\n// let start_time = SAMPLE_SERVER.lock().unwrap().start_time;\n\n// let sample_interval = SAMPLE_SERVER.lock().unwrap().sample_interval;\n\n// let buf = resp_encode_sample_info(start_time, sample_interval);\n\n// if let Err(e) = stream.write_all(buf.as_slice()) {\n\n// println!(\"send sample info failed: {}\", e);\n\n// return;;\n\n// }\n\n\n\n //send sample info\n\n println!(\"sending sample info to new client ..\");\n\n let request = Value::Array(vec![\n\n Value::String(\"get_sample_info\".to_string()),\n\n ]);\n\n SAMPLE_SERVER.lock().unwrap().send_request(request);\n\n if let Some(response) = SAMPLE_SERVER.lock().unwrap().recv_response() {\n\n if let Err(e) = stream.write_all(response.encode().as_slice()) {\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 61, "score": 212498.72340672306 }, { "content": "fn handle_resume_sample_cmd(stream: &mut TcpStream, cmd_options: &HashMap<String, Value>) {\n\n //resume\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 62, "score": 212498.72340672306 }, { "content": "fn handle_stop_sample_cmd(stream: &mut TcpStream, cmd_options: &HashMap<String, Value>) {\n\n stop_server();\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 63, "score": 
212498.72340672306 }, { "content": "fn handle_pause_sample_cmd(stream: &mut TcpStream, cmd_options: &HashMap<String, Value>) {\n\n //pause\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 64, "score": 212498.72340672306 }, { "content": "//open file with read and write permissions\n\npub fn open_file(path: &str, rw: bool) -> Result<File, io::Error> {\n\n OpenOptions::new()\n\n .read(true)\n\n .write(rw)\n\n .create(rw)\n\n .open(path.to_string())\n\n}\n\n\n", "file_path": "flare-utils/src/file_utils.rs", "rank": 65, "score": 211935.41601985932 }, { "content": "pub fn new_error(kind: ErrorKind, msg: &str) -> io::Error {\n\n io::Error::new(kind, msg)\n\n}\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 66, "score": 208445.11710766982 }, { "content": "// massage function name to be nicer\n\n// NOTE: ignoring https://github.com/jvm-profiling-tools/perf-map-agent/pull/35\n\nfn with_module_fallback(module: &str, func: &str, pc: &str, include_addrs: bool) -> String {\n\n if func != \"[unknown]\" {\n\n return func.to_string();\n\n }\n\n\n\n // try to use part of module name as function if unknown\n\n let func = match (module, include_addrs) {\n\n (\"[unknown]\", true) => \"unknown\",\n\n (\"[unknown]\", false) => {\n\n // no need to process this further\n\n return func.to_string();\n\n }\n\n (module, _) => {\n\n // use everything following last / of module as function name\n\n &module[module.rfind('/').map(|i| i + 1).unwrap_or(0)..]\n\n }\n\n };\n\n\n\n // output string is a bit longer than rawfunc but not much\n\n let mut res = String::with_capacity(func.len() + 12);\n", "file_path": "thirty-libs/inferno/src/collapse/perf.rs", "rank": 67, "score": 208026.39546266082 }, { "content": "fn on_monitor_waited(thread: Thread) {\n\n if !is_trace_running() {\n\n return;\n\n }\n\n println!(\"[{}] [W2-{}]\", nowTime(), thread.name);\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 68, "score": 207306.39588659324 }, { "content": "fn 
on_monitor_wait(thread: Thread) {\n\n if !is_trace_running() {\n\n return;\n\n }\n\n println!(\"[{}] [W1-{}]\", nowTime(), thread.name);\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 69, "score": 207306.39588659324 }, { "content": "pub fn test_collapse_error<C>(mut collapser: C, test_filename: &str) -> io::Error\n\nwhere\n\n C: Collapse,\n\n{\n\n if fs::metadata(test_filename).is_err() {\n\n panic!(\"Failed to open input file '{}'\", test_filename);\n\n }\n\n\n\n let mut collapse = move |out: &mut dyn io::Write| {\n\n if test_filename.ends_with(\".gz\") {\n\n let test_file = File::open(test_filename)?;\n\n let r = BufReader::new(Decoder::new(test_file).unwrap());\n\n collapser.collapse(r, out)\n\n } else {\n\n collapser.collapse_file(Some(test_filename), out)\n\n }\n\n };\n\n\n\n collapse(&mut io::sink()).expect_err(\"Expected an error\")\n\n}\n", "file_path": "thirty-libs/inferno/tests/common/collapse.rs", "rank": 70, "score": 206131.02518488804 }, { "content": "fn on_monitor_contended_entered(thread: Thread) {\n\n if !is_trace_running() {\n\n return;\n\n }\n\n println!(\"[{}] [C2-{}]\", nowTime(), thread.name);\n\n\n\n match static_context().monitor_entered(&thread.id) {\n\n Some(duration) => println!(\"[{}] Thread {} waited {}\", nowTime(), thread.name, duration),\n\n None => println!(\"[{}] Thread {} has never waited\", nowTime(), thread.name)\n\n }\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 71, "score": 205943.15350918856 }, { "content": "fn on_monitor_contended_enter(thread: Thread) {\n\n if !is_trace_running() {\n\n return;\n\n }\n\n println!(\"[{}] [C1-{}]\", nowTime(), thread.name);\n\n\n\n static_context().monitor_enter(&thread.id);\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 72, "score": 205943.15350918856 }, { "content": " dataGridNodeFromNode(target) {\n\n const rowElement = target.enclosingNodeOrSelfWithNodeName('tr');\n\n return rowElement && rowElement._dataGridNode;\n", "file_path": 
"flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 73, "score": 205647.0474972393 }, { "content": "pub fn test_collapse_logs<C, F>(mut collapser: C, input_file: &str, asserter: F)\n\nwhere\n\n C: Collapse,\n\n F: Fn(&Vec<CapturedLog>),\n\n{\n\n testing_logger::setup();\n\n let r = BufReader::new(File::open(input_file).unwrap());\n\n collapser.collapse(r, std::io::sink()).unwrap();\n\n testing_logger::validate(asserter);\n\n}\n\n\n", "file_path": "thirty-libs/inferno/tests/common/collapse.rs", "rank": 74, "score": 201775.5312478864 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\npub fn from_frames<W>(opt: &mut Options, writer: W, frames: &mut Vec<TimedFrame>, time: usize, delta_max: usize) -> quick_xml::Result<()>\n\n where W: Write\n\n{\n\n let mut buffer = StrStack::new();\n\n\n\n // let's start writing the svg!\n\n let mut svg = if opt.pretty_xml {\n\n Writer::new_with_indent(writer, b' ', 4)\n\n } else {\n\n Writer::new(writer)\n\n };\n\n\n\n if time == 0 {\n\n error!(\"No stack counts found\");\n\n // emit an error message SVG, for tools automating flamegraph use\n\n let imageheight = opt.font_size * 5;\n\n svg::write_header(&mut svg, imageheight, &opt)?;\n\n svg::write_str(\n\n &mut svg,\n\n &mut buffer,\n", "file_path": "thirty-libs/inferno/src/flamegraph/mod.rs", "rank": 75, "score": 200694.50813020486 }, { "content": "// Write three-column lines with the folded stack trace and two value columns,\n\n// one for each profile.\n\nfn write_stacks<W>(stack_counts: &FnvHashMap<String, Counts>, mut writer: W) -> io::Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n for (stack, &Counts { first, second }) in stack_counts {\n\n writeln!(writer, \"{} {} {}\", stack, first, second)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "thirty-libs/inferno/src/differential/mod.rs", "rank": 76, "score": 199967.70210475643 }, { "content": " nodeSelfHeight() {\n\n return 40;\n", "file_path": 
"flare-ui/renderer/public/plugins/devtools/data_grid/ShowMoreDataGridNode.js", "rank": 77, "score": 199617.9949641665 }, { "content": " dataGridNodeForTreeNode(treeNode) {\n\n return treeNode[Flamechart.TimelineTreeView.TreeGridNode._gridNodeSymbol] || null;\n", "file_path": "flare-ui/renderer/public/plugins/devtools/flamechart/TimelineTreeView.js", "rank": 78, "score": 199359.95435751817 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\npub fn from_lines<'a, I, W>(opt: &mut Options<'_>, lines: I, writer: W) -> quick_xml::Result<()>\n\nwhere\n\n I: IntoIterator<Item = &'a str>,\n\n W: Write,\n\n{\n\n let mut reversed = StrStack::new();\n\n let (mut frames, time, ignored, delta_max) = if opt.reverse_stack_order {\n\n if opt.no_sort {\n\n warn!(\n\n \"Input lines are always sorted when `reverse_stack_order` is `true`. \\\n\n The `no_sort` option is being ignored.\"\n\n );\n\n }\n\n // Reverse order of stacks and sort.\n\n let mut stack = String::new();\n\n for line in lines {\n\n stack.clear();\n\n let samples_idx = merge::rfind_samples(line)\n\n .map(|(i, _)| i)\n\n .unwrap_or_else(|| line.len());\n", "file_path": "thirty-libs/inferno/src/flamegraph/mod.rs", "rank": 79, "score": 198243.08183517287 }, { "content": "fn ts_sum_int64 (numbers: &[i64]) -> i64 {\n\n let mut sum = 0;\n\n numbers.iter().for_each(|x| sum += *x as i64);\n\n sum\n\n}\n\n\n\n//fn average(numbers: &[i32]) -> f32 {\n\n// numbers.iter().sum::<i32>() as f32 / numbers.len() as f32\n\n//}\n\n//\n\n//fn median(numbers: &mut [i32]) -> i32 {\n\n// numbers.sort();\n\n// let mid = numbers.len() / 2;\n\n// numbers[mid]\n\n//}\n\n\n\n\n\nimpl TimeSeriesFileReader {\n\n\n\n pub fn new(path: &str) -> Result<TimeSeriesFileReader, Error> {\n", "file_path": "flare-utils/src/timeseries.rs", "rank": 80, "score": 196478.03688848406 }, { "content": "/// Produce a flame graph from a reader that contains a sequence of folded stack lines.\n\n///\n\n/// See [`from_sorted_lines`] for the expected format of each 
line.\n\n///\n\n/// The resulting flame graph will be written out to `writer` in SVG format.\n\npub fn from_reader<R, W>(opt: &mut Options<'_>, reader: R, writer: W) -> quick_xml::Result<()>\n\nwhere\n\n R: Read,\n\n W: Write,\n\n{\n\n from_readers(opt, iter::once(reader), writer)\n\n}\n\n\n", "file_path": "thirty-libs/inferno/src/flamegraph/mod.rs", "rank": 81, "score": 196007.6450774566 }, { "content": "/// Produce a flame graph from a set of readers that contain folded stack lines.\n\n///\n\n/// See [`from_sorted_lines`] for the expected format of each line.\n\n///\n\n/// The resulting flame graph will be written out to `writer` in SVG format.\n\npub fn from_readers<R, W>(opt: &mut Options<'_>, readers: R, writer: W) -> quick_xml::Result<()>\n\nwhere\n\n R: IntoIterator,\n\n R::Item: Read,\n\n W: Write,\n\n{\n\n let mut input = String::new();\n\n for mut reader in readers {\n\n reader\n\n .read_to_string(&mut input)\n\n .map_err(quick_xml::Error::Io)?;\n\n }\n\n from_lines(opt, input.lines(), writer)\n\n}\n\n\n", "file_path": "thirty-libs/inferno/src/flamegraph/mod.rs", "rank": 82, "score": 196007.6450774566 }, { "content": "fn nowTime() -> String {\n\n let date = Local::now();\n\n return date.format(\"%Y-%m-%d %H:%M:%S.%6f\").to_string();\n\n //println!(\"{:?} {}\", date, date.format(\"[%Y-%m-%d %H:%M:%S.%3f]\"));\n\n}\n\n\n", "file_path": "flare-agent/src/lib.rs", "rank": 83, "score": 191480.54816583893 }, { "content": "fn is_server_running() -> bool {\n\n *RUNNING_SERVER.lock().unwrap()\n\n}\n\n\n", "file_path": "flare-agent/examples/resp_server.rs", "rank": 84, "score": 189899.9401723736 }, { "content": "fn default_sample_count() -> i64 {\n\n 1\n\n}\n\n\n\n#[derive(Clone, Serialize)]\n\npub struct MethodInfo {\n\n pub method_id: i64,\n\n pub full_name: String,\n\n\n\n #[serde(skip_serializing)]\n\n pub hits_count: u32\n\n// pub source_file: String,\n\n// pub line_num: u16\n\n}\n\n\n\n#[derive(Clone, Serialize)]\n\npub struct MethodCall {\n\n pub method_id: 
i64,\n\n pub full_name: String,\n\n pub thread_id: JavaLong,\n", "file_path": "flare-server/src/sample.rs", "rank": 85, "score": 189895.85064509077 }, { "content": "pub fn stop_server() {\n\n set_server_running(false);\n\n //make a new connection force tcp listener exit accept() blocking\n\n let bind_port = SAMPLE_SERVER.lock().unwrap().get_bind_port();\n\n let bind_host = SAMPLE_SERVER.lock().unwrap().get_bind_host();\n\n let mut host = if bind_host == \"0.0.0.0\" {\n\n \"127.0.0.1\".to_string()\n\n } else {\n\n bind_host\n\n };\n\n match TcpStream::connect(format!(\"{}:{}\", host, bind_port)) {\n\n Ok(_) => {\n\n println!(\"send notify to agent server ok\");\n\n },\n\n Err(e) => {\n\n println!(\"send notify to agent server failed: {}\", e)\n\n }\n\n }\n\n}\n\n\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 86, "score": 189854.25355843318 }, { "content": "pub fn start_server() {\n\n let timer = timer::Timer::new();\n\n let guard = {\n\n timer.schedule_repeating(chrono::Duration::milliseconds(3000), move || {\n\n DATA_QUEUE.lock().unwrap().stats();\n\n })\n\n };\n\n\n\n let bind_addr = SAMPLE_SERVER.lock().unwrap().get_bind_addr();\n\n let listener = TcpListener::bind(&bind_addr).unwrap();\n\n // accept connections and process them, spawning a new thread for each one\n\n println!(\"Flare agent server listening on {}\", bind_addr);\n\n set_server_running(true);\n\n let mut last_client_stream: Option<TcpStream> = None;\n\n for stream in listener.incoming() {\n\n if !is_server_running() {\n\n println!(\"Flare agent server is stopping, exiting\");\n\n break;\n\n }\n\n match stream {\n", "file_path": "flare-agent/src/profile/server.rs", "rank": 87, "score": 189854.25355843318 }, { "content": "type JavaMethod = i64;\n\n\n\nstatic CALL_COUNT: AtomicUsize = AtomicUsize::new(0);\n\n\n", "file_path": "flare-server/src/call_tree.rs", "rank": 88, "score": 189805.5379617809 }, { "content": "type JavaLong = i64;\n", "file_path": "flare-server/src/call_tree.rs", 
"rank": 89, "score": 189805.29601845116 }, { "content": "pub fn get_option_as_str_required<'a>(options: &'a serde_json::Map<String, serde_json::Value>, key: &str) -> io::Result<&'a str> {\n\n match options.get(key) {\n\n Some(val) => {\n\n match val.as_str() {\n\n Some(s) => Ok(s),\n\n None => {\n\n Err(io::Error::new(ErrorKind::InvalidInput, format!(\"option value is not a string '{}'\", key)))\n\n },\n\n }\n\n },\n\n None => {\n\n Err(io::Error::new(ErrorKind::InvalidInput, format!(\"missing option '{}'\", key)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 90, "score": 189569.09034739807 }, { "content": " sortOrder() {\n\n if (!this._sortColumnCell || this._sortColumnCell.classList.contains(DataGrid.DataGrid.Order.Ascending))\n\n return DataGrid.DataGrid.Order.Ascending;\n\n if (this._sortColumnCell.classList.contains(DataGrid.DataGrid.Order.Descending))\n\n return DataGrid.DataGrid.Order.Descending;\n\n return null;\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 91, "score": 188633.06761480475 }, { "content": " nodeSelfHeight() {\n\n return 20;\n", "file_path": "flare-ui/renderer/public/plugins/devtools/data_grid/DataGrid.js", "rank": 92, "score": 188608.20985322457 }, { "content": " rootNode() {\n\n return this._rootNode;\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 93, "score": 188537.74895577077 }, { "content": " resetNode(onlyCaches) {\n\n // @TODO(allada) This is a hack to make sure ViewportDataGrid can clean up these caches. 
Try Not To Use.\n\n delete this._depth;\n\n delete this._revealed;\n\n if (onlyCaches)\n\n return;\n\n if (this.previousSibling)\n\n this.previousSibling.nextSibling = this.nextSibling;\n\n if (this.nextSibling)\n\n this.nextSibling.previousSibling = this.previousSibling;\n\n this.dataGrid = null;\n\n this.parent = null;\n\n this.nextSibling = null;\n\n this.previousSibling = null;\n\n this._attached = false;\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 94, "score": 188531.07219570674 }, { "content": "pub fn get_option_as_str<'a>(options: &'a serde_json::Map<String, serde_json::Value>, key: &str, default_value: &'a str) -> &'a str {\n\n match options.get(key) {\n\n Some(val) => {\n\n match val.as_str() {\n\n Some(s) => s.trim(),\n\n None => default_value\n\n }\n\n },\n\n None => default_value\n\n }\n\n}\n\n\n", "file_path": "flare-server/src/utils.rs", "rank": 95, "score": 188361.19546504322 }, { "content": " _startEditingColumnOfDataGridNode(node, cellIndex) {\n\n this._editing = true;\n\n /** @type {?DataGrid.DataGridNode} */\n\n this._editingNode = node;\n\n this._editingNode.select();\n\n\n\n const element = this._editingNode._element.children[cellIndex];\n\n UI.InplaceEditor.startEditing(element, this._startEditingConfig(element));\n\n element.getComponentSelection().selectAllChildren(element);\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 96, "score": 188133.0988541559 }, { "content": " isSortOrderAscending() {\n\n return !this._sortColumnCell || this._sortColumnCell.classList.contains(DataGrid.DataGrid.Order.Ascending);\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 97, "score": 186808.87708447187 }, { "content": " static StringComparator(columnId, a, b) {\n\n const aValue = a.data[columnId];\n\n const bValue = b.data[columnId];\n\n const aString = aValue instanceof Node ? 
aValue.textContent : String(aValue);\n\n const bString = bValue instanceof Node ? bValue.textContent : String(bValue);\n\n return aString < bString ? -1 : (aString > bString ? 1 : 0);\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/SortableDataGrid.js", "rank": 98, "score": 186790.8881163228 }, { "content": " columnIdFromNode(target) {\n\n const cellElement = target.enclosingNodeOrSelfWithNodeName('td');\n\n return cellElement && cellElement[DataGrid.DataGrid._columnIdSymbol];\n", "file_path": "flare-server/static/simpleui/devtools/data_grid/DataGrid.js", "rank": 99, "score": 186727.091551412 } ]
Rust
src/arch/intel/interrupt/x2apic/local_apic.rs
VenmoTools/libarch
589bd07a2fdcd3dfc16adbbb8f6c5d720fd3cb0c
use bit_field::BitField; use crate::arch::intel::chips::flags::LocalAPICFlags; use crate::arch::intel::interrupt::ApicInfo; use crate::arch::intel::interrupt::x2apic::consts::*; use crate::arch::intel::interrupt::x2apic::register::{IpiAllShorthand, IpiDeliveryMode, IpiDestMode, LocalApicRegisters, TimerDivide, TimerMode}; #[derive(Debug)] pub struct LocalApic { timer_vector: usize, error_vector: usize, spurious_vector: usize, timer_mode: TimerMode, timer_divide: TimerDivide, timer_initial: u32, ipi_destination_mode: IpiDestMode, regs: LocalApicRegisters, } impl From<ApicInfo> for LocalApic { fn from(info: ApicInfo) -> Self { LocalApic { timer_vector: info.timer_vector.expect("missing timer vector"), error_vector: info.error_vector.expect("missing error vector"), spurious_vector: info.spurious_vector.expect("missing spurious vector"), timer_mode: info.timer_mode.unwrap_or(TimerMode::Periodic), timer_divide: info.timer_divide.unwrap_or(TimerDivide::Div256), timer_initial: info.timer_initial.unwrap_or(10_000_000), ipi_destination_mode: info .ipi_destination_mode .unwrap_or(IpiDestMode::Physical), regs: LocalApicRegisters::new(), } } } impl LocalApic { pub unsafe fn enable(&mut self) { self.x2apic_mode_enable(); self.remap_lvt_entries(); self.configure_timer(); self.enable_timer(); self.disable_local_interrupt_pins(); self.software_enable(); } pub unsafe fn disable(&mut self) { self.regs.set_base_bit(BASE_APIC_ENABLE, false); } pub unsafe fn end_of_interrupt(&mut self) { self.regs.write_eoi(0); } pub unsafe fn is_bsp(&self) -> bool { self.regs.base_bit(BASE_BSP) } pub unsafe fn id(&self) -> u32 { self.regs.id() as u32 } pub unsafe fn version(&self) -> u8 { self.regs.version_bit_range(VERSION_NR) as u8 } pub unsafe fn max_lvt_entry(&self) -> u8 { self.regs.version_bit_range(VERSION_MAX_LVT_ENTRY) as u8 } pub unsafe fn has_eoi_bcast_suppression(&self) -> bool { self.regs.version_bit(VERSION_EOI_BCAST_SUPPRESSION) } pub unsafe fn error_flags(&self) -> LocalAPICFlags { 
LocalAPICFlags::from_bits_truncate(self.regs.error() as u8) } pub unsafe fn enable_timer(&mut self) { self.regs.set_lvt_timer_bit(LVT_TIMER_MASK, false); } pub unsafe fn disable_timer(&mut self) { self.regs.set_lvt_timer_bit(LVT_TIMER_MASK, true); } pub unsafe fn set_timer_mode(&mut self, mode: TimerMode) { self.timer_mode = mode; self.regs.set_lvt_timer_bit_range(LVT_TIMER_MODE, mode.into()); } pub unsafe fn set_timer_divide(&mut self, divide: TimerDivide) { self.timer_divide = divide; self.regs .set_tdcr_bit_range(TDCR_DIVIDE_VALUE, divide.into()); } pub unsafe fn set_timer_initial(&mut self, initial: u32) { self.timer_initial = initial; self.regs.write_ticr(u64::from(initial)); } pub unsafe fn set_logical_id(&mut self, dest: u32) { self.regs.write_ldr(u64::from(dest)); } pub unsafe fn send_ipi(&mut self, vector: u8, dest: u32) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::Fixed); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_ipi_all(&mut self, vector: u8, who: IpiAllShorthand) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::Fixed); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); } pub unsafe fn send_lowest_priority_ipi(&mut self, vector: u8, dest: u32) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::LowestPriority); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_lowest_priority_ipi_all( &mut self, vector: u8, who: IpiAllShorthand, ) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::LowestPriority); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); } pub unsafe fn send_smi(&mut self, dest: u32) { let mut icr_val = self.format_icr(0, IpiDeliveryMode::SystemManagement); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_smi_all(&mut self, who: IpiAllShorthand) { let mut icr_val = 
self.format_icr(0, IpiDeliveryMode::SystemManagement); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); } pub unsafe fn send_nmi(&mut self, dest: u32) { let mut icr_val = self.format_icr(0, IpiDeliveryMode::NonMaskable); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_nmi_all(&mut self, who: IpiAllShorthand) { let mut icr_val = self.format_icr(0, IpiDeliveryMode::NonMaskable); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); } pub unsafe fn send_sipi(&mut self, vector: u8, dest: u32) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::StartUp); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_sipi_all(&mut self, vector: u8) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::StartUp); icr_val.set_bits( ICR_DEST_SHORTHAND, IpiAllShorthand::AllExcludingSelf.into(), ); self.regs.write_icr(icr_val); } pub unsafe fn send_ipi_self(&mut self, vector: u8) { self.regs.write_self_ipi(u64::from(vector)); } fn format_icr(&self, vector: u8, mode: IpiDeliveryMode) -> u64 { let mut icr_val = 0; icr_val.set_bits(ICR_VECTOR, u64::from(vector)); icr_val.set_bits(ICR_DELIVERY_MODE, mode.into()); icr_val.set_bit( ICR_DESTINATION_MODE, self.ipi_destination_mode == IpiDestMode::Logical, ); icr_val.set_bit(ICR_LEVEL, true); icr_val } unsafe fn x2apic_mode_enable(&mut self) { self.regs.set_base_bit(BASE_X2APIC_ENABLE, true); } unsafe fn software_enable(&mut self) { self.regs.set_sivr_bit(SIVR_APIC_SOFTWARE_ENABLE, true); } unsafe fn remap_lvt_entries(&mut self) { self.regs.set_lvt_timer_bit_range( LVT_TIMER_VECTOR, self.timer_vector as u64, ); self.regs.set_lvt_error_bit_range( LVT_ERROR_VECTOR, self.error_vector as u64, ); self.regs .set_sivr_bit_range(SIVR_VECTOR, self.spurious_vector as u64); } unsafe fn configure_timer(&mut self) { self.regs .set_lvt_timer_bit_range(LVT_TIMER_MODE, 
self.timer_mode.into()); self.regs .set_tdcr_bit_range(TDCR_DIVIDE_VALUE, self.timer_divide.into()); self.regs.write_ticr(u64::from(self.timer_initial)); } unsafe fn disable_local_interrupt_pins(&mut self) { self.regs.write_lvt_lint0(0); self.regs.write_lvt_lint1(0); } }
use bit_field::BitField; use crate::arch::intel::chips::flags::LocalAPICFlags; use crate::arch::intel::interrupt::ApicInfo; use crate::arch::intel::interrupt::x2apic::consts::*; use crate::arch::intel::interrupt::x2apic::register::{IpiAllShorthand, IpiDeliveryMode, IpiDestMode, LocalApicRegisters, TimerDivide, TimerMode}; #[derive(Debug)] pub struct LocalApic { timer_vector: usize, error_vector: usize, spurious_vector: usize, timer_mode: TimerMode, timer_divide: TimerDivide, timer_initial: u32, ipi_destination_mode: IpiDestMode, regs: LocalApicRegisters, } impl From<ApicInfo> for LocalApic { fn from(info: ApicInfo) -> Self { LocalApic { timer_vector: info.timer_vector.expect("missing timer vector"), error_vector: info.error_vector.expect("missing error vector"), spurious_vector: info.spurious_vector.expect("missing spurious vector"), timer_mode: info.timer_mode.unwrap_or(TimerMode::Periodic), timer_divide: info.timer_divide.unwrap_or(TimerDivide::Div256), timer_initial: info.timer_initial.unwrap_or(10_000_000), ipi_destination_mode: info .ipi_destination_mode .unwrap_or(IpiDestMode::Physical), regs: LocalApicRegisters::new(), } } } impl LocalApic { pub unsafe fn enable(&mut self) { self.x2apic_mode_enable(); self.remap_lvt_entries(); self.configure_timer(); self.enable_timer(); self.disable_local_interrupt_pins(); self.software_enable(); } pub unsafe fn disable(&mut self) { self.regs.set_base_bit(BASE_APIC_ENABLE, false); } pub unsafe fn end_of_interrupt(&mut self) { self.regs.write_eoi(0); } pub unsafe fn is_bsp(&self) -> bool { self.regs.base_bit(BASE_BSP) } pub unsafe fn id(&self) -> u32 { self.regs.id() as u32 } pub unsafe fn version(&self) -> u8 { self.regs.version_bit_range(VERSION_NR) as u8 } pub unsafe fn max_lvt_entry(&self) -> u8 { self.regs.version_bit_range(VERSION_MAX_LVT_ENTRY) as u8 } pub unsafe fn has_eoi_bcast_suppression(&self) -> bool { self.regs.version_bit(VERSION_EOI_BCAST_SUPPRESSION) } pub unsafe fn error_flags(&self) -> LocalAPICFlags { 
LocalAPICFlags::from_bits_truncate(self.regs.error() as u8) } pub unsafe fn enable_timer(&mut self) { self.regs.set_lvt_timer_bit(LVT_TIMER_MASK, false); } pub unsafe fn disable_timer(&mut self) { self.regs.set_lvt_timer_bit(LVT_TIMER_MASK, true); } pub unsafe fn set_timer_mode(&mut self, mode: TimerMode) { self.timer_mode = mode; self.regs.set_lvt_timer_bit_range(LVT_TIMER_MODE, mode.into()); } pub unsafe fn set_timer_divide(&mut self, divide: TimerDivide) { self.timer_divide = divide; self.regs .set_tdcr_bit_range(TDCR_DIVIDE_VALUE, divide.into()); } pub unsafe fn set_timer_initial(&mut self, initial: u32) { self.timer_initial = initial; self.regs.write_ticr(u64::from(initial)); } pub unsafe fn set_logical_id(&mut self, dest: u32) { self.regs.write_ldr(u64::from(dest)); } pub unsafe fn send_ipi(&mut self, vector: u8, dest: u32) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::Fixed); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_ipi_all(&mut self, vector: u8, who: IpiAllShorthand) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::Fixed); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); } pub unsafe fn send_lowest_priority_ipi(&mut self, vector: u8, dest: u32) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::LowestPriority); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_lowest_priority_ipi_all( &mut self, vector: u8, who: IpiAllShorthand, ) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::LowestPriority); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); } pub unsafe fn send_smi(&mut self, dest: u32) { let mut icr_val = self.format_icr(0, IpiDeliveryMode::SystemManagement); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_smi_all(&mut self, who: IpiAllShorthand) { let mut icr_val = 
self.format_icr(0, IpiDeliveryMode::SystemManagement); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); } pub unsafe fn send_nmi(&mut self, dest: u32) { let mut icr_val = self.format_icr(0, IpiDeliveryMode::NonMaskable); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_nmi_all(&mut self, who: IpiAllShorthand) {
pub unsafe fn send_sipi(&mut self, vector: u8, dest: u32) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::StartUp); icr_val.set_bits(ICR_DESTINATION, u64::from(dest)); self.regs.write_icr(icr_val); } pub unsafe fn send_sipi_all(&mut self, vector: u8) { let mut icr_val = self.format_icr(vector, IpiDeliveryMode::StartUp); icr_val.set_bits( ICR_DEST_SHORTHAND, IpiAllShorthand::AllExcludingSelf.into(), ); self.regs.write_icr(icr_val); } pub unsafe fn send_ipi_self(&mut self, vector: u8) { self.regs.write_self_ipi(u64::from(vector)); } fn format_icr(&self, vector: u8, mode: IpiDeliveryMode) -> u64 { let mut icr_val = 0; icr_val.set_bits(ICR_VECTOR, u64::from(vector)); icr_val.set_bits(ICR_DELIVERY_MODE, mode.into()); icr_val.set_bit( ICR_DESTINATION_MODE, self.ipi_destination_mode == IpiDestMode::Logical, ); icr_val.set_bit(ICR_LEVEL, true); icr_val } unsafe fn x2apic_mode_enable(&mut self) { self.regs.set_base_bit(BASE_X2APIC_ENABLE, true); } unsafe fn software_enable(&mut self) { self.regs.set_sivr_bit(SIVR_APIC_SOFTWARE_ENABLE, true); } unsafe fn remap_lvt_entries(&mut self) { self.regs.set_lvt_timer_bit_range( LVT_TIMER_VECTOR, self.timer_vector as u64, ); self.regs.set_lvt_error_bit_range( LVT_ERROR_VECTOR, self.error_vector as u64, ); self.regs .set_sivr_bit_range(SIVR_VECTOR, self.spurious_vector as u64); } unsafe fn configure_timer(&mut self) { self.regs .set_lvt_timer_bit_range(LVT_TIMER_MODE, self.timer_mode.into()); self.regs .set_tdcr_bit_range(TDCR_DIVIDE_VALUE, self.timer_divide.into()); self.regs.write_ticr(u64::from(self.timer_initial)); } unsafe fn disable_local_interrupt_pins(&mut self) { self.regs.write_lvt_lint0(0); self.regs.write_lvt_lint1(0); } }
let mut icr_val = self.format_icr(0, IpiDeliveryMode::NonMaskable); icr_val.set_bits(ICR_DEST_SHORTHAND, who.into()); self.regs.write_icr(icr_val); }
function_block-function_prefix_line
[ { "content": "// Gets the upper segment selector for `irq`\n\npub fn hi(irq: u8) -> u32 {\n\n lo(irq) + 1\n\n}\n\n\n\n\n\nimpl IrqMode {\n\n pub(super) fn as_u32(self) -> u32 {\n\n self as u32\n\n }\n\n}\n\n\n\n/// The IOAPIC structure.\n\n#[derive(Debug)]\n\npub struct IoApic {\n\n regs: IoApicRegisters,\n\n}\n\n\n\nimpl IoApic {\n\n /// Returns an IOAPIC with the given MMIO address `base_addr`.\n\n ///\n", "file_path": "src/arch/intel/interrupt/x2apic/io_apic.rs", "rank": 0, "score": 197296.1835282166 }, { "content": "// Gets the lower segment selector for `irq`\n\npub fn lo(irq: u8) -> u32 {\n\n IOAPIC_TABLE_BASE + (2 * u32::from(irq))\n\n}\n\n\n", "file_path": "src/arch/intel/interrupt/x2apic/io_apic.rs", "rank": 1, "score": 197296.1835282166 }, { "content": "/// 返回是否启用中断。\n\npub fn are_enabled() -> bool {\n\n use crate::arch::intel::chips::flags::RFlags;\n\n use super::rflags::read_flags;\n\n read_flags().contains(RFlags::INTERRUPT_FLAG)\n\n}\n\n\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 2, "score": 162584.15978442616 }, { "content": "#[inline]\n\npub fn int_n(n: u32) {\n\n unsafe {\n\n llvm_asm!(\"int $0\" :: \"N\" (n) :: \"volatile\");\n\n }\n\n}\n\n\n\n/// 暂停CPU,直到下一个中断到达。\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 3, "score": 158560.628395133 }, { "content": "#[allow(unused_assignments)]\n\n#[inline]\n\npub fn rdmsr(msr: u32) -> u64 {\n\n let (mut high, mut low) = (0_u32, 0_32);\n\n unsafe {\n\n llvm_asm!(\"rdmsr\"\n\n : \"={eax}\"(low),\"={edx}\"(high)\n\n : \"{ecx}\"(msr)\n\n : \"memory\"\n\n : \"volatile\"\n\n );\n\n }\n\n ((high as u64) << 32) | (low as u64)\n\n}\n\n\n\n/// 从msr寄存器中写入64位数据\n\n#[inline]\n\npub unsafe fn wrmsr(msr: u32, data: u64) {\n\n let low = data as u32;\n\n let high = (data >> 32) as u32;\n\n llvm_asm!(\"wrmsr\"\n\n :\n", "file_path": "src/arch/intel/instructions/register.rs", "rank": 4, "score": 150066.76084702348 }, { "content": "pub fn get_offset(offset: u8) -> u16 {\n\n 
((offset as u16 & 2) * 8) & 0xffff\n\n}", "file_path": "src/devices/bus/pic/mod.rs", "rank": 5, "score": 147694.3060280579 }, { "content": "#[inline]\n\npub fn int3() {\n\n unsafe {\n\n llvm_asm!(\"int3\" :::: \"volatile\");\n\n }\n\n}\n\n\n\n/// 对int n指令的封装\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 6, "score": 108497.41758320192 }, { "content": "#[inline]\n\npub fn hlt() {\n\n unsafe {\n\n llvm_asm!(\"hlt\" :::: \"volatile\");\n\n }\n\n}\n\n\n\n/// Atomically enable interrupts and put the CPU to sleep\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 7, "score": 108497.41758320192 }, { "content": "#[inline]\n\npub fn enable_interrupt() {\n\n unsafe {\n\n llvm_asm!(\"sti\" :::: \"volatile\");\n\n }\n\n}\n\n\n\n/// 屏蔽中断,已经使用unsafe包裹\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 8, "score": 106593.51412226644 }, { "content": "#[inline]\n\npub fn disable_interrupt() {\n\n unsafe {\n\n llvm_asm!(\"cli\" :::: \"volatile\");\n\n }\n\n}\n\n\n\n/// 触发调试中断 breakpoint exception\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 9, "score": 106593.51412226644 }, { "content": "#[inline]\n\npub fn system_pause() {\n\n unsafe { llvm_asm!(\"pause\" : : : : \"intel\", \"volatile\"); }\n\n}\n\n\n\n/// 开启中断,已经使用unsafe包裹\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 10, "score": 106593.51412226644 }, { "content": "struct Chips8253 {\n\n // frequency (Hz)\n\n frequency: u16,\n\n control: ContorlWord,\n\n}\n\n\n\nimpl Chips8253 {\n\n pub fn new(frequency: u16, control: ContorlWord) -> Self {\n\n Self {\n\n frequency,\n\n control,\n\n }\n\n }\n\n // 中断计数方式\n\n pub fn interrupt_on_terminal_count(&self) {\n\n unsafe { self.init_time_n(TIMER0) }\n\n }\n\n // 硬件可重触发单稳方式\n\n pub fn hardware_retriggerable_one_shot(&self) {\n\n unsafe { self.init_time_n(TIMER1) }\n", "file_path": "src/arch/intel/timer.rs", "rank": 11, "score": 105522.9641598623 }, { "content": "#[inline]\n\npub fn 
enable_interrupt_and_hlt() {\n\n unsafe {\n\n llvm_asm!(\"sti;hlt\"::::\"volatile\")\n\n }\n\n}\n\n\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 12, "score": 104782.72822413404 }, { "content": "#[inline]\n\npub fn enable_interrupt_and_nop() {\n\n unsafe {\n\n llvm_asm!(\"sti;nop\"::::\"volatile\")\n\n }\n\n}\n\n\n", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 13, "score": 104782.72822413404 }, { "content": "#[allow(unused_assignments)]\n\npub fn read() -> u64 {\n\n let mut r = 0_u64;\n\n unsafe {\n\n llvm_asm!(\"pushfq; popq $0\" : \"=r\"(r) ::\"memory\")\n\n };\n\n r\n\n}\n\n\n", "file_path": "src/arch/intel/instructions/rflags.rs", "rank": 14, "score": 104061.52099574718 }, { "content": "#[allow(unused_assignments)]\n\n#[inline(always)]\n\npub fn read_rip() -> u64 {\n\n let mut rip: u64 = 0;\n\n unsafe {\n\n llvm_asm!(\n\n \"lea (%rip), $0\"\n\n : \"=r\"(rip) ::: \"volatile\"\n\n );\n\n }\n\n rip\n\n}\n\n\n\n/// 从msr寄存器中读取64位数据\n", "file_path": "src/arch/intel/instructions/register.rs", "rank": 15, "score": 102250.73509761479 }, { "content": "/// 读取RFLAGS寄存器转为RFLAGS\n\npub fn read_flags() -> RFlags {\n\n RFlags::from_bits_truncate(read())\n\n}\n\n\n", "file_path": "src/arch/intel/instructions/rflags.rs", "rank": 16, "score": 100526.39843491607 }, { "content": "/// 写入REFLAGS寄存器原始数据\n\npub fn write(val: u64) {\n\n unsafe { llvm_asm!(\"pushq $0; popfq\" :: \"r\"(val) : \"memory\" \"flags\") };\n\n}\n\n\n", "file_path": "src/arch/intel/instructions/rflags.rs", "rank": 17, "score": 100084.2695693395 }, { "content": "/// 写入REFLAGS寄存器\n\npub fn write_flags(flags: RFlags) {\n\n let old_value = read();\n\n let reserved = old_value & !(RFlags::all().bits());\n\n let new_value = reserved | flags.bits();\n\n write(new_value);\n\n}", "file_path": "src/arch/intel/instructions/rflags.rs", "rank": 18, "score": 96715.99901665897 }, { "content": "#[allow(unused_assignments)]\n\npub fn cs<S: Selector>() -> S {\n\n let mut segment: u16 = 
0;\n\n\n\n unsafe {\n\n llvm_asm!(\n\n \"mov %cs, $0\"\n\n :\"=r\"(segment)\n\n );\n\n }\n\n S::from(segment)\n\n}\n", "file_path": "src/arch/intel/instructions/segmention.rs", "rank": 19, "score": 96485.170452681 }, { "content": "#[inline]\n\npub fn sidt() -> DescriptorTablePointer<IntelX64> {\n\n let idt = DescriptorTablePointer::empty();\n\n unsafe {\n\n llvm_asm!(\n\n \"sidt ($0)\":\"=r\"(&idt)::\"memory\"\n\n )\n\n }\n\n idt\n\n}\n\n\n\n\n\n/// 使用`lidt`加载IDT描述符\n\n#[inline]\n\npub unsafe fn lidt(idt: &DescriptorTablePointer<IntelX64>) {\n\n llvm_asm!(\"lidt ($0)\" :: \"r\" (idt) : \"memory\");\n\n}\n\n\n\n/// 使用`ltr`加载TSS描述符\n\n#[inline]\n\npub unsafe fn load_tss<T: Selector>(sel: T) {\n\n llvm_asm!(\"ltr $0\" :: \"r\" (sel.as_u16()));\n\n}\n\n\n\n#[inline]\n\npub unsafe fn load_tr<T: Selector>(sel: T) {\n\n llvm_asm!(\"ltr $0\" :: \"r\" (sel.as_u16()));\n\n}", "file_path": "src/arch/intel/instructions/tables.rs", "rank": 20, "score": 95146.97246176601 }, { "content": "#[inline]\n\npub fn sgdt() -> DescriptorTablePointer<IntelX64> {\n\n let gdt = DescriptorTablePointer::empty();\n\n unsafe {\n\n llvm_asm!(\n\n \"sgdt ($0)\":\"=r\"(&gdt) : :\"memory\"\n\n )\n\n }\n\n gdt\n\n}\n\n\n\n/// 使用`sgdt`取出IDTR寄存器的数据\n", "file_path": "src/arch/intel/instructions/tables.rs", "rank": 21, "score": 95146.97246176601 }, { "content": "/// Run a closure with disabled interrupts.\n\n///\n\n/// Run the given closure, disabling interrupts before running it (if they aren't already disabled).\n\n/// Afterwards, interrupts are enabling again if they were enabled before.\n\n///\n\n/// If you have other `enable` and `disable` calls _within_ the closure, things may not work as expected.\n\n///\n\n/// # Examples\n\n///\n\n/// ```ignore\n\n/// // interrupts are enabled\n\n/// without_interrupts(|| {\n\n/// // interrupts are disabled\n\n/// without_interrupts(|| {\n\n/// // interrupts are disabled\n\n/// });\n\n/// // interrupts are still disabled\n\n/// });\n\n/// // interrupts are 
enabled again\n\n/// ```\n\npub fn without_interrupts<F, R>(f: F) -> R\n\n where\n\n F: FnOnce() -> R,\n\n{\n\n // true if the interrupt flag is set (i.e. interrupts are enabled)\n\n let saved_intpt_flag = are_enabled();\n\n\n\n // if interrupts are enabled, disable them for now\n\n if saved_intpt_flag {\n\n disable_interrupt();\n\n }\n\n\n\n // do `f` while interrupts are disabled\n\n let ret = f();\n\n\n\n // re-enable interrupts if they were previously enabled\n\n if saved_intpt_flag {\n\n enable_interrupt();\n\n }\n\n\n\n // return the result of `f` to the caller\n\n ret\n\n}", "file_path": "src/arch/intel/instructions/interrupt.rs", "rank": 22, "score": 90190.10144901945 }, { "content": "pub fn align_up(addr: u64, align: u64) -> u64 {\n\n assert_eq!(align & (align - 1), 0, \"`align` must be a power of two\");\n\n\n\n let mask = align - 1;\n\n if addr & mask == 0 {\n\n addr\n\n } else {\n\n (addr | mask) + 1\n\n }\n\n}\n\n\n", "file_path": "src/arch/intel/x64/address/mod.rs", "rank": 23, "score": 88690.97644501203 }, { "content": "pub fn align_down(addr: u64, align: u64) -> u64 {\n\n assert_eq!(align & (align - 1), 0, \"`align` must be a power of two\");\n\n addr & !(align - 1)\n\n}\n\n\n", "file_path": "src/arch/intel/x64/address/mod.rs", "rank": 24, "score": 88690.97644501203 }, { "content": "#[inline]\n\nfn p1_ptr(page: Page<Page4KB>, recursive_index: PageIndex) -> *mut PageTable {\n\n p1_page(page, recursive_index).start_address().as_mut_ptr()\n\n}\n\n\n", "file_path": "src/arch/intel/x64/paging/mapper/page.rs", "rank": 25, "score": 84415.5288112142 }, { "content": "#[inline]\n\nfn p3_ptr<S: PageSize>(page: Page<S>, recursive_index: PageIndex) -> *mut PageTable {\n\n p3_page(page, recursive_index).start_address().as_mut_ptr()\n\n}\n\n\n", "file_path": "src/arch/intel/x64/paging/mapper/page.rs", "rank": 26, "score": 79551.62013155148 }, { "content": "#[inline]\n\nfn p2_ptr<S: NotGiantPageSize>(page: Page<S>, recursive_index: PageIndex) -> *mut PageTable 
{\n\n p2_page(page, recursive_index).start_address().as_mut_ptr()\n\n}\n\n\n", "file_path": "src/arch/intel/x64/paging/mapper/page.rs", "rank": 27, "score": 78548.13916451165 }, { "content": "#[derive(Debug)]\n\nstruct Pic {\n\n offset: u8,\n\n command: UnsafePort<u8>,\n\n data: UnsafePort<u8>,\n\n}\n\n\n\nimpl Pic {\n\n /// 判断中断向量是否可接受的范围中\n\n /// IRQ0-IRQ7 共8个\n\n /// IRQ8 -IRQ15 共8个\n\n fn handle_interrupt(&self, interrupt_id: u8) -> bool {\n\n self.offset <= interrupt_id && interrupt_id < self.offset + 8\n\n }\n\n /// 向对应端口写入EOI命令完成中断\n\n unsafe fn end_interrupt(&mut self) {\n\n self.command.write(EOI);\n\n }\n\n}\n\n\n\n/// PIC两级级联结构\n", "file_path": "src/arch/intel/interrupt/pic.rs", "rank": 28, "score": 64116.20896157503 }, { "content": "#[derive(Debug)]\n\nstruct PageTableWalker<P: PhysicalToVirtual> {\n\n phy_to_vir: P,\n\n}\n\n\n\nimpl<P: PhysicalToVirtual> PageTableWalker<P> {\n\n ///\n\n /// # Safety\n\n ///\n\n pub unsafe fn new(p: P) -> Self {\n\n Self { phy_to_vir: p }\n\n }\n\n\n\n /// MappedPageTable内部辅助函数可获取对下一级页面表的引用。\n\n /// 如果未使用该条目,则返回 `PageTableWalkError::NotMapped`。\n\n /// 如果在传递的条目中设置了`HUGE_PAGE`标志,则返回`PageTableWalkError::MappedToHugePage`。\n\n fn next_table<'a>(&self, entry: &'a PageTableEntry) -> Result<&'a PageTable, PageTableWalkError> {\n\n let table_ptr = self.phy_to_vir.phy_to_vir(entry.frame()?);\n\n let page_table: &PageTable = unsafe { &*table_ptr };\n\n Ok(page_table)\n\n }\n", "file_path": "src/arch/intel/x64/paging/mapper/map_pt.rs", "rank": 29, "score": 53404.04002298531 }, { "content": "fn microdelay(us: u64) {\n\n let start = rdtsc!();\n\n let freq = 3_000_000_000u64; // 3GHz\n\n let end = start + freq / 1_000_000 * us;\n\n while rdtsc!() < end {}\n\n}\n\n\n\nimpl fmt::Debug for xApic {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.debug_struct(\"Xapic\")\n\n .field(\"id\", &self.id())\n\n .field(\"version\", &self.version())\n\n .field(\"icr\", &self.icr())\n\n .finish()\n\n }\n\n}\n\n\n\nimpl xApic {\n\n 
unsafe fn read(&self, reg: u32) -> u32 {\n\n read_volatile((self.base + reg as usize) as *const u32)\n\n }\n\n unsafe fn write(&self, reg: u32, value: u32) {\n\n write_volatile((self.base + reg as usize) as *mut u32, value);\n\n let _ = self.read(0x20);\n\n }\n\n}\n\n\n", "file_path": "src/arch/intel/interrupt/xapic/xpaic.rs", "rank": 30, "score": 53015.70877386507 }, { "content": "pub trait ArchIntel {\n\n const BIT: u64;\n\n const DISPLAY_STR: &'static str;\n\n}\n\n\n\npub struct IntelX32;\n\n\n\nimpl ArchIntel for IntelX32 {\n\n const BIT: u64 = 32;\n\n const DISPLAY_STR: &'static str = \"Intel x32\";\n\n}\n\n\n\npub struct IntelX64;\n\n\n\nimpl ArchIntel for IntelX64 {\n\n const BIT: u64 = 64;\n\n const DISPLAY_STR: &'static str = \"Intel x64\";\n\n}\n\n\n", "file_path": "src/arch/intel/mod.rs", "rank": 31, "score": 52200.70233163968 }, { "content": "pub trait DescriptorTable {\n\n fn load();\n\n}\n\n\n", "file_path": "src/arch/intel/mod.rs", "rank": 32, "score": 52200.70233163968 }, { "content": "/// 端口读取操作\n\npub trait PortRead {\n\n unsafe fn read(port: u16) -> Self;\n\n}\n\n\n", "file_path": "src/arch/intel/chips/port.rs", "rank": 33, "score": 51250.194693943806 }, { "content": "pub trait ControllerType {\n\n const DISPLAY_STR: &'static str;\n\n}\n\n\n\npub struct XPAIC(xApic);\n\n\n\npub struct X2APIC(LocalApic, IoApic);\n\n\n\npub struct PIC(ChainedPics);\n\n\n\nimpl ControllerType for XPAIC {\n\n const DISPLAY_STR: &'static str = \"xapic\";\n\n}\n\n\n\nimpl ControllerType for X2APIC {\n\n const DISPLAY_STR: &'static str = \"x2apic\";\n\n}\n\n\n\nimpl ControllerType for PIC {\n\n const DISPLAY_STR: &'static str = \"8259A\";\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 34, "score": 51250.194693943806 }, { "content": "/// 写入端口操作\n\npub trait PortWrite {\n\n unsafe fn write(port: u16, value: Self);\n\n}\n\n\n", "file_path": "src/arch/intel/chips/port.rs", "rank": 35, "score": 51250.194693943806 }, { "content": "pub trait ResultEx<T> 
{\n\n fn unwrap(self) -> T;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n repr: Repr,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum ProcessErrorKind {\n\n TryAgain,\n\n CrateNewProcessFailed,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum DevicesErrorKind {\n\n NotSupport\n\n}\n\n\n", "file_path": "src/result/mod.rs", "rank": 36, "score": 51089.55344653079 }, { "content": "pub trait CfgSpaceReader {\n\n /// len > 3 && len % 4 == 0\n\n unsafe fn read_range(&self, offset: u8, len: u16) -> Vec<u8> {\n\n let mut res = Vec::with_capacity(len as usize);\n\n let offset = offset as u16;\n\n let data = (offset..offset + len).step_by(4).fold(Vec::new(), |mut acc, offset| {\n\n let val = self.read(offset as u8);\n\n acc.push(val);\n\n acc\n\n });\n\n res.set_len(len as usize);\n\n LittleEndian::write_u32_into(data.as_slice(), &mut res);\n\n res\n\n }\n\n\n\n unsafe fn read(&self, offset: u8) -> u32;\n\n\n\n unsafe fn read_u8(&self, offset: u8) -> u8 {\n\n let dword_offset = (offset / 4) * 4;\n\n let dword = self.read(dword_offset);\n\n\n\n let shift = (offset % 4) * 8;\n\n ((dword >> shift) & 0xFF) as u8\n\n }\n\n}\n\n\n", "file_path": "src/devices/bus/pic/mod.rs", "rank": 37, "score": 50346.17520885859 }, { "content": "pub trait CfgSpaceWriter {\n\n unsafe fn write(&self, offset: u8, val: u32);\n\n}\n\n\n\n// When you want to retrieve the actual base address of a BAR, be sure to mask the lower bits.\n\n// For 16-Bit Memory Space BARs, you calculate (BAR[x] & 0xFFF0).\n\n// For 32-Bit Memory Space BARs, you calculate (BAR[x] & 0xFFFFFFF0).\n\n// For 64-Bit Memory Space BARs, you calculate ((BAR[x] & 0xFFFFFFF0) + ((BAR[x+1] & 0xFFFFFFFF) << 32))\n\n// For I/O Space BARs, you calculate (BAR[x] & 0xFFFFFFFC).\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum PciBaseAddress {\n\n None,\n\n Memory(u32),\n\n Port(u16),\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct MemorySpaceBarLayout(u32);\n\n\n\nimpl MemorySpaceBarLayout 
{\n", "file_path": "src/devices/bus/pic/mod.rs", "rank": 38, "score": 50346.17520885859 }, { "content": "/// 将给定的物理帧转换为页表裸指针\n\npub trait PhysicalToVirtual {\n\n fn phy_to_vir(&self, phy_frame: Frame) -> *mut PageTable;\n\n}\n\n\n\nimpl<T> PhysicalToVirtual for T where T: Fn(Frame) -> *mut PageTable {\n\n fn phy_to_vir(&self, phy_frame: Frame<Page4KB>) -> *mut PageTable {\n\n self(phy_frame)\n\n }\n\n}\n\n\n\n/// 用于遍历页表的结构\n", "file_path": "src/arch/intel/x64/paging/mapper/map_pt.rs", "rank": 39, "score": 48664.59473388225 }, { "content": "pub trait MemoryAreaManagement {\n\n fn add_area(&mut self, start_addr: u64, end_addr: u64, ty: MemoryType, len: u64);\n\n}\n\n\n\npub struct AdaptationAllocator<F: FrameAllocator<Page4KB> + MemoryAreaManagement> {\n\n inner: F,\n\n free: Vec<(usize, usize)>,\n\n}\n\n\n\nimpl<F: FrameAllocator<Page4KB> + MemoryAreaManagement> AdaptationAllocator<F> {\n\n pub fn new(inner: F) -> Self {\n\n Self {\n\n inner,\n\n free: Vec::new(),\n\n }\n\n }\n\n pub fn free_count(&self) -> usize {\n\n let mut size = 0;\n\n for area in self.free.iter() {\n\n size += area.1;\n", "file_path": "src/arch/intel/x64/paging/frame_allocator.rs", "rank": 40, "score": 48664.59473388225 }, { "content": "pub trait Selector: Debug + From<u16> {\n\n /// 返回当前的描述符索引\n\n fn index(&self) -> u16;\n\n /// 返回当前描述符的特权级\n\n fn rpl(&self) -> PrivilegedLevel;\n\n /// 索引(16位)\n\n fn as_u16(&self) -> u16;\n\n /// 索引(usize)\n\n fn as_usize(&self) -> usize;\n\n /// 索引(64位)\n\n fn as_u64(&self) -> u64;\n\n /// 索引(32位)\n\n fn as_u32(&self) -> u32;\n\n}\n\n\n\n/// 系统特权级\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\n#[repr(u8)]\n\npub enum PrivilegedLevel {\n\n /// 特权级0\n", "file_path": "src/arch/intel/mod.rs", "rank": 41, "score": 48365.57892225441 }, { "content": "/// NotGiantPageSize只用于4KB和2MB页表\n\npub trait NotGiantPageSize: PageSize {}\n\n\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]\n\npub struct Page4KB {}\n\n\n\nimpl PageSize for Page4KB {\n\n 
const P_SIZE: u64 = 4096;\n\n const DISPLAY_STR: &'static str = \"page 4 kb\";\n\n}\n\n\n\nimpl NotGiantPageSize for Page4KB {}\n\n\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]\n\npub struct Page2MB {}\n\n\n\nimpl PageSize for Page2MB {\n\n const P_SIZE: u64 = Page4KB::P_SIZE * 512;\n\n const DISPLAY_STR: &'static str = \"page 2 MB\";\n", "file_path": "src/arch/intel/x64/paging/page.rs", "rank": 42, "score": 46770.12266313209 }, { "content": "pub trait PhysicalAddress<A: ArchIntel> {\n\n type BITS;\n\n\n\n fn as_u64(self) -> u64;\n\n fn as_usize(self) -> usize;\n\n /// 用于判断物理地址是否是零地址\n\n fn is_null(&self) -> bool;\n\n /// 页表向上对齐\n\n fn align_up<U>(self, align: U) -> Self where U: Into<u64>;\n\n /// 页表向下对齐\n\n fn align_down<U>(self, align: U) -> Self where U: Into<u64>;\n\n /// 判断当前地址是否已经被对齐\n\n fn is_aligned<U>(self, align: U) -> bool where U: Into<u64>;\n\n /// 转换为可变裸指针\n\n fn as_mut(&self) -> *mut Self::BITS;\n\n /// 转换为裸指针\n\n fn as_ptr(&self) -> *const Self::BITS;\n\n}", "file_path": "src/arch/intel/x64/address/mod.rs", "rank": 43, "score": 46602.70639724918 }, { "content": "pub trait VirtualAddress<A: ArchIntel> {\n\n type BITS;\n\n\n\n /// 将虚拟地址结构转为u64类型\n\n fn as_u64(&self) -> u64;\n\n /// 将虚拟地址结构转为usize类型\n\n fn as_usize(&self) -> usize;\n\n /// 从给定的指针中创建虚拟地址\n\n fn from_pointer<T>(pointer: *const T) -> Self;\n\n /// 将虚拟地址转为64位宽的原始指针\n\n fn as_ptr<T>(self) -> *const T where Self: core::marker::Sized {\n\n cast::usize(self.as_u64()) as *const T\n\n }\n\n /// 将虚拟地址转为64位宽的可变原始指针\n\n fn as_mut_ptr<T>(self) -> *mut T where Self: core::marker::Sized {\n\n self.as_ptr::<T>() as *mut T\n\n }\n\n /// 将虚拟地址向上对齐\n\n fn align_up<U>(self, align: U) -> Self where U: Into<Self::BITS>;\n\n /// 将虚拟地址向下对齐\n\n fn align_down<U>(self, align: U) -> Self where U: Into<Self::BITS>;\n\n /// 判断虚拟地址是否被对齐\n\n fn is_aligned<U>(self, align: U) -> bool where U: Into<Self::BITS>;\n\n}\n\n\n\n\n", "file_path": "src/arch/intel/x64/address/mod.rs", "rank": 44, 
"score": 46602.70639724918 }, { "content": "pub trait Mapper<S: PageSize> {\n\n /// 在页表中创建一个新的映射。\n\n /// 此函数需要其他物理帧才能创建新的页表。\n\n /// 帧的分配由`allocator`参数完成\n\n unsafe fn map_to<A>(&mut self, page: Page<S>, frame: Frame<S>, flags: PageTableFlags, allocator: &mut A)\n\n -> Result<MapperFlush<S>, MapToError<S>>\n\n where A: FrameAllocator<Page4KB>, Self: Sized;\n\n\n\n /// 从页表中解除映射关系,并返回被解除关系的frame。\n\n /// frame没有被释放\n\n fn unmap(&mut self, page: Page<S>) -> Result<(Frame<S>, MapperFlush<S>), UnmapError>;\n\n\n\n /// 更新现有映射的flags。\n\n unsafe fn update_flags(&mut self, page: Page<S>, flags: PageTableFlags) -> Result<MapperFlush<S>, FlagUpdateError>;\n\n\n\n /// 返回给定的页面与之映射的物理帧\n\n fn translate_page(&mut self, page: Page<S>) -> Result<Frame<S>, TranslateError>;\n\n\n\n /// 将给定的frame映射到相同虚拟地址的页面\n\n /// 此函数假定页面已映射到大小为`S`的frame,否则会返回错误。\n\n unsafe fn identity_map<A>(&mut self, frame: Frame<S>, flags: PageTableFlags, allocator: &mut A)\n\n -> Result<MapperFlush<S>, MapToError<S>>\n\n where A: FrameAllocator<Page4KB>, Self: Sized, S: PageSize, Self: Mapper<S> {\n\n let page = Page::include_address(VirtAddr::new(frame.start_address().as_u64()));\n\n self.map_to(page, frame, flags, allocator)\n\n }\n\n}\n\n\n", "file_path": "src/arch/intel/x64/paging/mapper/mod.rs", "rank": 45, "score": 45819.383211607914 }, { "content": "pub trait TablePointer<A: ArchIntel = IntelX64> {\n\n fn limit(&self) -> u16;\n\n\n\n fn base(&self) -> u64;\n\n}\n\n\n\n\n", "file_path": "src/arch/intel/mod.rs", "rank": 46, "score": 45779.97896219286 }, { "content": "/// 实现了PortRead和PortWrite也实现了该trait\n\npub trait PortReadWrite: PortRead + PortWrite {}\n\n\n\n// ---------------------- u8 ---------------------\n\n\n\nimpl PortRead for u8 {\n\n unsafe fn read(port: u16) -> Self {\n\n inb(port)\n\n }\n\n}\n\n\n\nimpl PortWrite for u8 {\n\n unsafe fn write(port: u16, value: Self) {\n\n outb(value, port);\n\n }\n\n}\n\n\n\nimpl PortReadWrite for u8 {}\n\n\n\n// ---------------------- u16 
---------------------\n\nimpl PortWrite for u16 {\n", "file_path": "src/arch/intel/chips/port.rs", "rank": 47, "score": 45070.95778174406 }, { "content": "/// 针对3种不同的页大小的抽象\n\npub trait PageSize: Copy + Eq + PartialEq + Ord {\n\n const P_SIZE: u64;\n\n const DISPLAY_STR: &'static str;\n\n}\n\n\n", "file_path": "src/arch/intel/x64/paging/page.rs", "rank": 48, "score": 42813.48255163248 }, { "content": "#[inline]\n\nfn p1_page(page: Page<Page4KB>, recursive_index: PageIndex) -> Page {\n\n Page::from_page_table_indices(\n\n recursive_index,\n\n page.p4_index(),\n\n page.p3_index(),\n\n page.p2_index(),\n\n )\n\n}\n\n\n\n\n", "file_path": "src/arch/intel/x64/paging/mapper/page.rs", "rank": 49, "score": 42493.20225549713 }, { "content": "use core::fmt;\n\n\n\nuse crate::alloc::string::String;\n\n\n\n#[derive(Debug)]\n\npub struct MemoryError {\n\n kind: MemErrorKind,\n\n msg: String,\n\n}\n\n\n\nimpl fmt::Display for MemoryError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self.kind {\n\n MemErrorKind::NotAligned => write!(f, \"{}\", self.msg),\n\n MemErrorKind::PageTableIndexNotMatch => write!(f, \"{}\", self.msg),\n\n MemErrorKind::FrameNotMatch => write!(f, \"{}\", self.msg),\n\n MemErrorKind::AllocateFiled => write!(f, \"{}\", self.msg),\n\n }\n\n }\n\n}\n", "file_path": "src/result/error/mem.rs", "rank": 50, "score": 41511.41448457136 }, { "content": "\n\nimpl MemoryError {\n\n pub fn new(kind: MemErrorKind, msg: String) -> Self {\n\n MemoryError { kind, msg }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum MemErrorKind {\n\n NotAligned,\n\n PageTableIndexNotMatch,\n\n FrameNotMatch,\n\n AllocateFiled,\n\n}", "file_path": "src/result/error/mem.rs", "rank": 51, "score": 41510.859246877415 }, { "content": "pub mod mem;\n\n\n", "file_path": "src/result/error/mod.rs", "rank": 52, "score": 41499.502858193155 }, { "content": " outb(0x43, self.control.bits as u16);\n\n let mut port = Port::new(n);\n\n port.write(self.frequency as u8);\n\n 
port.write((self.frequency >> 8) as u8);\n\n }\n\n\n\n pub unsafe fn stop_time_n(&self, n: u16) {\n\n debug_assert!(n >= 0 && n < 6, \"8253 support timer0-timer5\");\n\n outb(self.control.bits, TIMER_MODE);\n\n let mut timer0 = Port::new(n);\n\n timer0.write(0_u8);\n\n timer0.write(0_u8);\n\n }\n\n\n\n pub unsafe fn read_pit_count(&self, n: u16) -> u16 {\n\n debug_assert!(n >= 0 && n < 6, \"8253 support timer0-timer5\");\n\n outb(0, TIMER_MODE);\n\n let mut port = Port::new(n);\n\n let low: u8 = port.read();\n\n let high = port.read();\n", "file_path": "src/arch/intel/timer.rs", "rank": 53, "score": 41487.39913108788 }, { "content": "use bitflags::bitflags;\n\n\n\nuse crate::arch::intel::chips::port::Port;\n\nuse crate::arch::intel::instructions::port::outb;\n\n\n\npub const TIMER_FREQUENCY: usize = 1193182;\n\npub const TIMER_MODE: u16 = 0x43;\n\npub const TIMER0: u16 = 0x40;\n\npub const TIMER1: u16 = 0x41;\n\npub const TIMER2: u16 = 0x42;\n\npub const TIMER3: u16 = 0x43;\n\npub const TIMER4: u16 = 0x44;\n\npub const TIMER5: u16 = 0x45;\n\n\n\npub const IRQ_FREQUENCY: usize = 100;\n\n\n\npub const fn timer_count(frequency: usize) -> usize {\n\n TIMER_FREQUENCY / frequency\n\n}\n\n\n", "file_path": "src/arch/intel/timer.rs", "rank": 54, "score": 41485.78853811108 }, { "content": " ((high as u16) << 8) | low as u16\n\n }\n\n\n\n pub unsafe fn set_pit_count(&self, count: u16) {\n\n let mut port = Port::new(TIMER0);\n\n port.write(count as u8);\n\n port.write((count >> 8) as u8);\n\n }\n\n}\n\n\n", "file_path": "src/arch/intel/timer.rs", "rank": 55, "score": 41479.839130370616 }, { "content": " }\n\n // 比率发生器\n\n pub fn rate_generator(&self) {\n\n unsafe { self.init_time_n(TIMER2) }\n\n }\n\n // 方波发生器\n\n pub fn square_wave_generator(&self) {\n\n unsafe { self.init_time_n(TIMER3) }\n\n }\n\n // 软件触发选通\n\n pub fn software_triggered_strobe(&self) {\n\n unsafe { self.init_time_n(TIMER4) }\n\n }\n\n // 硬件触发选通\n\n pub fn hardware_triggered_strobe(&self) {\n\n unsafe { 
self.init_time_n(TIMER5) }\n\n }\n\n\n\n pub unsafe fn init_time_n(&self, n: u16) {\n\n debug_assert!(n >= 0 && n < 6, \"8253 support timer0-timer5\");\n", "file_path": "src/arch/intel/timer.rs", "rank": 56, "score": 41474.73579839741 }, { "content": "// 0x40 - 0x42\n\nbitflags! {\n\n /// 110110\n\n pub struct ContorlWord: u8{\n\n /// BCD 码\n\n const BCD = 1 << 0;\n\n const METHOD0 = 000 << 1;\n\n const METHOD1 = 001 << 1;\n\n const METHOD2 = 010 << 1;\n\n const METHOD3 = 011 << 1;\n\n const METHOD4 = 100 << 1;\n\n const METHOD5 = 101 << 1;\n\n /// 锁存数据仅供CPU读\n\n const LOCK_DATA = 00 << 4;\n\n /// 只读写低字节\n\n const LOW_READ_ONLY = 01 << 4;\n\n /// 只读写高字节\n\n const HIGH_READ_ONLY = 10 << 4;\n\n /// 先读写低字节后读写高字节\n\n const READ_LOW_THEN_HIGH = 11 << 4;\n\n /// 选择计数器\n\n const COUNTER0 = 00 << 6;\n\n /// 选择计数器 1\n\n const COUNTER1 = 01 << 6;\n\n /// 选择计数器 2\n\n const COUNTER2 = 10 << 6;\n\n }\n\n}\n\n\n", "file_path": "src/arch/intel/timer.rs", "rank": 57, "score": 41471.48424474347 }, { "content": "#[inline]\n\nfn p3_page<S: PageSize>(page: Page<S>, recursive_index: PageIndex) -> Page {\n\n Page::from_page_table_indices(\n\n recursive_index,\n\n recursive_index,\n\n recursive_index,\n\n page.p4_index(),\n\n )\n\n}\n\n\n", "file_path": "src/arch/intel/x64/paging/mapper/page.rs", "rank": 58, "score": 39987.9246041307 }, { "content": "#[inline]\n\nfn p2_page<S: NotGiantPageSize>(page: Page<S>, recursive_index: PageIndex) -> Page {\n\n Page::from_page_table_indices(\n\n recursive_index,\n\n recursive_index,\n\n page.p4_index(),\n\n page.p3_index(),\n\n )\n\n}\n\n\n", "file_path": "src/arch/intel/x64/paging/mapper/page.rs", "rank": 59, "score": 39448.99567940315 }, { "content": "pub trait MapAllSize: Mapper<Page4KB> + Mapper<Page1GB> + Mapper<Page2MB> {\n\n /// 返回给定虚拟地址所映射的帧以及对应的帧内的偏移量。\n\n /// 如果给定的是有效虚拟地址,则返回映射的帧和该帧内的偏移量。 否则,将返回错误值。\n\n /// 此功能适用于各种种类的较大页面。\n\n fn translate(&self, addr: VirtAddr) -> TranslationResult;\n\n /// 将给定的虚拟地址转换为它映射到的物理地址。\n\n /// 
如果给定地址没有有效的映射,则返回 None。\n\n fn translate_addr(&self, addr: VirtAddr) -> Option<PhysAddr> {\n\n match self.translate(addr) {\n\n TranslationResult::Frame4KB { frame, offset } => Some(frame.start_address() + offset),\n\n TranslationResult::Frame2MB { frame, offset } => Some(frame.start_address() + offset),\n\n TranslationResult::Frame1GB { frame, offset } => Some(frame.start_address() + offset),\n\n TranslationResult::PageNotMapped | TranslationResult::InvalidFrameAddress(_) => None,\n\n }\n\n }\n\n}", "file_path": "src/arch/intel/x64/paging/mapper/mod.rs", "rank": 60, "score": 38460.121086550345 }, { "content": " pub unsafe fn set_timer_mode(&mut self, _mode: TimerMode) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn set_timer_divide(&mut self, _divide: TimerDivide) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn set_timer_initial(&mut self, _initial: u32) { unimplemented!() }\n\n pub unsafe fn set_logical_id(&mut self, _dest: u32) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_ipi(&mut self, _vector: u8, _dest: u32) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_ipi_all(&mut self, _vector: u8, _who: IpiAllShorthand) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_lowest_priority_ipi(&mut self, _vector: u8, _dest: u32) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_lowest_priority_ipi_all(&mut self, _vector: u8, _who: IpiAllShorthand) {\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 61, "score": 46.70336695382116 }, { "content": "\n\n#[derive(Copy, Clone, Default)]\n\npub struct ApicInfo {\n\n // for x2apic\n\n pub timer_vector: Option<usize>,\n\n pub error_vector: Option<usize>,\n\n pub spurious_vector: Option<usize>,\n\n pub timer_mode: Option<TimerMode>,\n\n pub timer_divide: Option<TimerDivide>,\n\n pub timer_initial: Option<u32>,\n\n pub ipi_destination_mode: Option<IpiDestMode>,\n\n // for io apic\n\n pub ioapic_offset: Option<u8>,\n\n // for 8259\n\n}\n\n\n\nimpl ApicInfo {\n\n /// Returns a new local APIC builder.\n\n pub 
fn new() -> Self {\n\n Default::default()\n", "file_path": "src/arch/intel/interrupt/mod.rs", "rank": 62, "score": 46.14050233665526 }, { "content": " }\n\n\n\n pub unsafe fn set_timer_divide(&mut self, divide: TimerDivide) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").set_timer_divide(divide)\n\n }\n\n\n\n pub unsafe fn set_timer_initial(&mut self, initial: u32) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").set_timer_initial(initial)\n\n }\n\n\n\n pub unsafe fn set_logical_id(&mut self, dest: u32) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").set_logical_id(dest)\n\n }\n\n\n\n pub unsafe fn send_ipi(&mut self, vector: u8, dest: u32) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_ipi(vector, dest)\n\n }\n\n\n\n pub unsafe fn send_ipi_all(&mut self, vector: u8, who: IpiAllShorthand) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_ipi_all(vector, who)\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 65, "score": 40.79292373443741 }, { "content": " pub unsafe fn set_timer_initial(&mut self, _initial: u32) {\n\n panic!(\"8259 not support set_timer_initial \")\n\n }\n\n\n\n pub unsafe fn set_logical_id(&mut self, _dest: u32) {\n\n panic!(\"8259 not support set_logical_id \")\n\n }\n\n\n\n pub unsafe fn send_ipi(&mut self, _vector: u8, _dest: u32) {\n\n panic!(\"8259 not support send_ipi \")\n\n }\n\n\n\n pub unsafe fn send_ipi_all(&mut self, _vector: u8, _who: IpiAllShorthand) {\n\n panic!(\"8259 not support send_lowest_priority_ipi \")\n\n }\n\n\n\n pub unsafe fn send_lowest_priority_ipi(&mut self, _vector: u8, _dest: u32) {\n\n panic!(\"8259 not support send_lowest_priority_ipi \")\n\n }\n\n\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 67, "score": 37.207179360309986 }, { "content": " pub unsafe fn send_sipi(&mut self, _vector: u8, _dest: u32) {\n\n panic!(\"8259 not support send sipi\")\n\n }\n\n\n\n pub unsafe fn send_sipi_all(&mut 
self, _vector: u8) {\n\n panic!(\"8259 not support send sipi all\")\n\n }\n\n\n\n pub unsafe fn send_ipi_self(&mut self, _vector: u8) {\n\n panic!(\"8259 not support send ipi self\")\n\n }\n\n\n\n pub unsafe fn enable_irq(&mut self, _irq: u8, _dest: u32, _mode: IrqMode, _options: IrqFlags) {\n\n panic!(\"8259 not support irq\")\n\n }\n\n\n\n pub unsafe fn disable_irq(&mut self, _irq: u8) {\n\n panic!(\"8259 not support irq\")\n\n }\n\n\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 68, "score": 35.512692664563396 }, { "content": " }\n\n\n\n pub unsafe fn send_nmi_all(&mut self, who: IpiAllShorthand) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_nmi_all(who)\n\n }\n\n\n\n pub unsafe fn send_sipi(&mut self, vector: u8, dest: u32) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_ipi(vector, dest)\n\n }\n\n\n\n pub unsafe fn send_sipi_all(&mut self, vector: u8) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_sipi_all(vector)\n\n }\n\n\n\n pub unsafe fn send_ipi_self(&mut self, vector: u8) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_ipi_self(vector)\n\n }\n\n\n\n pub unsafe fn enable_irq(&mut self, irq: u8, dest: u32, mode: IrqMode, options: IrqFlags) {\n\n self.io_apic.as_mut().expect(\"io apic not init\").enable_irq(irq, dest, mode, options);\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 70, "score": 35.106921748876225 }, { "content": " pub unsafe fn send_ipi_self(&mut self, _vector: u8) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn enable_irq(&mut self, _irq: u8, _dest: u32, _mode: IrqMode, _options: IrqFlags) { unimplemented!() }\n\n pub unsafe fn disable_irq(&mut self, _irq: u8) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn io_apic_set_arbitration_id(&mut self, _id: u8) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn io_apic_set_id(&mut self, _id: u8) {\n\n unimplemented!()\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": 
"src/arch/intel/interrupt/controller.rs", "rank": 72, "score": 33.26693456275564 }, { "content": "\n\n /// Returns the IOAPIC arbitration IOAPIC_ID.\n\n pub unsafe fn arbitration_id(&mut self) -> u8 {\n\n ((self.regs.read(IOAPIC_ARBITRATION) >> 24) & 0xf) as u8\n\n }\n\n\n\n /// Sets the IOAPIC arbitration IOAPIC_ID to `id`.\n\n pub unsafe fn set_arbitration_id(&mut self, id: u8) {\n\n self.regs.write(IOAPIC_ARBITRATION, u32::from(id) << 24);\n\n }\n\n\n\n /// Enable interrupt number `irq` on the CPUs specified by `dest`.\n\n pub unsafe fn enable_irq(&mut self, irq: u8, dest: u32, mode: IrqMode, options: IrqFlags) {\n\n let lo = lo(irq);\n\n let hi = hi(irq);\n\n\n\n self.regs.set(hi, dest << 24);\n\n\n\n self.regs.clear(lo, IRQ_MODE_MASK | IrqFlags::all().bits());\n\n self.regs.set(lo, mode.as_u32() | options.bits());\n", "file_path": "src/arch/intel/interrupt/x2apic/io_apic.rs", "rank": 76, "score": 31.178855130294785 }, { "content": " unimplemented!()\n\n }\n\n pub unsafe fn send_smi(&mut self, _dest: u32) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_smi_all(&mut self, _who: IpiAllShorthand) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_nmi(&mut self, _dest: u32) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_nmi_all(&mut self, _who: IpiAllShorthand) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_sipi(&mut self, _vector: u8, _dest: u32) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn send_sipi_all(&mut self, _vector: u8) {\n\n unimplemented!()\n\n }\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 77, "score": 30.868543595853485 }, { "content": "pub const ICR_LEVEL: usize = 14;\n\npub const ICR_DESTINATION_MODE: usize = 11;\n\npub const ICR_DELIVERY_MODE: Range<usize> = 8..11;\n\npub const ICR_VECTOR: Range<usize> = 0..8;\n\n\n\npub const LVT_TIMER_MODE: Range<usize> = 17..19;\n\npub const LVT_TIMER_MASK: usize = 16;\n\npub const LVT_TIMER_VECTOR: Range<usize> = 0..8;\n\n\n\npub const LVT_ERROR_VECTOR: Range<usize> = 
0..8;\n\n\n\npub const TDCR_DIVIDE_VALUE: Range<usize> = 0..4;\n\n\n\npub const IRQ_MASK_BIT: u32 = 0x0001_0000;\n\npub const IRQ_MODE_MASK: u32 = 0x0000_0700;\n\n\n\n\n\n// Register selectors\n\npub const IOAPIC_ID: u32 = 0x00;\n\npub const IOAPIC_VERSION: u32 = 0x01;\n\npub const IOAPIC_ARBITRATION: u32 = 0x02;\n\npub const IOAPIC_TABLE_BASE: u32 = 0x10;", "file_path": "src/arch/intel/interrupt/x2apic/consts.rs", "rank": 78, "score": 30.818250674006723 }, { "content": " pub unsafe fn disable(&mut self) {\n\n self.pic.as_mut().expect(\"pic not init\").disable_8259a()\n\n }\n\n\n\n pub unsafe fn enable_timer(&mut self) {\n\n panic!(\"8259 not support enable_timer \")\n\n }\n\n\n\n pub unsafe fn disable_timer(&mut self) {\n\n panic!(\"8259 not support disable_timer \")\n\n }\n\n\n\n pub unsafe fn set_timer_mode(&mut self, _mode: TimerMode) {\n\n panic!(\"8259 not support set_timer_mode \")\n\n }\n\n\n\n pub unsafe fn set_timer_divide(&mut self, _divide: TimerDivide) {\n\n panic!(\"8259 not support set_timer_divide \")\n\n }\n\n\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 82, "score": 28.992921311361048 }, { "content": " /// This field is required.\n\n set_attr!(set_spurious_vector,spurious_vector,usize);\n\n\n\n\n\n /// Sets the timer mode.\n\n ///\n\n /// Default: Periodic.\n\n set_attr!(set_timer_mode,timer_mode,TimerMode);\n\n\n\n\n\n /// Sets the timer divide configuration.\n\n ///\n\n /// Default: Div256.\n\n set_attr!(set_timer_divide,timer_divide,TimerDivide);\n\n\n\n\n\n /// Sets the timer initial count.\n\n ///\n\n /// Default: 10_000_000.\n\n set_attr!(set_timer_initial,timer_initial,u32);\n", "file_path": "src/arch/intel/interrupt/mod.rs", "rank": 83, "score": 28.929919748602742 }, { "content": " }\n\n\n\n pub unsafe fn send_lowest_priority_ipi(&mut self, vector: u8, dest: u32) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_lowest_priority_ipi(vector, dest)\n\n }\n\n\n\n pub unsafe fn 
send_lowest_priority_ipi_all(&mut self, vector: u8, who: IpiAllShorthand) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_lowest_priority_ipi_all(vector, who)\n\n }\n\n\n\n pub unsafe fn send_smi(&mut self, dest: u32) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_smi(dest)\n\n }\n\n\n\n pub unsafe fn send_smi_all(&mut self, who: IpiAllShorthand) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_smi_all(who)\n\n }\n\n\n\n pub unsafe fn send_nmi(&mut self, dest: u32) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").send_nmi(dest)\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 84, "score": 28.322784630621904 }, { "content": "use alloc::string::String;\n\n\n\nuse crate::arch::intel::interrupt::x2apic::register::{IpiDestMode, TimerDivide, TimerMode};\n\n\n\npub mod xapic;\n\npub mod x2apic;\n\npub mod flags;\n\npub mod controller;\n\npub mod pic;\n\n\n\n#[doc(hidden)]\n\nmacro_rules! set_attr {\n\n\n\n ($name:ident,$attr:ident,$args:ty) => {\n\n pub fn $name(&mut self, arg: $args) -> &mut Self {\n\n self.$attr = Some(arg);\n\n self\n\n }\n\n };\n\n}\n", "file_path": "src/arch/intel/interrupt/mod.rs", "rank": 85, "score": 27.577337459226946 }, { "content": "pub(crate) const X1: u32 = 0x0000000B;\n\n// divide counts by 1\n\npub(crate) const PERIODIC: u32 = 0x00020000;\n\n// Periodic\n\npub(crate) const PCINT: u32 = 0x0340;\n\n// Performance Counter LVT\n\npub(crate) const LINT0: u32 = 0x0350;\n\n// Local Vector Table 1 (LINT0)\n\npub(crate) const LINT1: u32 = 0x0360;\n\n// Local Vector Table 2 (LINT1)\n\npub(crate) const ERROR: u32 = 0x0370;\n\n// Local Vector Table 3 (ERROR)\n\npub(crate) const MASKED: u32 = 0x00010000;\n\n// Interrupt masked\n\npub(crate) const TICR: u32 = 0x0380;\n\n// Timer Initial Count\n\npub(crate) const TCCR: u32 = 0x0390;\n\n// Timer Current Count\n\npub(crate) const TDCR: u32 = 0x03E0; // Timer Divide Configuration\n\n\n", "file_path": 
"src/arch/intel/interrupt/xapic/consts.rs", "rank": 87, "score": 26.131874544631607 }, { "content": " xapic: None,\n\n pic: Some(t.0),\n\n _mark: Default::default(),\n\n }\n\n }\n\n\n\n pub unsafe fn version(&mut self) -> String {\n\n String::from(\"8259A\")\n\n }\n\n\n\n pub unsafe fn eoi(&mut self, number: Option<u8>) {\n\n self.pic.as_mut().expect(\"pic not init\").notify_end_of_interrupt(number.expect(\"must give notify vector\"))\n\n }\n\n\n\n pub unsafe fn init(&mut self, _info: ApicInfo) {\n\n self.pic.as_mut().expect(\"pic not init\").initialize()\n\n }\n\n\n\n pub unsafe fn enable(&mut self) {}\n\n\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 88, "score": 26.110910890608544 }, { "content": " pub unsafe fn send_lowest_priority_ipi_all(&mut self, _vector: u8, _who: IpiAllShorthand) {\n\n panic!(\"8259 not support send_lowest_priority_ipi_all \")\n\n }\n\n\n\n pub unsafe fn send_smi(&mut self, _dest: u32) {\n\n panic!(\"8259 not support send smi \")\n\n }\n\n\n\n pub unsafe fn send_smi_all(&mut self, _who: IpiAllShorthand) {\n\n panic!(\"8259 not support send smi all\")\n\n }\n\n\n\n pub unsafe fn send_nmi(&mut self, _dest: u32) {\n\n panic!(\"8259 not support send nmi\")\n\n }\n\n\n\n pub unsafe fn send_nmi_all(&mut self, _who: IpiAllShorthand) {\n\n panic!(\"8259 not support send nmi all\")\n\n }\n\n\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 89, "score": 25.79936440703439 }, { "content": " /// **The given MMIO address must already be mapped.**\n\n pub unsafe fn new(base_addr: u64) -> Self {\n\n IoApic {\n\n regs: IoApicRegisters::new(base_addr),\n\n }\n\n }\n\n\n\n /// Initialize the IOAPIC's redirection table entries with the given\n\n /// interrupt offset.\n\n ///\n\n /// Each entry `i` is redirected to `i + offset`.\n\n pub unsafe fn init(&mut self, offset: u8) {\n\n let end = self.max_table_entry() + 1;\n\n\n\n for i in 0..end {\n\n self.regs.set(lo(i), u32::from(i + offset));\n\n self.regs.write(hi(i), 
0);\n\n }\n\n }\n\n\n", "file_path": "src/arch/intel/interrupt/x2apic/io_apic.rs", "rank": 90, "score": 25.20666523888252 }, { "content": " pub fn new(layout: u32) -> Self {\n\n Self(layout)\n\n }\n\n\n\n pub fn base_address(&self) -> u32 {\n\n self.0.get_bits(4..32)\n\n }\n\n\n\n pub fn types(&self) -> u8 {\n\n self.0.get_bits(1..3) as u8\n\n }\n\n\n\n pub fn prefetchable(&self) -> bool {\n\n self.0.get_bit(3)\n\n }\n\n}\n\n\n\n\n\nimpl PciBaseAddress {\n\n pub fn is_none(&self) -> bool {\n", "file_path": "src/devices/bus/pic/mod.rs", "rank": 91, "score": 25.08150196738115 }, { "content": " pub unsafe fn version(&mut self) -> String {\n\n format!(\"xapic: {}\", self.xapic.as_ref().expect(\"xapic not init\").version())\n\n }\n\n\n\n pub unsafe fn eoi(&mut self, _number: Option<u8>) {\n\n self.xapic.as_mut().expect(\"xapic not init\").eoi()\n\n }\n\n\n\n pub unsafe fn init(&mut self, _info: ApicInfo) {\n\n self.xapic.as_mut().expect(\"xapic not init\").cpu_init()\n\n }\n\n\n\n pub unsafe fn enable(&mut self) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn disable(&mut self) { unimplemented!() }\n\n pub unsafe fn enable_timer(&mut self) {\n\n unimplemented!()\n\n }\n\n pub unsafe fn disable_timer(&mut self) { unimplemented!() }\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 92, "score": 24.513154117729364 }, { "content": " /// Returns the IOAPIC IOAPIC_ID.\n\n pub unsafe fn id(&mut self) -> u8 {\n\n ((self.regs.read(IOAPIC_ID) >> 24) & 0xf) as u8\n\n }\n\n\n\n /// Sets the IOAPIC IOAPIC_ID to `id`.\n\n pub unsafe fn set_id(&mut self, id: u8) {\n\n self.regs.write(IOAPIC_ID, u32::from(id) << 24);\n\n }\n\n\n\n /// Returns the IOAPIC version.\n\n pub unsafe fn version(&mut self) -> u8 {\n\n (self.regs.read(IOAPIC_VERSION) & 0xff) as u8\n\n }\n\n\n\n /// Returns the entry number (starting at zero) of the highest entry in the\n\n /// redirection table.\n\n pub unsafe fn max_table_entry(&mut self) -> u8 {\n\n ((self.regs.read(IOAPIC_VERSION) >> 16) & 
0xff) as u8\n\n }\n", "file_path": "src/arch/intel/interrupt/x2apic/io_apic.rs", "rank": 93, "score": 24.417350800749084 }, { "content": " }\n\n\n\n pub unsafe fn enable(&mut self) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").disable()\n\n }\n\n\n\n pub unsafe fn disable(&mut self) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").disable()\n\n }\n\n\n\n pub unsafe fn enable_timer(&mut self) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").enable_timer()\n\n }\n\n\n\n pub unsafe fn disable_timer(&mut self) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").disable_timer()\n\n }\n\n\n\n pub unsafe fn set_timer_mode(&mut self, mode: TimerMode) {\n\n self.local_apic.as_mut().expect(\"local apic not init\").set_timer_mode(mode)\n", "file_path": "src/arch/intel/interrupt/controller.rs", "rank": 94, "score": 23.64378952804819 }, { "content": "pub enum TimerMode {\n\n /// Timer only fires once.\n\n OneShot = 0b00,\n\n /// Timer fires periodically.\n\n Periodic = 0b01,\n\n /// Timer fires at an absolute time.\n\n TscDeadline = 0b10,\n\n}\n\n\n\nimpl Into<u64> for TimerMode {\n\n fn into(self) -> u64 {\n\n self as u64\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\n#[repr(u8)]\n\npub enum TimerDivide {\n\n /// Divide by 2.\n\n Div2 = 0b0000,\n", "file_path": "src/arch/intel/interrupt/x2apic/register.rs", "rank": 95, "score": 23.41644760385786 }, { "content": "use crate::arch::intel::chips::port::Port;\n\nuse crate::devices::bus::pic::{CfgSpaceReader, CfgSpaceWriter, get_offset, PicAccess};\n\n\n\nuse super::{CONFIG_ADDRESS, CONFIG_DATA};\n\n\n\npub struct ConfigAddressPort {\n\n port: Port<u32>\n\n}\n\n\n\nimpl ConfigAddressPort {\n\n pub fn new() -> Self {\n\n Self {\n\n port: unsafe { Port::new(CONFIG_ADDRESS) }\n\n }\n\n }\n\n\n\n pub unsafe fn read(&mut self, addr: &DeviceAddress) -> u32 {\n\n self.write(addr);\n\n self.port.read()\n\n }\n", "file_path": "src/devices/bus/pic/dev.rs", "rank": 96, "score": 
23.239934944066423 }, { "content": "\n\n pub unsafe fn write(&mut self, addr: &DeviceAddress) {\n\n self.port.write(addr.address())\n\n }\n\n}\n\n\n\npub struct ConfigDataPort {\n\n port: Port<u32>\n\n}\n\n\n\nimpl ConfigDataPort {\n\n pub fn new() -> Self {\n\n Self {\n\n port: unsafe { Port::new(CONFIG_DATA) }\n\n }\n\n }\n\n\n\n pub unsafe fn write(&mut self, addr: &DeviceAddress, value: u32) {\n\n let mut cfg = ConfigAddressPort::new();\n\n cfg.write(addr);\n", "file_path": "src/devices/bus/pic/dev.rs", "rank": 97, "score": 22.84390056095433 }, { "content": "\n\n\n\n /// Sets the IPI destination mode.\n\n ///\n\n /// Default: Physical.\n\n set_attr!(set_ipi_destination_mode,ipi_destination_mode,IpiDestMode);\n\n\n\n\n\n /// Builds a new `LocalApic`.\n\n ///\n\n /// # Errors\n\n ///\n\n /// This function returns an error if any of the required fields are empty.\n\n pub fn build(self) -> Result<Self, String> {\n\n if cfg!(x2apic)\n\n && self.timer_vector.is_none()\n\n || self.error_vector.is_none()\n\n || self.spurious_vector.is_none() {\n\n return Err(String::from(\"x2apic: required field(s) empty\"));\n\n }\n\n if cfg!(xapic) && self.ioapic_offset.is_none() {\n\n return Err(String::from(\"xapic: required field(s) empty\"));\n\n }\n\n Ok(self)\n\n }\n\n}", "file_path": "src/arch/intel/interrupt/mod.rs", "rank": 98, "score": 22.72373348336713 } ]
Rust
src/server/mod.rs
Twixes/metrobaza
edc5bfa8080b91fffdf13276ec2f79d340582963
use crate::config; use crate::constructs::components::Validatable; use crate::executor::{ExecutorPayload, QueryResult}; use crate::sql::parse_statement; use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Method, Request, Response, Server, StatusCode}; use serde::{ser::SerializeMap, Serialize, Serializer}; use std::collections::HashMap; use std::{convert, net, str::FromStr}; use thiserror::Error; use tokio::sync::{mpsc, oneshot}; use tokio::time; use tracing::*; use ulid::Ulid; #[derive(Error, Debug, PartialEq)] #[error("ServerError: {0}")] pub struct ServerError(pub String); impl Serialize for ServerError { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut map = serializer.serialize_map(Some(2))?; map.serialize_entry("type", "server")?; map.serialize_entry("message", &self.0)?; map.end() } } async fn process_post( executor_tx: mpsc::Sender<ExecutorPayload>, body: &str, ) -> (StatusCode, String) { let statement = parse_statement(&body); if let Err(parsing_error) = statement { return ( StatusCode::BAD_REQUEST, serde_json::to_string(&parsing_error).unwrap(), ); } let statement = statement.unwrap(); if let Err(validation_error) = statement.validate() { return ( StatusCode::BAD_REQUEST, serde_json::to_string(&validation_error).unwrap(), ); } let (resp_tx, resp_rx) = oneshot::channel::<QueryResult>(); if let Err(_) = executor_tx.send((statement, resp_tx)).await { return ( StatusCode::INTERNAL_SERVER_ERROR, serde_json::to_string(&ServerError("The query executor has disengaged.".into())) .unwrap(), ); } let query_result = resp_rx.await; ( StatusCode::OK, serde_json::to_string_pretty(&query_result.unwrap()).unwrap(), ) } async fn process_get( _executor_tx: mpsc::Sender<ExecutorPayload>, query: Option<&str>, ) -> (StatusCode, String) { if let Some(query_string) = query { if let Ok(query_map) = serde_urlencoded::from_str::<HashMap<String, String>>(query_string) { if let Some(query) = query_map.get("query") { 
(StatusCode::OK, query.to_string()) } else { (StatusCode::BAD_REQUEST, "TODO: No query param".to_string()) } } else { ( StatusCode::BAD_REQUEST, "TODO: Bad query string".to_string(), ) } } else { (StatusCode::BAD_REQUEST, "TODO: No query string".to_string()) } } async fn echo( executor_tx: mpsc::Sender<ExecutorPayload>, req: Request<Body>, ) -> Result<Response<Body>, hyper::Error> { let timer = time::Instant::now(); let request_id = Ulid::new(); debug!("⚡️ Received request ID {}", request_id); let response_builder = Response::builder().header("Content-Type", "application/json"); let result = match (req.uri().path(), req.method()) { ("/", &Method::POST) => { let body_bytes = hyper::body::to_bytes(req.into_body()).await?; let body = String::from_utf8(body_bytes.into_iter().collect()).unwrap(); let (status_code, response_string) = process_post(executor_tx, &body).await; Ok(response_builder .header("Content-Type", "application/json") .status(status_code) .body(Body::from(response_string)) .unwrap()) } ("/", &Method::GET) => { let query = req.uri().query(); let (status_code, response_string) = process_get(executor_tx, query).await; Ok(response_builder .status(status_code) .body(Body::from(response_string)) .unwrap()) } ("/", _) => Ok(response_builder .status(StatusCode::METHOD_NOT_ALLOWED) .body(Body::default()) .unwrap()), _ => Ok(response_builder .status(StatusCode::NOT_FOUND) .body(Body::default()) .unwrap()), }; debug!( "🪃 Finished request ID {} in {} µs", request_id, timer.elapsed().as_micros() ); result } async fn shutdown_signal() { tokio::signal::ctrl_c() .await .expect("Failed to install Ctrl+C signal handler"); info!("💤 Shutting down gracefully..."); } pub async fn start_server(config: &config::Config, executor_tx: mpsc::Sender<ExecutorPayload>) { let tcp_listen_address = net::SocketAddr::new( net::IpAddr::from_str(&config.tcp_listen_host).unwrap(), config.tcp_listen_port, ); let server = Server::bind(&tcp_listen_address) .serve(make_service_fn(move |_conn| { 
let executor_tx = executor_tx.clone(); async move { Ok::<_, convert::Infallible>(service_fn(move |req| echo(executor_tx.clone(), req))) } })) .with_graceful_shutdown(shutdown_signal()); info!("👂 Server listening on {}...", tcp_listen_address); if let Err(e) = server.await { error!("‼️ Encountered server error: {}", e); } else { debug!("⏹ Server no longer listening"); } }
use crate::config; use crate::constructs::components::Validatable; use crate::executor::{ExecutorPayload, QueryResult}; use crate::sql::parse_statement; use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Method, Request, Response, Server, StatusCode}; use serde::{ser::SerializeMap, Serialize, Serializer}; use std::collections::HashMap; use std::{convert, net, str::FromStr}; use thiserror::Error; use tokio::sync::{mpsc, oneshot}; use tokio::time; use tracing::*; use ulid::Ulid; #[derive(Error, Debug, PartialEq)] #[error("ServerError: {0}")] pub struct ServerError(pub String); impl Serialize for ServerError { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let mut map = serializer.serialize_map(Some(2))?; map.serialize_entry("type", "server")?; map.serialize_entry("message", &self.0)?; map.end() } } async fn process_post( executor_tx: mpsc::Sender<ExecutorPayload>, body: &str, ) -> (StatusCode, String) { let statement = parse_statement(&body); if let Err(parsing_error) = statement { return ( StatusCode::BAD_REQUEST, serde_json::to_string(&parsing_error).unwrap(), ); } let statement = statement.unwrap(); if let Err(validation_error) = statement.validate() { return ( StatusCode::BAD_REQUEST, serde_json::to_string(&validation_error).unwrap(), ); } let (resp_tx, resp_rx) = oneshot::channel::<QueryResult>(); if let Err(_) = executor_tx.send((statement, resp_tx)).await { return ( StatusCode::INTERNAL_SERVER_ERROR, serde_json::to_string(&ServerError("The query executor has disengaged.".into())) .unwrap(), ); } let query_result = resp_rx.await; ( StatusCode::OK, serde_json::to_string_pretty(&query_result.unwrap()).unwrap(), ) } async fn process_get( _executor_tx: mpsc::Sender<ExecutorPayload>, query: Option<&str>, ) -> (StatusCode, String) { if let Some(query_string) = query { if let Ok(query_map) = serde_urlencoded::from_str::<HashMap<String, String>>(query_string) { if let Some(query) = query_map.get("query") { 
(StatusCode::OK, query.to_string()) } else { (StatusCode::BAD_REQUEST, "TODO: No query param".to_string()) } } else { ( StatusCode::BAD_REQUEST, "TODO: Bad query string".to_string(), ) } } else { (StatusCode::BAD_REQUEST, "TODO: No query string".to_string()) } } async fn echo( executor_tx: mpsc::Sender<ExecutorPayload>, req: Request<Body>, ) -> Result<Response<Body>, hyper::Error> { let timer = time::Instant::now(); let request_id = Ulid::new(); debug!("⚡️ Received request ID {}", request_id); let response_builder = Response::builder().header("Content-Type", "application/json"); let result = match (req.uri().path(), req.method()) { ("/", &Method::POST) => { let body_bytes = hyper::body::to_bytes(req.into_body()).await?; let body = String::from_utf8(body_bytes.into_iter().collect()).unwrap(); let (status_code, response_string) = process_post(executor_tx, &body).await; Ok(response_builder .header("Content-Type", "application/json") .status(status_code) .body(Body::from(response_string)) .unwrap()) } ("/", &Method::GET) => { let query = req.uri().query(); let (status_code, response_string) = process_get(executor_tx, query).await; Ok(response_builder .status(status_code) .body(Body::from(response_string)) .unwrap()) } ("/", _) => Ok(response_builder .status(StatusCode::METHOD_NOT_ALLOWED) .body(Body::default()) .unwrap()), _ => Ok(response_builder .status(StatusCode::NOT_FOUND) .body(Body::default()) .unwrap()), }; debug!( "🪃 Finished request ID {} in {} µs", request_id, timer.elapsed().as_micros() ); result } async fn shutdown_signal() { tokio::signal::ctrl_c() .await .expect("Failed to install Ctrl+C signal handler"); info!("💤 Shutting down gracefully..."); } pub async fn start_server(config: &config::Config, executor_tx: mpsc::Sender<ExecutorPayload>) { let tcp_listen_address = net::SocketAddr::new( net::IpAddr::from_str(&config.tcp_listen_host).unwrap(), config.tcp_listen_port, ); let server = Server::bind(&tcp_listen_address) .serve(
) .with_graceful_shutdown(shutdown_signal()); info!("👂 Server listening on {}...", tcp_listen_address); if let Err(e) = server.await { error!("‼️ Encountered server error: {}", e); } else { debug!("⏹ Server no longer listening"); } }
make_service_fn(move |_conn| { let executor_tx = executor_tx.clone(); async move { Ok::<_, convert::Infallible>(service_fn(move |req| echo(executor_tx.clone(), req))) } })
call_expression
[ { "content": "pub fn parse_statement(input: &str) -> Result<Statement, SyntaxError> {\n\n let tokens = tokenize_statement(input);\n\n let ExpectOk {\n\n rest,\n\n outcome: found_token_first,\n\n ..\n\n } = expect_next_token(\n\n &tokens,\n\n &format!(\"{} or {}\", Keyword::Create, Keyword::Insert),\n\n )?;\n\n match found_token_first {\n\n // CREATE\n\n Token {\n\n value: TokenValue::Const(Keyword::Create),\n\n ..\n\n } => {\n\n let ExpectOk {\n\n rest,\n\n outcome: found_token_second,\n\n ..\n", "file_path": "src/sql/parser.rs", "rank": 0, "score": 193228.30048860516 }, { "content": "fn get_env(key: &str) -> Result<String, env::VarError> {\n\n env::var(envify_config_key(key))\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 1, "score": 153903.49235087726 }, { "content": "fn get_env_or(key: &str, default: String) -> String {\n\n get_env(key).unwrap_or(default)\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 2, "score": 134847.24333566817 }, { "content": "pub fn tokenize_statement(input: &str) -> Vec<Token> {\n\n let mut tokens = Vec::<Token>::new();\n\n for (line_index, line) in input.lines().enumerate() {\n\n let mut token_candidates = Vec::<String>::new();\n\n let mut current_candidate: String = \"\".to_string();\n\n let mut is_current_character_escaped = false;\n\n let mut is_current_character_inside_string = false;\n\n for character in line.chars() {\n\n // Act upon tokenization-level semantics, but only if the current character is not escaped with a backslash\n\n if !is_current_character_escaped {\n\n // Detect if the next character is escaped\n\n if character == Delimiter::ESCAPE_CHARACTER {\n\n is_current_character_escaped = true;\n\n continue;\n\n }\n\n // Detect if this character starts/ends a string\n\n if character == Delimiter::STRING_MARKER {\n\n current_candidate.push(character);\n\n if is_current_character_inside_string {\n\n token_candidates.push(current_candidate.clone());\n", "file_path": "src/sql/tokenizer.rs", "rank": 3, "score": 
133267.0163050931 }, { "content": "// Format internal config key to environment variable name.\n\nfn envify_config_key(key: &str) -> String {\n\n format!(\"EMDRIVE_{}\", &key.to_uppercase())\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 4, "score": 127156.6969228917 }, { "content": "pub fn expect_identifier<'t>(tokens: &'t [Token]) -> ExpectResult<'t, String> {\n\n let ExpectOk {\n\n outcome: found_token,\n\n ..\n\n } = expect_next_token(tokens, &\"an identifier\")?;\n\n match found_token {\n\n Token {\n\n value: TokenValue::Arbitrary(value),\n\n ..\n\n } => Ok(ExpectOk {\n\n rest: &tokens[1..],\n\n tokens_consumed_count: 1,\n\n outcome: value.to_owned(),\n\n }),\n\n wrong_token => Err(SyntaxError(format!(\n\n \"Expected an identifier, instead found {}.\",\n\n wrong_token\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/sql/expects/semantic.rs", "rank": 5, "score": 122876.91511469244 }, { "content": "/// Conjure an InsertStatement from tokens following INSERT.\n\npub fn expect_insert<'t>(tokens: &'t [Token]) -> ExpectResult<'t, InsertStatement> {\n\n let ExpectOk { rest, .. } = expect_token_value(tokens, &TokenValue::Const(Keyword::Into))?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_table_name,\n\n outcome: table_name,\n\n } = expect_identifier(rest)?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_column_names,\n\n outcome: column_names,\n\n } = expect_enclosed_comma_separated(rest, expect_identifier)?;\n\n let ExpectOk { rest, .. 
} = expect_token_value(rest, &TokenValue::Const(Keyword::Values))?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_values,\n\n outcome: values,\n\n } = expect_enclosed_comma_separated(rest, expect_data_instance)?;\n\n Ok(ExpectOk {\n\n rest,\n", "file_path": "src/sql/expects/statements/insert.rs", "rank": 6, "score": 109959.20557343564 }, { "content": "/// Conjure an SelectStatement from tokens following SELECT.\n\npub fn expect_select<'t>(tokens: &'t [Token]) -> ExpectResult<'t, SelectStatement> {\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_columns,\n\n outcome: columns,\n\n } = expect_comma_separated(tokens, expect_select_column)?;\n\n let ExpectOk { rest, .. } = expect_token_value(rest, &TokenValue::Const(Keyword::From))?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_table_name,\n\n outcome: table_name,\n\n } = expect_identifier(rest)?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_where_clause,\n\n outcome: maybe_where_clause,\n\n } = detect(\n\n rest,\n\n |tokens| expect_token_value(tokens, &TokenValue::Const(Keyword::Where)),\n\n expect_expression,\n", "file_path": "src/sql/expects/statements/select.rs", "rank": 7, "score": 109959.20557343564 }, { "content": "pub fn expect_end_of_statement<'t>(tokens: &'t [Token]) -> ExpectResult<'t, ()> {\n\n match tokens.first() {\n\n None => Ok(ExpectOk {\n\n rest: tokens,\n\n tokens_consumed_count: 0,\n\n outcome: (),\n\n }),\n\n Some(wrong_token) => Err(SyntaxError(format!(\n\n \"Expected end of statement, instead found {}.\",\n\n wrong_token\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/sql/expects/generic.rs", "rank": 8, "score": 106974.05894597938 }, { "content": "/// Conjure a CreateTableStatement from tokens following CREATE TABLE.\n\npub fn expect_create_table<'t>(tokens: &'t [Token]) -> ExpectResult<'t, CreateTableStatement> {\n\n let (if_not_exists, rest, tokens_consumed_count_if_not_exists) 
=\n\n match expect_token_values_sequence(\n\n tokens,\n\n &[\n\n TokenValue::Const(Keyword::If),\n\n TokenValue::Const(Keyword::Not),\n\n TokenValue::Const(Keyword::Exists),\n\n ],\n\n ) {\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n ..\n\n }) => (true, rest, tokens_consumed_count),\n\n Err(_) => (false, tokens, 0),\n\n };\n\n let ExpectOk {\n\n outcome: table,\n\n rest,\n", "file_path": "src/sql/expects/statements/create_table.rs", "rank": 9, "score": 102828.07074435079 }, { "content": "pub fn expect_select_column<'t>(tokens: &'t [Token]) -> ExpectResult<'t, SelectColumn> {\n\n let ExpectOk {\n\n outcome: found_token,\n\n ..\n\n } = expect_next_token(tokens, &\"a SELECT column\")?;\n\n match found_token {\n\n Token {\n\n value: TokenValue::Arbitrary(value),\n\n ..\n\n } => Ok(ExpectOk {\n\n rest: &tokens[1..],\n\n tokens_consumed_count: 1,\n\n outcome: SelectColumn::Identifier(value.to_owned()),\n\n }),\n\n Token {\n\n value: TokenValue::Const(Keyword::Asterisk),\n\n ..\n\n } => Ok(ExpectOk {\n\n rest: &tokens[1..],\n\n tokens_consumed_count: 1,\n\n outcome: SelectColumn::All,\n\n }),\n\n wrong_token => Err(SyntaxError(format!(\n\n \"Expected a SELECT column, instead found {}.\",\n\n wrong_token\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/sql/expects/statements/select.rs", "rank": 10, "score": 98302.19727236127 }, { "content": "pub fn expect_table_definition<'t>(tokens: &'t [Token]) -> ExpectResult<'t, TableDefinition> {\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_name,\n\n outcome: name,\n\n } = expect_identifier(tokens)?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_columns,\n\n outcome: columns,\n\n } = expect_enclosed_comma_separated(rest, expect_column_definition)?;\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_name + tokens_consumed_count_columns,\n\n outcome: TableDefinition::new(name, columns),\n\n })\n\n}\n\n\n", "file_path": 
"src/sql/expects/statements/create_table.rs", "rank": 11, "score": 95963.25186137536 }, { "content": "pub fn expect_column_definition<'t>(tokens: &'t [Token]) -> ExpectResult<'t, ColumnDefinition> {\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_name,\n\n outcome: name,\n\n } = expect_identifier(tokens)?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_data_type,\n\n outcome: data_type,\n\n } = expect_data_type(rest)?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_primary_key,\n\n outcome: primary_key_option,\n\n } = detect(\n\n rest,\n\n |tokens| expect_token_value(tokens, &TokenValue::Const(Keyword::Primary)),\n\n |tokens| expect_token_value(tokens, &TokenValue::Const(Keyword::Key)),\n\n &TokenValue::Const(Keyword::Key),\n", "file_path": "src/sql/expects/statements/create_table.rs", "rank": 12, "score": 95963.25186137536 }, { "content": "pub fn expect_expression<'t>(tokens: &'t [Token]) -> ExpectResult<'t, Expression> {\n\n let ExpectOk {\n\n rest: rest_atom,\n\n tokens_consumed_count: tokens_consumed_count_lhs,\n\n outcome: lhs_raw,\n\n } = expect_data_definition(tokens)?;\n\n let lhs = Expression::Atom(lhs_raw);\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_operator_and_rhs,\n\n outcome: operator_and_rhs,\n\n } = detect(\n\n rest_atom,\n\n |tokens| expect_next_token(tokens, &\"an operator\"),\n\n expect_data_definition,\n\n &\"the right-hand side of the expression\",\n\n )?;\n\n match operator_and_rhs {\n\n Some((\n\n Token {\n", "file_path": "src/sql/expects/semantic.rs", "rank": 13, "score": 83883.92538753668 }, { "content": "fn get_env_cast_or<T: str::FromStr + fmt::Display>(key: &str, default: T) -> T {\n\n let value_raw = get_env(key);\n\n if let Ok(value_raw) = value_raw {\n\n match T::from_str(&value_raw) {\n\n Ok(value) => value,\n\n Err(_) => panic!(\"{} is not a valid {} value!\", value_raw, key),\n\n }\n\n } else {\n\n 
default\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 14, "score": 82663.4937571364 }, { "content": "pub fn expect_function_call<'t>(tokens: &'t [Token]) -> ExpectResult<'t, Function> {\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_call,\n\n outcome: found_token,\n\n } = expect_next_token(tokens, &\"a function name\")?;\n\n match found_token {\n\n Token {\n\n value: TokenValue::Function(found_function),\n\n ..\n\n } => {\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_parentheses,\n\n outcome: _,\n\n } = expect_token_values_sequence(\n\n rest,\n\n &[\n\n TokenValue::Delimiting(Delimiter::ParenthesisOpening),\n\n TokenValue::Delimiting(Delimiter::ParenthesisClosing),\n", "file_path": "src/sql/expects/semantic.rs", "rank": 15, "score": 81779.21336644153 }, { "content": "pub fn expect_identity<'t>(tokens: &'t [Token]) -> ExpectResult<'t, Vec<Token>> {\n\n Ok(ExpectOk {\n\n rest: &[][..],\n\n tokens_consumed_count: tokens.len(),\n\n outcome: tokens.to_vec(),\n\n })\n\n}\n\n\n", "file_path": "src/sql/expects/generic.rs", "rank": 16, "score": 81099.20170359865 }, { "content": "pub fn expect_data_instance<'t>(tokens: &'t [Token]) -> ExpectResult<'t, DataInstance> {\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: found_token,\n\n } = expect_next_token(tokens, &\"a value\")?;\n\n match found_token {\n\n Token {\n\n value: TokenValue::Const(Keyword::Null),\n\n ..\n\n } => Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: DataInstance::Null,\n\n }),\n\n Token {\n\n value: TokenValue::String(found_string),\n\n ..\n\n } => Ok(ExpectOk {\n\n rest,\n", "file_path": "src/sql/expects/semantic.rs", "rank": 17, "score": 79803.63052282877 }, { "content": "pub fn expect_data_type<'t>(tokens: &'t [Token]) -> ExpectResult<'t, DataType> {\n\n let is_nullable = matches!(\n\n expect_token_value(tokens, &TokenValue::Const(Keyword::Nullable)),\n\n Ok(_)\n\n );\n\n let ExpectOk {\n\n 
rest,\n\n tokens_consumed_count,\n\n outcome: data_type,\n\n } = if is_nullable {\n\n expect_enclosed(\n\n &tokens[1..],\n\n expect_data_type_raw,\n\n Delimiter::ParenthesisOpening,\n\n Delimiter::ParenthesisClosing,\n\n )?\n\n } else {\n\n expect_data_type_raw(tokens)?\n\n };\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count: usize::from(is_nullable) + tokens_consumed_count,\n\n outcome: DataType {\n\n raw_type: data_type,\n\n is_nullable,\n\n },\n\n })\n\n}\n\n\n", "file_path": "src/sql/expects/semantic.rs", "rank": 18, "score": 79803.63052282877 }, { "content": "pub fn expect_data_definition<'t>(tokens: &'t [Token]) -> ExpectResult<'t, DataDefinition> {\n\n if let Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: found_function,\n\n }) = expect_function_call(tokens)\n\n {\n\n return Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: DataDefinition::FunctionCall(found_function),\n\n });\n\n }\n\n if let Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: found_data_instance,\n\n }) = expect_data_instance(tokens)\n\n {\n\n return Ok(ExpectOk {\n", "file_path": "src/sql/expects/semantic.rs", "rank": 19, "score": 79803.63052282877 }, { "content": "fn determine_table_dir_path(config: &config::Config, schema: &str, table_name: &str) -> PathBuf {\n\n Path::new(&config.data_directory) // $EMDRIVE_DATA_DIRECTORY\n\n .join(schema) // <$EMDRIVE_DATA_DIRECTORY>/<schema>\n\n .join(table_name) // <$EMDRIVE_DATA_DIRECTORY>/<schema>/<table_name>\n\n}\n\n\n\npub async fn does_table_file_exist(\n\n config: &config::Config,\n\n schema: &str,\n\n table_name: &str,\n\n) -> bool {\n\n let path = determine_table_dir_path(config, schema, table_name).join(\"0\");\n\n match fs::metadata(path).await {\n\n Ok(metadata) => metadata.is_file(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n\npub async fn write_table_file(\n\n config: &config::Config,\n", "file_path": "src/storage/filesystem.rs", "rank": 20, "score": 76768.9805219381 }, { "content": "pub fn 
expect_data_type_raw<'t>(tokens: &'t [Token]) -> ExpectResult<'t, DataTypeRaw> {\n\n let ExpectOk {\n\n outcome: found_token,\n\n ..\n\n } = expect_next_token(tokens, &\"a data type\")?;\n\n match found_token {\n\n Token {\n\n value: TokenValue::Type(found_data_type),\n\n ..\n\n } => Ok(ExpectOk {\n\n rest: &tokens[1..],\n\n tokens_consumed_count: 1,\n\n outcome: *found_data_type,\n\n }),\n\n wrong_token => Err(SyntaxError(format!(\n\n \"Expected a data type, instead found {}.\",\n\n wrong_token\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/sql/expects/semantic.rs", "rank": 21, "score": 76195.06580626284 }, { "content": "// Expert an enclosure, the outcome being enclosure contents excluding opening and closing delimiters.\n\npub fn expect_enclosure<'t>(\n\n tokens: &'t [Token],\n\n opening: Delimiter,\n\n closing: Delimiter,\n\n) -> ExpectResult<'t, &'t [Token]> {\n\n let ExpectOk { rest, .. } = expect_token_value(\n\n tokens,\n\n &TokenValue::Delimiting(Delimiter::ParenthesisOpening),\n\n )?;\n\n let mut current_enclosure_depth: usize = 0;\n\n let maybe_enclosure_size: Option<usize> =\n\n rest.iter()\n\n .position(|current_token| match &current_token.value {\n\n TokenValue::Delimiting(current_delimiter) => {\n\n if current_enclosure_depth == 0 && current_delimiter == &closing {\n\n return true;\n\n }\n\n if current_delimiter == &opening {\n\n current_enclosure_depth += 1;\n\n } else if current_delimiter == &closing {\n", "file_path": "src/sql/expects/generic.rs", "rank": 22, "score": 75751.5083622325 }, { "content": "pub fn consume_all<'t, O>(\n\n tokens: &'t [Token],\n\n expect_something: ExpectFn<'t, O>,\n\n) -> Result<O, SyntaxError> {\n\n let ExpectOk { rest, outcome, .. 
} = expect_something(tokens)?;\n\n expect_end_of_statement(rest)?;\n\n Ok(outcome)\n\n}\n\n\n", "file_path": "src/sql/expects/generic.rs", "rank": 23, "score": 74575.50833425537 }, { "content": "pub fn expect_next_token<'t>(\n\n tokens: &'t [Token],\n\n expectation_description: &dyn std::fmt::Display,\n\n) -> ExpectResult<'t, &'t Token> {\n\n match tokens.first() {\n\n Some(found_token) => Ok(ExpectOk {\n\n rest: &tokens[1..],\n\n tokens_consumed_count: 1,\n\n outcome: found_token,\n\n }),\n\n None => Err(SyntaxError(format!(\n\n \"Expected {}, instead found end of statement.\",\n\n expectation_description\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/sql/expects/generic.rs", "rank": 24, "score": 73768.1606592719 }, { "content": "pub fn expect_token_value<'t>(\n\n tokens: &'t [Token],\n\n expected_token_value: &TokenValue,\n\n) -> ExpectResult<'t, ()> {\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: found_token,\n\n } = expect_next_token(tokens, expected_token_value)?;\n\n if &found_token.value == expected_token_value {\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: (),\n\n })\n\n } else {\n\n Err(SyntaxError(format!(\n\n \"Expected {}, instead found {}.\",\n\n expected_token_value, found_token\n\n )))\n\n }\n\n}\n\n\n", "file_path": "src/sql/expects/generic.rs", "rank": 25, "score": 73768.1606592719 }, { "content": "pub fn expect_enclosed<'t, O>(\n\n tokens: &'t [Token],\n\n expect_inside: ExpectFn<'t, O>,\n\n opening: Delimiter,\n\n closing: Delimiter,\n\n) -> ExpectResult<'t, O> {\n\n let ExpectOk { rest, .. } = expect_token_value(tokens, &TokenValue::Delimiting(opening))?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome,\n\n } = expect_inside(rest)?;\n\n let ExpectOk { rest, .. 
} = expect_token_value(rest, &TokenValue::Delimiting(closing))?;\n\n let tokens_consumed_count = tokens_consumed_count + 2; // +2 to account for parentheses\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome,\n\n })\n\n}\n\n\n", "file_path": "src/sql/expects/generic.rs", "rank": 26, "score": 72592.16063129477 }, { "content": "pub fn expect_token_values_sequence<'t>(\n\n tokens: &'t [Token],\n\n expected_token_values: &[TokenValue],\n\n) -> ExpectResult<'t, ()> {\n\n for (token_index, expected_token_value) in expected_token_values.iter().enumerate() {\n\n expect_token_value(&tokens[token_index..], expected_token_value)?;\n\n }\n\n let tokens_consumed_count = expected_token_values.len();\n\n Ok(ExpectOk {\n\n rest: &tokens[tokens_consumed_count..],\n\n tokens_consumed_count,\n\n outcome: (),\n\n })\n\n}\n\n\n", "file_path": "src/sql/expects/generic.rs", "rank": 27, "score": 71928.54135152663 }, { "content": "pub fn empty_page_blob() -> WriteBlob {\n\n vec![0; PAGE_SIZE]\n\n}\n\n\n", "file_path": "src/storage/paging.rs", "rank": 28, "score": 71928.54135152663 }, { "content": "pub fn construct_blank_table() -> WriteBlob {\n\n // 2 pages, as that's the minimum number of them - 1. the meta page, 2. 
B+ tree root page (a leaf initially)\n\n let mut core_blob: WriteBlob = Vec::with_capacity(PAGE_SIZE * 2);\n\n core_blob.append(\n\n &mut Page::Meta {\n\n layout_version: LATEST_LAYOUT_VERSION,\n\n b_tree_root_page_index: 1,\n\n }\n\n .into(),\n\n );\n\n core_blob.append(\n\n &mut Page::BTreeLeaf {\n\n next_leaf_page_index: 0,\n\n rows: Vec::new(),\n\n }\n\n .into(),\n\n );\n\n assert_eq!(core_blob.len(), PAGE_SIZE * 2);\n\n core_blob\n\n}\n", "file_path": "src/storage/paging.rs", "rank": 29, "score": 71928.54135152663 }, { "content": "pub fn detect<'t, P, M>(\n\n tokens: &'t [Token],\n\n expect_predicate: ExpectFn<'t, P>,\n\n expect_meaning: ExpectFn<'t, M>,\n\n expectation_description: &dyn std::fmt::Display,\n\n) -> ExpectResult<'t, Option<(P, M)>> {\n\n match expect_predicate(tokens) {\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_predicate,\n\n outcome: outcome_predicate,\n\n }) => {\n\n // Let's just make sure we're not at the end of the statement\n\n expect_next_token(rest, expectation_description)?;\n\n match expect_meaning(rest) {\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_meaning,\n\n outcome: outcome_meaning,\n\n }) => Ok(ExpectOk {\n", "file_path": "src/sql/expects/generic.rs", "rank": 30, "score": 71530.58222766138 }, { "content": "pub fn expect_comma_separated<'t, O>(\n\n tokens: &'t [Token],\n\n expect_element: ExpectFn<'t, O>,\n\n) -> ExpectResult<'t, Vec<O>> {\n\n let mut tokens_consumed_total_count = 0;\n\n let mut outcomes = Vec::<O>::new();\n\n loop {\n\n // Parse next element\n\n let ExpectOk {\n\n tokens_consumed_count,\n\n outcome,\n\n ..\n\n } = expect_element(&tokens[tokens_consumed_total_count..])?;\n\n tokens_consumed_total_count += tokens_consumed_count;\n\n outcomes.push(outcome);\n\n // Check for the comma (trailing comma disallowed)\n\n match expect_token_value(\n\n &tokens[tokens_consumed_total_count..],\n\n &TokenValue::Delimiting(Delimiter::Comma),\n\n ) {\n", 
"file_path": "src/sql/expects/generic.rs", "rank": 31, "score": 70752.5413235495 }, { "content": "pub fn expect_enclosed_comma_separated<'t, O>(\n\n tokens: &'t [Token],\n\n expect_element: ExpectFn<'t, O>,\n\n) -> ExpectResult<'t, Vec<O>> {\n\n const SEPARATOR: TokenValue = TokenValue::Delimiting(Delimiter::Comma);\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count,\n\n outcome: enclosure_tokens,\n\n } = expect_enclosure(\n\n tokens,\n\n Delimiter::ParenthesisOpening,\n\n Delimiter::ParenthesisClosing,\n\n )?;\n\n // Disallow empty enclosures\n\n if enclosure_tokens.is_empty() {\n\n return Err(SyntaxError(format!(\n\n \"Found an enclosure delimited by {} and {} as expected, but it's empty.\",\n\n tokens[0],\n\n tokens[tokens_consumed_count - 1],\n", "file_path": "src/sql/expects/generic.rs", "rank": 32, "score": 69041.57324190714 }, { "content": "type ExpectFn<'t, O> = fn(&'t [Token]) -> ExpectResult<'t, O>;\n", "file_path": "src/sql/expects/mod.rs", "rank": 33, "score": 60803.73819827433 }, { "content": "pub trait Validatable {\n\n /// Make sure that this definition (self) actually makes sense.\n\n fn validate(&self) -> Result<(), ValidationError>;\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct ColumnDefinition {\n\n pub name: String,\n\n pub data_type: DataType,\n\n pub primary_key: bool,\n\n pub default: Option<DataDefinition>,\n\n}\n\n\n\nimpl Validatable for ColumnDefinition {\n\n fn validate(&self) -> Result<(), ValidationError> {\n\n if self.name.is_empty() {\n\n return Err(ValidationError(\"A column must have a name\".into()));\n\n }\n\n Ok(())\n\n }\n", "file_path": "src/constructs/components.rs", "rank": 34, "score": 42691.94113031176 }, { "content": "fn main() {\n\n setup_panic!(Metadata {\n\n name: \"Emdrive\".into(),\n\n version: env!(\"CARGO_PKG_VERSION\").into(),\n\n authors: \"\".into(), // Empty to disable\n\n homepage: env!(\"CARGO_PKG_REPOSITORY\").into(),\n\n });\n\n let subscriber = FmtSubscriber::builder()\n\n 
.with_env_filter(EnvFilter::from_str(&\"emdrive=debug\").unwrap())\n\n .finish();\n\n tracing::subscriber::set_global_default(subscriber).expect(\"setting default subscriber failed\");\n\n info!(\"🔢 Starting Emdrive...\");\n\n let instance = Instance::preload();\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n runtime.block_on(instance.run()).unwrap();\n\n info!(\"🛑 Emdrive shut down\");\n\n}\n", "file_path": "src/main.rs", "rank": 35, "score": 42106.03378973085 }, { "content": "/// Trait for reading data from blobs.\n\npub trait Encodable: Sized {\n\n /// Extract value from blob in an optimized way, returning the rest of the blob for futher processing.\n\n fn try_decode<'b>(blob: ReadBlob<'b>) -> Result<(Self, ReadBlob<'b>), String>;\n\n\n\n /// How many bytes are needed to encode this value.\n\n /// Returns the advanced cursor position, 0 being the very front of the blob.\n\n fn encoded_size(&self) -> usize;\n\n\n\n /// Encode and write this value to blob at specified position.\n\n /// Returns the advanced cursor position, 0 being the very front of the blob.\n\n fn encode(&self, blob: &mut WriteBlob, position: usize) -> usize;\n\n\n\n /// Like `encode`, but writing to the end of the blob.\n\n fn encode_back(&self, blob: &mut WriteBlob, position: usize) -> usize {\n\n let retreated_position = position - self.encoded_size();\n\n self.encode(blob, retreated_position);\n\n retreated_position\n\n }\n\n}\n\n\n", "file_path": "src/storage/encoding.rs", "rank": 36, "score": 40770.71536356908 }, { "content": "pub trait EncodableWithAssumption<'b>: Sized {\n\n type Assumption;\n\n\n\n /// Like `Encodable::try_decode`, but with `assumption` which allows for contextful decoding.\n\n fn try_decode_assume(\n\n blob: ReadBlob<'b>,\n\n assumption: Self::Assumption,\n\n ) -> Result<(Self, ReadBlob<'b>), String>;\n\n}\n\n\n\nmacro_rules! 
encodable_number_impl {\n\n ($($t:ty)*) => ($(\n\n impl Encodable for $t {\n\n fn try_decode<'b>(blob: ReadBlob<'b>) -> Result<(Self, ReadBlob<'b>), String> {\n\n const SIZE: usize = mem::size_of::<$t>();\n\n Ok((\n\n Self::from_be_bytes(\n\n unsafe {\n\n // SAFETY: Recasting to an array is safe when blob is at least SIZE bytes long.\n\n *(blob[..SIZE].as_ptr() as *const [u8; SIZE])\n", "file_path": "src/storage/encoding.rs", "rank": 37, "score": 39017.841896667545 }, { "content": " let (statement, tx) = payload;\n\n debug!(\"➡️ Executing statement: {:?}\", statement);\n\n // TODO: Implement real query execution\n\n let result = QueryResult {\n\n column_names: vec![\"id\".to_string()],\n\n rows: vec![Row(vec![DataInstance::Direct(DataInstanceRaw::UInt64(1))])],\n\n };\n\n tx.send(result).unwrap();\n\n }\n\n debug!(\"🎗 Executor disengaged\");\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/executor/mod.rs", "rank": 47, "score": 31855.741248743154 }, { "content": "#[derive(Debug)]\n\npub struct QueryResult {\n\n pub column_names: Vec<String>,\n\n pub rows: Vec<Row>,\n\n}\n\n\n\nimpl Serialize for QueryResult {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut seq = serializer.serialize_seq(Some(self.rows.len()))?;\n\n for row in &self.rows {\n\n seq.serialize_element(&NamedRow(&self.column_names, &row.0))?;\n\n }\n\n seq.end()\n\n }\n\n}\n\n\n\n/// Payload with a statement and a sender to return the result to.\n", "file_path": "src/executor/mod.rs", "rank": 48, "score": 31855.167543761523 }, { "content": "pub type ExecutorPayload = (Statement, oneshot::Sender<QueryResult>);\n\n\n\npub struct Executor {\n\n config: config::Config,\n\n tables: Arc<Mutex<Vec<TableDefinition>>>,\n\n rx: Option<mpsc::Receiver<ExecutorPayload>>,\n\n}\n\n\n\nimpl Executor {\n\n pub fn new(config: &config::Config) -> Self {\n\n Executor {\n\n config: config.clone(),\n\n tables: Arc::new(Mutex::new(Vec::new())),\n\n rx: None,\n\n 
}\n\n }\n\n\n\n pub fn prepare_channel(&mut self) -> mpsc::Sender<ExecutorPayload> {\n\n let (tx, rx) = mpsc::channel(MAX_IN_FLIGHT_REQUESTS);\n\n self.rx = Some(rx);\n", "file_path": "src/executor/mod.rs", "rank": 49, "score": 31853.154857722064 }, { "content": " tx\n\n }\n\n\n\n pub async fn bootstrap(&mut self) -> Result<(), io::Error> {\n\n debug!(\"⬆️ Bootstraping the executor...\");\n\n for table in SystemTable::ALL {\n\n let table_definition = table.get_definition();\n\n write::ensure_table_file_exists(&self.config, &table_definition).await?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub async fn start(&mut self) -> Result<(), io::Error> {\n\n let mut rx = self\n\n .rx\n\n .take()\n\n .expect(\"`prepare_channel` must be ran before `start`\");\n\n self.bootstrap().await?;\n\n debug!(\"🗡 Executor engaged\");\n\n while let Some(payload) = rx.recv().await {\n", "file_path": "src/executor/mod.rs", "rank": 50, "score": 31851.982923981468 }, { "content": "use crate::{\n\n config,\n\n constructs::components::TableDefinition,\n\n storage::{filesystem::seek_read_decode_page, paging::Page, Row},\n\n};\n\n\n\npub async fn read_all_rows(\n\n config: &config::Config,\n\n schema: &str,\n\n table_definition: &TableDefinition,\n\n) -> Result<Vec<Row>, String> {\n\n let meta = seek_read_decode_page(&config, &schema, &table_definition, 0)\n\n .await\n\n .unwrap();\n\n match meta {\n\n Page::Meta {\n\n b_tree_root_page_index,\n\n ..\n\n } => {\n\n let data =\n", "file_path": "src/executor/read.rs", "rank": 51, "score": 31848.691280558472 }, { "content": "mod read;\n\nmod write;\n\n\n\nuse parking_lot::Mutex;\n\nuse std::io;\n\nuse std::sync::Arc;\n\n\n\nuse crate::config;\n\nuse crate::storage::system::SystemTable;\n\nuse crate::{\n\n constructs::components::{DataInstance, DataInstanceRaw, TableDefinition},\n\n sql::Statement,\n\n storage::{NamedRow, Row},\n\n};\n\nuse serde::{ser::SerializeSeq, Serialize, Serializer};\n\nuse tokio::sync::{mpsc, oneshot};\n\nuse tracing::*;\n\n\n\nconst 
MAX_IN_FLIGHT_REQUESTS: usize = 100;\n\n\n", "file_path": "src/executor/mod.rs", "rank": 52, "score": 31846.468734672766 }, { "content": "use std::io;\n\n\n\nuse crate::config;\n\nuse crate::storage::filesystem::{\n\n does_table_file_exist, seek_read_decode_page, seek_write_page, write_table_file,\n\n};\n\nuse crate::storage::paging::{construct_blank_table, Page};\n\nuse crate::storage::system::SYSTEM_SCHEMA_NAME;\n\nuse crate::{constructs::components::TableDefinition, storage::Row};\n\nuse tracing::*;\n\n\n\npub async fn ensure_table_file_exists(\n\n config: &config::Config,\n\n table_definition: &TableDefinition,\n\n) -> io::Result<()> {\n\n if !does_table_file_exist(&config, SYSTEM_SCHEMA_NAME, &table_definition.name).await {\n\n let blank_table_blob = construct_blank_table();\n\n match write_table_file(\n\n &config,\n\n SYSTEM_SCHEMA_NAME,\n", "file_path": "src/executor/write.rs", "rank": 53, "score": 31843.84224048798 }, { "content": " &table_definition.name,\n\n blank_table_blob,\n\n )\n\n .await\n\n {\n\n Ok(_) => debug!(\"Initialized system table `{}`\", table_definition.name),\n\n Err(error) => {\n\n trace!(\n\n \"Failed to initialize system table `{}`: {}\",\n\n table_definition.name,\n\n error\n\n );\n\n return Err(error);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\npub async fn b_tree_insert(\n", "file_path": "src/executor/write.rs", "rank": 54, "score": 31843.744959006195 }, { "content": " config: &config::Config,\n\n schema: &str,\n\n table_definition: &TableDefinition,\n\n row: Row,\n\n) -> Result<(), String> {\n\n let b_tree_root_page_index =\n\n match seek_read_decode_page(config, schema, table_definition, 0).await? 
{\n\n Page::Meta {\n\n b_tree_root_page_index,\n\n ..\n\n } => b_tree_root_page_index,\n\n _ => panic!(\n\n \"Found a non-meta page at the beginning of table {}.{}'s data file \",\n\n schema, table_definition.name\n\n ),\n\n };\n\n let mut b_tree_root_page =\n\n seek_read_decode_page(config, schema, table_definition, b_tree_root_page_index).await?;\n\n match b_tree_root_page {\n\n Page::BTreeLeaf { ref mut rows, .. } => rows.push(row),\n", "file_path": "src/executor/write.rs", "rank": 55, "score": 31843.059027364 }, { "content": " seek_read_decode_page(&config, &schema, &table_definition, b_tree_root_page_index)\n\n .await\n\n .unwrap();\n\n match data {\n\n Page::BTreeLeaf { rows, .. } => Ok(rows),\n\n _ => Err(\"Invalid page type 1\".to_string()),\n\n }\n\n }\n\n _ => Err(\"Invalid page type 0\".to_string()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod read_tests {\n\n use crate::{\n\n constructs::components::{ColumnDefinition, DataType, DataTypeRaw},\n\n storage::{filesystem::write_table_file, paging::construct_blank_table},\n\n };\n\n\n\n use super::*;\n", "file_path": "src/executor/read.rs", "rank": 56, "score": 31842.269680862202 }, { "content": " is_nullable: false,\n\n },\n\n primary_key: false,\n\n default: None,\n\n },\n\n ],\n\n )\n\n }\n\n\n\n #[tokio::test]\n\n async fn read_all_rows_empty() {\n\n let config = config::Config {\n\n data_directory: env!(\"TMPDIR\").to_string(),\n\n ..Default::default()\n\n };\n\n let schema = \"test\";\n\n let data = construct_blank_table();\n\n let test_table = get_test_table();\n\n write_table_file(&config, schema, &test_table.name, data)\n\n .await\n\n .unwrap();\n\n let rows = read_all_rows(&config, schema, &test_table).await.unwrap();\n\n assert_eq!(rows.len(), 0);\n\n }\n\n}\n", "file_path": "src/executor/read.rs", "rank": 57, "score": 31840.651231853957 }, { "content": " use pretty_assertions::assert_eq;\n\n use rand::{distributions::Alphanumeric, thread_rng, Rng};\n\n\n\n fn get_test_table() -> TableDefinition {\n\n 
TableDefinition::new(\n\n \"tables\".into(),\n\n vec![\n\n ColumnDefinition {\n\n name: \"id\".into(),\n\n data_type: DataType {\n\n raw_type: DataTypeRaw::Uuid,\n\n is_nullable: false,\n\n },\n\n primary_key: true,\n\n default: None,\n\n },\n\n ColumnDefinition {\n\n name: \"table_name\".into(),\n\n data_type: DataType {\n\n raw_type: DataTypeRaw::String,\n", "file_path": "src/executor/read.rs", "rank": 58, "score": 31837.871073056333 }, { "content": " _ => panic!(\n\n \"Found a non-B-tree page at `b_tree_root_page_index` of table {}.{}'s data file\",\n\n schema, table_definition.name\n\n ),\n\n };\n\n seek_write_page(\n\n config,\n\n schema,\n\n &table_definition.name,\n\n b_tree_root_page_index,\n\n b_tree_root_page.into(),\n\n )\n\n .await\n\n .unwrap();\n\n Ok(())\n\n}\n", "file_path": "src/executor/write.rs", "rank": 59, "score": 31836.421987743433 }, { "content": " pub values: Vec<DataInstance>,\n\n}\n\n\n\nimpl Validatable for InsertStatement {\n\n fn validate(&self) -> Result<(), ValidationError> {\n\n Ok(()) // TODO: Add checks\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum SelectColumn {\n\n All,\n\n Identifier(String),\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct SelectStatement {\n\n pub columns: Vec<SelectColumn>,\n\n /// String means table name\n\n pub source: String,\n\n pub where_clause: Option<Expression>,\n\n}\n\n\n\nimpl Validatable for SelectStatement {\n\n fn validate(&self) -> Result<(), ValidationError> {\n\n Ok(()) // TODO: Add checks\n\n }\n\n}\n", "file_path": "src/constructs/statements.rs", "rank": 60, "score": 31686.54160460977 }, { "content": "use crate::sql::ValidationError;\n\n\n\nuse super::components::*;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct CreateTableStatement {\n\n pub table: TableDefinition,\n\n pub if_not_exists: bool,\n\n}\n\n\n\nimpl Validatable for CreateTableStatement {\n\n fn validate(&self) -> Result<(), ValidationError> {\n\n self.table.validate()\n\n 
}\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct InsertStatement {\n\n pub table_name: String,\n\n pub column_names: Vec<String>,\n", "file_path": "src/constructs/statements.rs", "rank": 61, "score": 31685.536098044973 }, { "content": "use crate::constructs::statements::InsertStatement;\n\nuse crate::sql::expects::{generic::*, semantic::*, ExpectOk, ExpectResult};\n\nuse crate::sql::tokenizer::*;\n\n\n\n/// Conjure an InsertStatement from tokens following INSERT.\n", "file_path": "src/sql/expects/statements/insert.rs", "rank": 62, "score": 28992.79272896853 }, { "content": "use crate::constructs::statements::{SelectColumn, SelectStatement};\n\nuse crate::sql::expects::{generic::*, semantic::*, ExpectOk, ExpectResult};\n\nuse crate::sql::{tokenizer::*, SyntaxError};\n\n\n", "file_path": "src/sql/expects/statements/select.rs", "rank": 63, "score": 28992.555518985428 }, { "content": "mod create_table;\n\nmod insert;\n\nmod select;\n\n\n\npub use create_table::*;\n\npub use insert::*;\n\npub use select::*;\n", "file_path": "src/sql/expects/statements/mod.rs", "rank": 64, "score": 28990.764499791734 }, { "content": " tokens_consumed_count: 2 // +2 to account for INTO + VALUES\n\n + tokens_consumed_count_table_name\n\n + tokens_consumed_count_column_names + tokens_consumed_count_values,\n\n outcome: InsertStatement {\n\n table_name,\n\n column_names,\n\n values,\n\n },\n\n })\n\n}\n", "file_path": "src/sql/expects/statements/insert.rs", "rank": 65, "score": 28985.249835795017 }, { "content": " &\"WHERE conditions\",\n\n )?;\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count: 1 // +1 to account for FROM\n\n + tokens_consumed_columns\n\n + tokens_consumed_count_table_name + tokens_consumed_count_where_clause,\n\n outcome: SelectStatement {\n\n columns,\n\n source: table_name,\n\n where_clause: maybe_where_clause.and_then(|(_, where_clause)| Some(where_clause)),\n\n },\n\n })\n\n}\n", "file_path": "src/sql/expects/statements/select.rs", "rank": 66, "score": 
28985.068199540623 }, { "content": "use crate::constructs::{\n\n components::{ColumnDefinition, TableDefinition},\n\n statements::CreateTableStatement,\n\n};\n\nuse crate::sql::expects::{generic::*, semantic::*, ExpectOk, ExpectResult};\n\nuse crate::sql::tokenizer::*;\n\n\n", "file_path": "src/sql/expects/statements/create_table.rs", "rank": 67, "score": 27814.14836016436 }, { "content": " tokens_consumed_count: tokens_consumed_count_table_definition,\n\n } = expect_table_definition(rest)?;\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_table_definition\n\n + tokens_consumed_count_if_not_exists,\n\n outcome: CreateTableStatement {\n\n table,\n\n if_not_exists,\n\n },\n\n })\n\n}\n", "file_path": "src/sql/expects/statements/create_table.rs", "rank": 68, "score": 27806.81099706723 }, { "content": " )?;\n\n let ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_default,\n\n outcome: maybe_default,\n\n } = detect(\n\n rest,\n\n |tokens| expect_token_value(tokens, &TokenValue::Const(Keyword::Default)),\n\n expect_data_definition,\n\n &\"DEFAULT definition\",\n\n )?;\n\n // TODO: Test against types like UINT16(8)\n\n Ok(ExpectOk {\n\n rest,\n\n tokens_consumed_count: tokens_consumed_count_name\n\n + tokens_consumed_count_data_type\n\n + tokens_consumed_count_primary_key\n\n + tokens_consumed_count_default,\n\n outcome: ColumnDefinition {\n\n name,\n\n data_type,\n\n primary_key: primary_key_option.is_some(),\n\n default: maybe_default.and_then(|(_, default)| Some(default)),\n\n },\n\n })\n\n}\n\n\n", "file_path": "src/sql/expects/statements/create_table.rs", "rank": 69, "score": 27806.12383600302 }, { "content": "use serde::{ser::SerializeMap, Serialize, Serializer};\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug, PartialEq)]\n\n#[error(\"SyntaxError: {0}\")]\n\npub struct SyntaxError(pub String);\n\n\n\nimpl Serialize for SyntaxError {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n 
where\n\n S: Serializer,\n\n {\n\n let mut map = serializer.serialize_map(Some(2))?;\n\n map.serialize_entry(\"type\", \"syntax\")?;\n\n map.serialize_entry(\"message\", &self.0)?;\n\n map.end()\n\n }\n\n}\n\n\n\n#[derive(Error, Debug, PartialEq)]\n", "file_path": "src/sql/errors.rs", "rank": 70, "score": 23.573984660074764 }, { "content": " }\n\n\n\n pub async fn run(&self) -> Result<(), io::Error> {\n\n info!(\"⚙️ Launch configuration:\\n{}\", &self.config);\n\n let mut executor = executor::Executor::new(&self.config);\n\n let executor_tx = executor.prepare_channel();\n\n let (executor_join_result, _) = tokio::join!(\n\n tokio::spawn(async move {\n\n executor.start().await;\n\n }),\n\n server::start_server(&self.config, executor_tx),\n\n );\n\n executor_join_result.expect(\"Failed to join executor\");\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 71, "score": 21.908382228392195 }, { "content": "#[error(\"ValidationError: {0}\")]\n\npub struct ValidationError(pub String);\n\n\n\nimpl Serialize for ValidationError {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut map = serializer.serialize_map(Some(2))?;\n\n map.serialize_entry(\"type\", \"validation\")?;\n\n map.serialize_entry(\"message\", &self.0)?;\n\n map.end()\n\n }\n\n}\n", "file_path": "src/sql/errors.rs", "rank": 72, "score": 20.447919142193932 }, { "content": "impl Serialize for NamedRow<'_> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_map(Some(self.0.len()))?;\n\n for (name, value) in self.0.iter().zip(self.1.iter()) {\n\n state.serialize_entry(name, value)?;\n\n }\n\n state.end()\n\n }\n\n}\n\n\n\nimpl Encodable for Row {\n\n fn try_decode<'b>(_blob: ReadBlob<'b>) -> Result<(Self, ReadBlob<'b>), String> {\n\n panic!(\"`try_decode` would be too ambiguous for `Row` - `try_decode_assume` should be used instead\")\n\n 
}\n\n\n\n fn encode(&self, blob: &mut WriteBlob, mut position: usize) -> usize {\n\n for value in &self.0 {\n", "file_path": "src/storage/encoding.rs", "rank": 73, "score": 17.371643377188736 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for Function {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"keyword `{}`\",\n\n match self {\n\n Self::Ulid => \"ULID\",\n\n Self::Now => \"NOW\",\n\n }\n\n )\n\n }\n\n}\n\n\n\nimpl FromStr for Function {\n\n type Err = String;\n\n\n\n fn from_str(candidate: &str) -> std::result::Result<Self, Self::Err> {\n", "file_path": "src/constructs/functions.rs", "rank": 74, "score": 17.3523925304492 }, { "content": "use std::str::FromStr;\n\n\n\nuse crate::constructs::{components::DataTypeRaw, functions::Function};\n\nuse std::fmt::{self, Debug};\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum Delimiter {\n\n Comma,\n\n ParenthesisOpening,\n\n ParenthesisClosing,\n\n Equal,\n\n}\n\n\n\nimpl Delimiter {\n\n /// Delimiting characters that affect statement meaning. 
Each one is a Delimiter variant.\n\n const MEANINGFUL_CHARS: &'static [char] = &[',', '(', ')', '='];\n\n const STATEMENT_SEPARATOR: char = ';';\n\n const STRING_MARKER: char = '\\'';\n\n const ESCAPE_CHARACTER: char = '\\\\';\n\n}\n", "file_path": "src/sql/tokenizer.rs", "rank": 75, "score": 16.86891865796233 }, { "content": "use crate::config::Config;\n\nuse std::{\n\n convert::TryInto,\n\n fs,\n\n io::{self, Write},\n\n mem, path, process,\n\n};\n\nuse tracing::*;\n\n\n\n// Massively work-in-progress!\n\n\n\npub struct Index<'a> {\n\n collection_name: String,\n\n data: Vec<u128>,\n\n config: &'a Config,\n\n}\n\n\n\nimpl<'a> Index<'a> {\n\n pub fn new(collection_name: &str, config: &'a Config) -> Self {\n\n let mut index = Index {\n", "file_path": "src/storage/index.rs", "rank": 76, "score": 16.864376070279217 }, { "content": "pub mod config;\n\nmod constructs;\n\nmod executor;\n\npub mod server;\n\nmod sql;\n\npub mod storage;\n\n\n\npub use config::Config;\n\nuse std::io;\n\nuse tracing::*;\n\n\n\npub struct Instance {\n\n config: Config,\n\n}\n\n\n\nimpl Instance {\n\n pub fn preload() -> Self {\n\n Instance {\n\n config: Config::from_env(),\n\n }\n", "file_path": "src/lib.rs", "rank": 77, "score": 16.863562823726546 }, { "content": " let mut buffer = Vec::with_capacity(PAGE_SIZE);\n\n file.read_buf(&mut buffer).await?;\n\n Ok(buffer)\n\n}\n\n\n\npub async fn seek_read_decode_page(\n\n config: &config::Config,\n\n schema: &str,\n\n table_definition: &TableDefinition,\n\n page_index: PageIndex,\n\n) -> Result<Page, String> {\n\n let buffer = seek_read_page(config, schema, &table_definition.name, page_index)\n\n .await\n\n .unwrap();\n\n let (page, _rest) = Page::try_decode_assume(&buffer, table_definition)?;\n\n Ok(page)\n\n}\n\n\n\npub async fn seek_write_page(\n\n config: &config::Config,\n", "file_path": "src/storage/filesystem.rs", "rank": 78, "score": 16.631104393521206 }, { "content": "\n\nimpl fmt::Display for Delimiter {\n\n fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Self::Comma => \"comma `,`\",\n\n Self::ParenthesisOpening => \"opening parenthesis `(`\",\n\n Self::ParenthesisClosing => \"closing parenthesis `)`\",\n\n Self::Equal => \"equality sign `=`\",\n\n }\n\n )\n\n }\n\n}\n\n\n\nimpl FromStr for Delimiter {\n\n type Err = String;\n\n\n\n fn from_str(candidate: &str) -> std::result::Result<Self, Self::Err> {\n", "file_path": "src/sql/tokenizer.rs", "rank": 79, "score": 15.943113184311198 }, { "content": " Expression, TableDefinition,\n\n },\n\n functions::Function,\n\n statements::SelectColumn,\n\n };\n\n\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn parsing_works_with_create_table() {\n\n let statement = \"CREATE TABLE IF NOT EXISTS test (\n\n id STRING PRIMARY KEY,\n\n server_id nullable(UINT64),\n\n hash UINT128 DEFAULT 666,\n\n sent_at TIMESTAMP DEFAULT NOW()\n\n );\";\n\n\n\n let detected_statement = parse_statement(&statement).unwrap();\n\n\n", "file_path": "src/sql/parser.rs", "rank": 80, "score": 15.655856844297594 }, { "content": "mod generic;\n\nmod semantic;\n\nmod statements;\n\n\n\nuse crate::sql::errors::*;\n\nuse crate::sql::tokenizer::*;\n\n\n\npub use generic::*;\n\npub use semantic::*;\n\npub use statements::*;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct ExpectOk<'t, O> {\n\n pub rest: &'t [Token],\n\n pub tokens_consumed_count: usize,\n\n pub outcome: O,\n\n}\n\npub type ExpectResult<'t, O> = Result<ExpectOk<'t, O>, SyntaxError>;\n", "file_path": "src/sql/expects/mod.rs", "rank": 81, "score": 15.244512802431254 }, { "content": "}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub struct DataType {\n\n pub raw_type: DataTypeRaw,\n\n pub is_nullable: bool,\n\n}\n\n\n\nimpl FromStr for DataTypeRaw {\n\n type Err = String;\n\n\n\n fn from_str(candidate: &str) -> std::result::Result<Self, Self::Err> {\n\n match candidate.to_lowercase().as_str() {\n\n \"uint8\" => 
Ok(Self::UInt8),\n\n \"uint16\" => Ok(Self::UInt16),\n\n \"uint32\" => Ok(Self::UInt32),\n\n \"uint64\" => Ok(Self::UInt64),\n\n \"uint128\" => Ok(Self::UInt128),\n\n \"bool\" => Ok(Self::Bool),\n\n \"timestamp\" => Ok(Self::Timestamp),\n", "file_path": "src/constructs/components.rs", "rank": 82, "score": 15.019752096057834 }, { "content": " .collect()\n\n }\n\n\n\n pub fn add(&mut self, value: u128) {\n\n self.data.push(value);\n\n self.sync_to_disk();\n\n }\n\n\n\n pub fn sync_from_disk(&mut self) -> Vec<u128> {\n\n let raw_data = self.read_file();\n\n let data = match raw_data {\n\n Ok(raw_data) => {\n\n let result = self.parse_index_raw_data(raw_data);\n\n debug!(\"Result: {:?}\", result);\n\n result\n\n }\n\n Err(err) if err.kind() == io::ErrorKind::NotFound => {\n\n self.create_empty_file().unwrap();\n\n vec![]\n\n }\n", "file_path": "src/storage/index.rs", "rank": 83, "score": 14.830720398634059 }, { "content": "use crate::constructs::components::{DataInstance, DataInstanceRaw, DataType, DataTypeRaw};\n\nuse serde::{ser::SerializeMap, Serialize, Serializer};\n\nuse std::{\n\n convert::{From, TryFrom},\n\n fmt::Debug,\n\n mem, str,\n\n};\n\nuse time::OffsetDateTime;\n\nuse uuid::Uuid;\n\n\n\n// Important note: all data stored on disk by Emdrive is big-endian. Use `from_be_bytes` and `to_be_bytes` methods.\n\n\n\n// Read-only blob that is being decoded.\n\npub type ReadBlob<'b> = &'b [u8];\n\n// Read-write blob used for encoding.\n\npub type WriteBlob = Vec<u8>;\n\n/// Length of variable-length value.\n\npub type VarLen = u16;\n\n/// Page index.\n\npub type PageIndex = u32;\n\n/// A count that pertains to a single row (e.g. rows inside a leaf).\n\npub type LocalCount = u16;\n\n/// A count that pertains to possibly more than a single row (e.g. 
rows in all leaf children of a node).\n\npub type GlobalCount = u64;\n\n\n\n/// Trait for reading data from blobs.\n", "file_path": "src/storage/encoding.rs", "rank": 84, "score": 14.367598754598616 }, { "content": "use std::{fmt, str::FromStr};\n\n\n\nuse time::OffsetDateTime;\n\nuse ulid::Ulid;\n\nuse uuid::Uuid;\n\n\n\nuse super::components::DataInstanceRaw;\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum Function {\n\n Ulid,\n\n Now,\n\n}\n\n\n\nimpl Function {\n\n pub fn call(&self) -> DataInstanceRaw {\n\n match self {\n\n Self::Ulid => DataInstanceRaw::Uuid(Uuid::from(Ulid::new())),\n\n Self::Now => DataInstanceRaw::Timestamp(OffsetDateTime::now_utc()),\n\n }\n", "file_path": "src/constructs/functions.rs", "rank": 85, "score": 14.264370748186671 }, { "content": " }\n\n // Process token candidates found on this line\n\n tokens.extend(token_candidates.iter().map(|candidate| Token {\n\n value: TokenValue::from_str(candidate).unwrap(),\n\n line_number: line_index + 1,\n\n }))\n\n }\n\n tokens\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn tokenization_works_with_create_table() {\n\n let statement = \"CREATE TABLE IF NOT EXISTS test (\n\n server_id nullable(UINT64),\n\n hash UINT128 METRIC KEY,\n", "file_path": "src/sql/tokenizer.rs", "rank": 86, "score": 14.237827649068013 }, { "content": "use serde::Serialize;\n\nuse std::{collections::HashSet, str::FromStr};\n\nuse time::OffsetDateTime;\n\nuse uuid::Uuid;\n\n\n\nuse crate::sql::ValidationError;\n\n\n\nuse super::functions::Function;\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum DataTypeRaw {\n\n UInt8,\n\n UInt16,\n\n UInt32,\n\n UInt64,\n\n UInt128,\n\n Bool,\n\n Timestamp,\n\n Uuid,\n\n String,\n", "file_path": "src/constructs/components.rs", "rank": 87, "score": 14.226834001112424 }, { "content": "use std::{env, fmt, str};\n\n\n\n/// DBMS configuration.\n\n#[derive(Debug, Clone)]\n\npub struct Config {\n\n 
/// Path to database state, i.e. saved data. Conventionally `emdrive/` in `/var/lib/`.\n\n pub data_directory: String,\n\n /// TCP interface listener host, `127.0.0.1` by default.\n\n pub tcp_listen_host: String,\n\n /// TCP interface listener port. `8824` by default.\n\n pub tcp_listen_port: u16,\n\n}\n\n\n\nimpl Default for Config {\n\n fn default() -> Config {\n\n Config {\n\n data_directory: \"/var/lib/emdrive/data\".to_string(),\n\n tcp_listen_host: \"127.0.0.1\".to_string(),\n\n tcp_listen_port: 8824,\n\n }\n", "file_path": "src/config.rs", "rank": 88, "score": 13.58348262400088 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub struct Token {\n\n pub value: TokenValue,\n\n pub line_number: usize,\n\n}\n\n\n\nimpl fmt::Display for Token {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{} at line {}\", self.value, self.line_number)\n\n }\n\n}\n\n\n", "file_path": "src/sql/tokenizer.rs", "rank": 89, "score": 13.299720329303184 }, { "content": "use crate::constructs::components::{ColumnDefinition, DataType, DataTypeRaw, TableDefinition};\n\n\n\npub const SYSTEM_SCHEMA_NAME: &str = \"system\";\n\n\n\npub enum SystemTable {\n\n Tables,\n\n Columns,\n\n}\n\n\n\nimpl SystemTable {\n\n /// Array of all system tables.\n\n pub const ALL: [Self; 2] = [Self::Tables, Self::Columns];\n\n\n\n pub fn get_definition(&self) -> TableDefinition {\n\n match self {\n\n Self::Tables => TableDefinition::new(\n\n \"tables\".into(),\n\n vec![\n\n ColumnDefinition {\n\n name: \"id\".into(),\n", "file_path": "src/storage/system.rs", "rank": 90, "score": 13.094638599779797 }, { "content": " Function(Function),\n\n String(String),\n\n Arbitrary(String),\n\n}\n\n\n\nimpl fmt::Display for TokenValue {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Delimiting(value) => fmt::Display::fmt(&value, f),\n\n Self::Const(value) => fmt::Display::fmt(&value, f),\n\n Self::Type(value) => 
value.fmt(f),\n\n Self::Function(value) => fmt::Display::fmt(&value, f),\n\n Self::String(value) => write!(f, \"string `\\\"{}\\\"`\", value),\n\n Self::Arbitrary(value) => write!(f, \"arbitrary `{}`\", value),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for TokenValue {\n\n type Err = ();\n", "file_path": "src/sql/tokenizer.rs", "rank": 91, "score": 12.97115001853657 }, { "content": " schema: &str,\n\n table_name: &str,\n\n data: WriteBlob,\n\n) -> Result<(), std::io::Error> {\n\n let dir_path = determine_table_dir_path(config, schema, table_name);\n\n fs::create_dir_all(&dir_path).await?;\n\n let file_path = dir_path.join(\"0\");\n\n fs::write(file_path, data).await\n\n}\n\n\n\npub async fn seek_read_page(\n\n config: &config::Config,\n\n schema: &str,\n\n table_name: &str,\n\n page_index: PageIndex,\n\n) -> Result<Vec<u8>, std::io::Error> {\n\n let path = determine_table_dir_path(config, schema, table_name).join(\"0\");\n\n let mut file = fs::File::open(path).await?;\n\n file.seek(SeekFrom::Start(page_index as u64 * PAGE_SIZE as u64))\n\n .await?;\n", "file_path": "src/storage/filesystem.rs", "rank": 92, "score": 12.561377719623206 }, { "content": " Keyword::Asterisk => \"*\",\n\n }\n\n )\n\n }\n\n}\n\n\n\nimpl FromStr for Keyword {\n\n type Err = String;\n\n\n\n fn from_str(candidate: &str) -> std::result::Result<Self, Self::Err> {\n\n match candidate.to_lowercase().as_str() {\n\n \"create\" => Ok(Self::Create),\n\n \"select\" => Ok(Self::Select),\n\n \"from\" => Ok(Self::From),\n\n \"where\" => Ok(Self::Where),\n\n \"as\" => Ok(Self::As),\n\n \"insert\" => Ok(Self::Insert),\n\n \"into\" => Ok(Self::Into),\n\n \"values\" => Ok(Self::Values),\n\n \"table\" => Ok(Self::Table),\n", "file_path": "src/sql/tokenizer.rs", "rank": 93, "score": 12.313100243255626 }, { "content": " Err(err) => {\n\n debug!(\"Error while reading index data: {}\", err);\n\n process::exit(1);\n\n }\n\n };\n\n debug!(\"Found {} index data: {:?}\", &self.collection_name, &data);\n\n self.data = 
data;\n\n self.data.clone()\n\n }\n\n\n\n pub fn sync_to_disk(&mut self) {\n\n let mut file = self.create_empty_file().unwrap();\n\n let buffer: Vec<u8> = self\n\n .data\n\n .iter()\n\n .flat_map(|x| {\n\n let bytes: Vec<u8> = x.to_be_bytes().iter().copied().collect();\n\n bytes\n\n })\n\n .collect();\n", "file_path": "src/storage/index.rs", "rank": 94, "score": 11.86300698123121 }, { "content": "\n\n fn from_str(candidate: &str) -> std::result::Result<Self, Self::Err> {\n\n if let Ok(delimiter) = Delimiter::from_str(candidate) {\n\n Ok(Self::Delimiting(delimiter))\n\n } else if let Ok(keyword) = Keyword::from_str(candidate) {\n\n Ok(Self::Const(keyword))\n\n } else if let Ok(data_type_raw) = DataTypeRaw::from_str(candidate) {\n\n Ok(Self::Type(data_type_raw))\n\n } else if let Ok(function) = Function::from_str(candidate) {\n\n Ok(Self::Function(function))\n\n } else {\n\n let mut candidate_chars = candidate.chars();\n\n if let (Some(Delimiter::STRING_MARKER), Some(Delimiter::STRING_MARKER)) =\n\n (candidate_chars.next(), candidate_chars.next_back())\n\n {\n\n // We only need to use as_str() here as the first and last chars have been consumed by nth()s\n\n Ok(Self::String(candidate_chars.as_str().to_string()))\n\n } else {\n\n Ok(Self::Arbitrary(candidate.to_string()))\n\n }\n", "file_path": "src/sql/tokenizer.rs", "rank": 95, "score": 11.799038582050752 }, { "content": "use std::str::FromStr;\n\n\n\nuse emdrive::Instance;\n\nuse human_panic::setup_panic;\n\nuse tracing::*;\n\nuse tracing_subscriber::{EnvFilter, FmtSubscriber};\n\n\n", "file_path": "src/main.rs", "rank": 96, "score": 11.54910987862322 }, { "content": " Ok((blob[0] != 0, &blob[1..]))\n\n }\n\n\n\n fn encode(&self, blob: &mut WriteBlob, position: usize) -> usize {\n\n blob[position] = *self as u8;\n\n position + 1\n\n }\n\n\n\n fn encoded_size(&self) -> usize {\n\n 1 // bool\n\n }\n\n}\n\n\n\nimpl Encodable for String {\n\n fn try_decode<'b>(blob: ReadBlob<'b>) -> Result<(Self, ReadBlob<'b>), String> 
{\n\n let (char_count, rest) = VarLen::try_decode(blob)?;\n\n let char_count_idx = usize::from(char_count);\n\n match str::from_utf8(&rest[..char_count_idx]) {\n\n Ok(ok) => Ok((ok.to_string(), &rest[char_count_idx..])),\n\n Err(err) => Err(err.to_string()),\n", "file_path": "src/storage/encoding.rs", "rank": 97, "score": 11.541872043450248 }, { "content": " // Then we save that rows position to the _front_ of the page\n\n position = LocalCount::try_from(position_back)\n\n .unwrap()\n\n .encode(&mut page_blob, position);\n\n // TODO: Protect against page overflow and inadvertent overwrites in the middle of the page\n\n }\n\n }\n\n };\n\n assert_eq!(page_blob.len(), PAGE_SIZE, \"Page serialization fault - ended up with a blob that is {} B long, instead of the correct {} B\", page_blob.len(), PAGE_SIZE);\n\n page_blob\n\n }\n\n}\n\n\n\nimpl<'b> EncodableWithAssumption<'b> for Page {\n\n type Assumption = &'b TableDefinition;\n\n\n\n fn try_decode_assume(\n\n blob: ReadBlob<'b>,\n\n assumption: Self::Assumption,\n\n ) -> Result<(Self, ReadBlob<'b>), String> {\n", "file_path": "src/storage/paging.rs", "rank": 98, "score": 11.47736317675845 }, { "content": "encodable_number_impl! { isize i8 i16 i32 i64 i128 usize u16 u32 u64 u128 }\n\n\n\n// u8 is a special case, as it can be used in blobs with zero transformation\n\nimpl Encodable for u8 {\n\n fn try_decode<'b>(blob: ReadBlob<'b>) -> Result<(Self, ReadBlob<'b>), String> {\n\n Ok((blob[0], &blob[1..]))\n\n }\n\n\n\n fn encode(&self, blob: &mut WriteBlob, position: usize) -> usize {\n\n blob[position] = *self;\n\n position + 1\n\n }\n\n\n\n fn encoded_size(&self) -> usize {\n\n 1 // u8\n\n }\n\n}\n\n\n\nimpl Encodable for bool {\n\n fn try_decode<'b>(blob: ReadBlob<'b>) -> Result<(Self, ReadBlob<'b>), String> {\n", "file_path": "src/storage/encoding.rs", "rank": 99, "score": 11.270937834561009 } ]
Rust
crates/core/src/config/mod.rs
seank-com/ajour
b4f29e4b7526a91b03e7e73388dc2f5966950968
use crate::catalog; use crate::error::FilesystemError; use crate::repository::CompressionFormat; use glob::MatchOptions; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::fmt::{self, Display, Formatter}; use std::fs::create_dir_all; use std::path::{Path, PathBuf}; mod addons; mod wow; use crate::fs::PersistentData; pub use crate::config::addons::Addons; pub use crate::config::wow::{Flavor, Wow}; #[derive(Deserialize, Serialize, Debug, PartialEq, Default, Clone)] pub struct Config { #[serde(default)] pub wow: Wow, #[serde(default)] pub addons: Addons, pub theme: Option<String>, #[serde(default)] pub column_config: ColumnConfig, pub window_size: Option<(u32, u32)>, pub scale: Option<f64>, pub backup_directory: Option<PathBuf>, #[serde(default)] pub backup_addons: bool, #[serde(default)] pub backup_wtf: bool, #[serde(default)] pub backup_config: bool, #[serde(default)] pub hide_ignored_addons: bool, #[serde(default)] pub self_update_channel: SelfUpdateChannel, #[serde(default)] pub weak_auras_account: HashMap<Flavor, String>, #[serde(default = "default_true")] pub alternating_row_colors: bool, #[serde(default)] pub language: Language, #[serde(default)] pub catalog_source: Option<catalog::Source>, #[serde(default)] pub auto_update: bool, #[serde(default)] pub compression_format: CompressionFormat, #[serde(default)] #[cfg(target_os = "windows")] pub close_to_tray: bool, #[serde(default)] #[cfg(target_os = "windows")] pub autostart: bool, #[serde(default)] #[cfg(target_os = "windows")] pub start_closed_to_tray: bool, } impl Config { pub fn add_wow_directories(&mut self, path: PathBuf, flavor: Option<Flavor>) { if let Some(flavor) = flavor { let flavor_path = self.get_flavor_directory_for_flavor(&flavor, &path); if flavor_path.exists() { self.wow.directories.insert(flavor, flavor_path); } } else { let flavors = &Flavor::ALL[..]; for flavor in flavors { let flavor_path = self.get_flavor_directory_for_flavor(flavor, &path); if flavor_path.exists() 
{ self.wow.directories.insert(*flavor, flavor_path); } } } } pub fn get_flavor_directory_for_flavor(&self, flavor: &Flavor, path: &Path) -> PathBuf { path.join(&flavor.folder_name()) } pub fn get_root_directory_for_flavor(&self, flavor: &Flavor) -> Option<PathBuf> { self.wow .directories .get(flavor) .map(|p| p.parent().unwrap().to_path_buf()) } pub fn get_addon_directory_for_flavor(&self, flavor: &Flavor) -> Option<PathBuf> { let dir = self.wow.directories.get(flavor); match dir { Some(dir) => { let mut addon_dir = dir.join("Interface/AddOns"); if !addon_dir.exists() { let options = MatchOptions { case_sensitive: false, ..Default::default() }; let pattern = format!("{}/?nterface/?ddons", dir.display()); for path in glob::glob_with(&pattern, options).unwrap().flatten() { addon_dir = path; } } if dir.exists() && !addon_dir.exists() { let _ = create_dir_all(&addon_dir); } Some(addon_dir) } None => None, } } pub fn get_download_directory_for_flavor(&self, flavor: Flavor) -> Option<PathBuf> { self.wow.directories.get(&flavor).cloned() } pub fn get_wtf_directory_for_flavor(&self, flavor: &Flavor) -> Option<PathBuf> { let dir = self.wow.directories.get(flavor); match dir { Some(dir) => { let mut addon_dir = dir.join("WTF"); if !addon_dir.exists() { let options = MatchOptions { case_sensitive: false, ..Default::default() }; let pattern = format!("{}/?tf", dir.display()); for path in glob::glob_with(&pattern, options).unwrap().flatten() { addon_dir = path; } } Some(addon_dir) } None => None, } } } impl PersistentData for Config { fn relative_path() -> PathBuf { PathBuf::from("ajour.yml") } } #[derive(Deserialize, Serialize, Debug, PartialEq, Clone)] pub enum ColumnConfig { V1 { local_version_width: u16, remote_version_width: u16, status_width: u16, }, V2 { columns: Vec<ColumnConfigV2>, }, V3 { my_addons_columns: Vec<ColumnConfigV2>, catalog_columns: Vec<ColumnConfigV2>, #[serde(default)] aura_columns: Vec<ColumnConfigV2>, }, } #[derive(Deserialize, Serialize, Debug, 
PartialEq, Clone)] pub struct ColumnConfigV2 { pub key: String, pub width: Option<u16>, pub hidden: bool, } impl Default for ColumnConfig { fn default() -> Self { ColumnConfig::V1 { local_version_width: 150, remote_version_width: 150, status_width: 85, } } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum SelfUpdateChannel { Stable, Beta, } impl SelfUpdateChannel { pub const fn all() -> [Self; 2] { [SelfUpdateChannel::Stable, SelfUpdateChannel::Beta] } } impl Default for SelfUpdateChannel { fn default() -> Self { SelfUpdateChannel::Stable } } impl Display for SelfUpdateChannel { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let s = match self { SelfUpdateChannel::Stable => "Stable", SelfUpdateChannel::Beta => "Beta", }; write!(f, "{}", s) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Hash, PartialOrd, Ord)] pub enum Language { Czech, Norwegian, English, Danish, German, French, Hungarian, Portuguese, Russian, Slovak, Swedish, Spanish, Turkish, Ukrainian, } impl std::fmt::Display for Language { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "{}", match self { Language::Czech => "Čeština", Language::Danish => "Dansk", Language::English => "English", Language::French => "Français", Language::German => "Deutsch", Language::Hungarian => "Magyar", Language::Norwegian => "Norsk Bokmål", Language::Portuguese => "Português", Language::Russian => "Pусский", Language::Slovak => "Slovenčina", Language::Spanish => "Español", Language::Swedish => "Svenska", Language::Turkish => "Türkçe", Language::Ukrainian => "Yкраїнська", } ) } } impl Language { pub const ALL: [Language; 14] = [ Language::Czech, Language::Danish, Language::German, Language::English, Language::Spanish, Language::French, Language::Hungarian, Language::Norwegian, Language::Portuguese, Language::Russian, Language::Slovak, Language::Swedish, Language::Turkish, Language::Ukrainian, ]; pub const fn language_code(self) -> 
&'static str { match self { Language::Czech => "cs_CZ", Language::English => "en_US", Language::Danish => "da_DK", Language::German => "de_DE", Language::French => "fr_FR", Language::Russian => "ru_RU", Language::Swedish => "se_SE", Language::Spanish => "es_ES", Language::Hungarian => "hu_HU", Language::Norwegian => "nb_NO", Language::Slovak => "sk_SK", Language::Turkish => "tr_TR", Language::Portuguese => "pt_PT", Language::Ukrainian => "uk_UA", } } } impl Default for Language { fn default() -> Language { Language::English } } pub async fn load_config() -> Result<Config, FilesystemError> { log::debug!("loading config"); Ok(Config::load_or_default()?) } const fn default_true() -> bool { true }
use crate::catalog; use crate::error::FilesystemError; use crate::repository::CompressionFormat; use glob::MatchOptions; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::fmt::{self, Display, Formatter}; use std::fs::create_dir_all; use std::path::{Path, PathBuf}; mod addons; mod wow; use crate::fs::PersistentData; pub use crate::config::addons::Addons; pub use crate::config::wow::{Flavor, Wow}; #[derive(Deserialize, Serialize, Debug, PartialEq, Default, Clone)] pub struct Config { #[serde(default)] pub wow: Wow, #[serde(default)] pub addons: Addons, pub theme: Option<String>, #[serde(default)] pub column_config: ColumnConfig, pub window_size: Option<(u32, u32)>, pub scale: Option<f64>, pub backup_directory: Option<PathBuf>, #[serde(default)] pub backup_addons: bool, #[serde(default)] pub backup_wtf: bool, #[serde(default)] pub backup_config: bool, #[serde(default)] pub hide_ignored_addons: bool, #[serde(default)] pub self_update_channel: SelfUpdateChannel, #[serde(default)] pub weak_auras_account: HashMap<Flavor, String>, #[serde(default = "default_true")] pub alternating_row_colors: bool, #[serde(default)] pub language: Language, #[serde(default)] pub catalog_source: Option<catalog::Source>, #[serde(default)] pub auto_update: bool, #[serde(default)] pub compression_format: CompressionFormat, #[serde(default)] #[cfg(target_os = "windows")] pub close_to_tray: bool, #[serde(default)] #[cfg(target_os = "windows")] pub autostart: bool, #[serde(default)] #[cfg(target_os = "windows")] pub start_closed_to_tray: bool, } impl Config { pub fn add_wow_directories(&mut self, path: PathBuf, flavor: Option<Flavor>) { if let Some(flavor) = flavor { let flavor_path = self.get_flavor_directory_for_flavor(&flavor, &path); if flavor_path.exists() { self.wow.directories.insert(flavor, flavor_path); } } else { let flavors = &Flavor::ALL[..]; for flavor in flavors { let flavor_path = self.get_flavor_directory_for_flavor(flavor, &path); if flavor_path.exists() 
{ self.wow.directories.insert(*flavor, flavor_path); } } } } pub fn get_flavor_directory_for_flavor(&self, flavor: &Flavor, path: &Path) -> PathBuf { path.join(&flavor.folder_name()) } pub fn get_root_directory_for_flavor(&self, flavor: &Flavor) -> Option<PathBuf> { self.wow .directories .get(flavor) .map(|p| p.parent().unwrap().to_path_buf()) } pub fn get_addon_directory_for_flavor(&self, flavor: &Flavor) -> Option<PathBuf> { let dir = self.wow.directories.get(flavor); match dir { Some(dir) => { let mut addon_dir = dir.join("Interface/AddOns"); if !addon_dir.exists() { let options = MatchOptions { case_sensitive: false, ..Default::default() }; let pattern = format!("{}/?nterface/?ddons", dir.display()); for path in glob::glob_with(&pattern, options).unwrap().flatten() { addon_dir = path; } } if dir.exists() && !addon_dir.exists() { let _ = create_dir_all(&addon_dir); } Some(addon_dir) } None => None, } } pub fn get_download_directory_for_flavor(&self, flavor: Flavor) -> Option<PathBuf> { self.wow.directories.get(&flavor).cloned() } pub fn get_wtf_directory_for_flavor(&self, flavor: &Flavor) -> Option<PathBuf> { let dir = self.wow.directories.get(flavor); match dir { Some(dir) => { let mut addon_dir = dir.join("WTF"); if !addon_dir.exists() { let options = MatchOptions { case_sensitive: false, ..Default::default() }; let pattern = format!("{}/?tf", dir.display()); for path in glob::glob_with(&pattern, options).unwrap().flatten() { addon_dir = path; } } Some(addon_dir) } None => None, } } } impl PersistentData for Config { fn relative_path() -> PathBuf { PathBuf::from("ajour.yml") } } #[derive(Deserialize, Serialize, Debug, PartialEq, Clone)] pub enum ColumnConfig { V1 { local_version_width: u16, remote_version_width: u16, status_width: u16, }, V2 { columns: Vec<ColumnConfigV2>, }, V3 { my_addons_columns: Vec<ColumnConfigV2>, catalog_columns: Vec<ColumnConfigV2>, #[serde(default)] aura_columns: Vec<ColumnConfigV2>, }, } #[derive(Deserialize, Serialize, Debug, 
PartialEq, Clone)] pub struct ColumnConfigV2 { pub key: String, pub width: Option<u16>, pub hidden: bool, } impl Default for ColumnConfig { fn default() -> Self { ColumnConfig::V1 { local_version_width: 150, remote_version_width: 150, status_width: 85, } } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum SelfUpdateChannel { Stable, Beta, } impl SelfUpdateChannel { pub const fn all() -> [Self; 2] { [SelfUpdateChannel::Stable, SelfUpdateChannel::Beta] } } impl Default for SelfUpdateChannel { fn default() -> Self { SelfUpdateChannel::Stable } } impl Display for SelfUpdateChannel {
} #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Hash, PartialOrd, Ord)] pub enum Language { Czech, Norwegian, English, Danish, German, French, Hungarian, Portuguese, Russian, Slovak, Swedish, Spanish, Turkish, Ukrainian, } impl std::fmt::Display for Language { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "{}", match self { Language::Czech => "Čeština", Language::Danish => "Dansk", Language::English => "English", Language::French => "Français", Language::German => "Deutsch", Language::Hungarian => "Magyar", Language::Norwegian => "Norsk Bokmål", Language::Portuguese => "Português", Language::Russian => "Pусский", Language::Slovak => "Slovenčina", Language::Spanish => "Español", Language::Swedish => "Svenska", Language::Turkish => "Türkçe", Language::Ukrainian => "Yкраїнська", } ) } } impl Language { pub const ALL: [Language; 14] = [ Language::Czech, Language::Danish, Language::German, Language::English, Language::Spanish, Language::French, Language::Hungarian, Language::Norwegian, Language::Portuguese, Language::Russian, Language::Slovak, Language::Swedish, Language::Turkish, Language::Ukrainian, ]; pub const fn language_code(self) -> &'static str { match self { Language::Czech => "cs_CZ", Language::English => "en_US", Language::Danish => "da_DK", Language::German => "de_DE", Language::French => "fr_FR", Language::Russian => "ru_RU", Language::Swedish => "se_SE", Language::Spanish => "es_ES", Language::Hungarian => "hu_HU", Language::Norwegian => "nb_NO", Language::Slovak => "sk_SK", Language::Turkish => "tr_TR", Language::Portuguese => "pt_PT", Language::Ukrainian => "uk_UA", } } } impl Default for Language { fn default() -> Language { Language::English } } pub async fn load_config() -> Result<Config, FilesystemError> { log::debug!("loading config"); Ok(Config::load_or_default()?) } const fn default_true() -> bool { true }
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let s = match self { SelfUpdateChannel::Stable => "Stable", SelfUpdateChannel::Beta => "Beta", }; write!(f, "{}", s) }
function_block-full_function
[ { "content": "pub fn path_add(path: PathBuf, flavor: Option<Flavor>) -> Result<()> {\n\n task::block_on(async {\n\n log::debug!(\"Adding {:?} from {:?} to known directories\", flavor, &path);\n\n let mut config = load_config().await?;\n\n config.add_wow_directories(path, flavor);\n\n let _ = config.save();\n\n\n\n Ok(())\n\n })\n\n}\n", "file_path": "src/command/paths.rs", "rank": 0, "score": 283015.5988648462 }, { "content": "pub fn config_dir() -> PathBuf {\n\n let config_dir = CONFIG_DIR.lock().unwrap().clone();\n\n\n\n if !config_dir.exists() {\n\n let _ = fs::create_dir_all(&config_dir);\n\n }\n\n\n\n config_dir\n\n}\n\n\n", "file_path": "crates/core/src/fs/mod.rs", "rank": 1, "score": 242218.28610512867 }, { "content": "pub fn fingerprint_addon_dir(addon_dir: &Path) -> Result<u32, ParseError> {\n\n let mut to_fingerprint = HashSet::new();\n\n let mut to_parse = VecDeque::new();\n\n let root_dir = addon_dir.parent().ok_or(ParseError::NoParentDirectory {\n\n dir: addon_dir.to_owned(),\n\n })?;\n\n\n\n // Add initial files\n\n let glob_pattern = format!(\n\n \"{}/**/*.*\",\n\n addon_dir.to_str().ok_or(ParseError::InvalidUtf8Path {\n\n path: addon_dir.to_owned(),\n\n })?\n\n );\n\n for path in glob::glob(&glob_pattern)? {\n\n let path = path?;\n\n if !path.is_file() {\n\n continue;\n\n }\n\n\n", "file_path": "crates/core/src/parse.rs", "rank": 2, "score": 240037.22030491545 }, { "content": "/// Logic to help pick the right World of Warcraft folder.\n\npub fn wow_path_resolution(path: Option<PathBuf>) -> Option<PathBuf> {\n\n if let Some(path) = path {\n\n // Known folders in World of Warcraft dir\n\n let known_folders = Flavor::ALL\n\n .iter()\n\n .map(|f| f.folder_name())\n\n .collect::<Vec<String>>();\n\n\n\n // If chosen path has any of the known Wow folders, we have the right one.\n\n for folder in known_folders.iter() {\n\n if path.join(folder).exists() {\n\n return Some(path);\n\n }\n\n }\n\n\n\n // Iterate ancestors. 
If we find any of the known folders we can guess the root.\n\n for ancestor in path.as_path().ancestors() {\n\n if let Some(file_name) = ancestor.file_name() {\n\n for folder in known_folders.iter() {\n\n if file_name == OsStr::new(folder) {\n\n return ancestor.parent().map(|p| p.to_path_buf());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "crates/core/src/utility.rs", "rank": 3, "score": 235975.1240594866 }, { "content": "/// Returns flavor `String` in Tukui format\n\nfn format_flavor(flavor: &Flavor) -> String {\n\n let base_flavor = flavor.base_flavor();\n\n match base_flavor {\n\n Flavor::Retail => \"retail\".to_owned(),\n\n Flavor::ClassicTbc => \"tbc\".to_owned(),\n\n Flavor::ClassicEra => \"classic\".to_owned(),\n\n _ => panic!(\"Unknown base flavor {}\", base_flavor),\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/repository/backend/tukui.rs", "rank": 4, "score": 211267.66563569478 }, { "content": "/// Helper function to parse a given TOC file\n\n/// (`DirEntry`) into a `Addon` struct.\n\n///\n\n/// TOC format summary:\n\n/// https://wowwiki.fandom.com/wiki/TOC_format\n\npub fn parse_toc_path(toc_path: &Path) -> Option<AddonFolder> {\n\n //direntry\n\n let file = if let Ok(file) = File::open(toc_path) {\n\n file\n\n } else {\n\n return None;\n\n };\n\n let reader = BufReader::new(file);\n\n\n\n let path = toc_path.parent()?.to_path_buf();\n\n let id = path.file_name()?.to_str()?.to_string();\n\n let mut title: Option<String> = None;\n\n let mut interface: Option<String> = None;\n\n let mut author: Option<String> = None;\n\n let mut notes: Option<String> = None;\n\n let mut version: Option<String> = None;\n\n let mut dependencies: Vec<String> = Vec::new();\n\n let mut wowi_id: Option<String> = None;\n\n let mut tukui_id: Option<String> = None;\n\n let mut curse_id: Option<i32> = None;\n", "file_path": "crates/core/src/parse.rs", "rank": 5, "score": 204866.8252731855 }, { "content": "pub fn localized_string(key: &str) -> String {\n\n 
let lang = LANG.get().expect(\"LANG not set\").read().unwrap();\n\n\n\n if let Some(text) = get_text!(LOCALIZATION_CTX, *lang, key) {\n\n let text = text.to_string();\n\n if text.is_empty() {\n\n key.to_owned()\n\n } else {\n\n text\n\n }\n\n } else {\n\n key.to_owned()\n\n }\n\n}\n\n\n", "file_path": "src/localization.rs", "rank": 6, "score": 197800.88844035336 }, { "content": "fn apply_config(ajour: &mut Ajour, mut config: Config) {\n\n // Set column widths from the config\n\n match &config.column_config {\n\n ColumnConfig::V1 {\n\n local_version_width,\n\n remote_version_width,\n\n status_width,\n\n } => {\n\n ajour\n\n .header_state\n\n .columns\n\n .get_mut(1)\n\n .as_mut()\n\n .unwrap()\n\n .width = Length::Units(*local_version_width);\n\n ajour\n\n .header_state\n\n .columns\n\n .get_mut(2)\n\n .as_mut()\n", "file_path": "src/gui/mod.rs", "rank": 7, "score": 197326.32355465138 }, { "content": "pub fn install_from_source(url: Uri, flavor: Flavor) -> Result<()> {\n\n task::block_on(async {\n\n log::debug!(\"Fetching remote info for {:?}\", &url);\n\n\n\n // Will use hash of url as temp name to download zip as\n\n let mut hasher = DefaultHasher::new();\n\n hasher.write(url.to_string().as_bytes());\n\n let url_hash = hasher.finish();\n\n\n\n let config = load_config().await?;\n\n let global_release_channel = config.addons.global_release_channel;\n\n\n\n let addon_cache = Arc::new(Mutex::new(load_addon_cache().await?));\n\n let fingerprint_cache = Arc::new(Mutex::new(load_fingerprint_cache().await?));\n\n\n\n // Fetch the remote repo metadata\n\n let mut repo_package = RepositoryPackage::from_source_url(Flavor::Retail, url)?;\n\n repo_package.resolve_metadata().await?;\n\n\n\n // Build an addon using this repo package\n", "file_path": "src/command/install.rs", "rank": 8, "score": 185427.62007937516 }, { "content": "/// Starts the GUI.\n\n/// This function does not return.\n\npub fn run(opts: Opts, config: Config) {\n\n // Set LANG using config (defaults to 
\"en_US\")\n\n LANG.set(RwLock::new(config.language.language_code()))\n\n .expect(\"setting LANG from config\");\n\n\n\n log::debug!(\"config loaded:\\n{:#?}\", &config);\n\n\n\n let mut settings = Settings::default();\n\n settings.window.size = config.window_size.unwrap_or((900, 620));\n\n\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n settings.exit_on_close_request = false;\n\n }\n\n\n\n #[cfg(not(target_os = \"linux\"))]\n\n // TODO (casperstorm): Due to an upstream bug, min_size causes the window to become unresizable\n\n // on Linux.\n\n // @see: https://github.com/ajour/ajour/issues/427\n\n {\n", "file_path": "src/gui/mod.rs", "rank": 9, "score": 183982.2697268443 }, { "content": "fn changelog_endpoint(id: &str, flavor: &Flavor) -> String {\n\n match flavor {\n\n Flavor::Retail | Flavor::RetailPtr | Flavor::RetailBeta => match id {\n\n \"-1\" => \"https://www.tukui.org/ui/tukui/changelog\".to_owned(),\n\n \"-2\" => \"https://www.tukui.org/ui/elvui/changelog\".to_owned(),\n\n _ => format!(\"https://www.tukui.org/addons.php?id={}&changelog\", id),\n\n },\n\n Flavor::ClassicEra => format!(\n\n \"https://www.tukui.org/classic-addons.php?id={}&changelog\",\n\n id\n\n ),\n\n Flavor::ClassicTbc | Flavor::ClassicPtr | Flavor::ClassicBeta => format!(\n\n \"https://www.tukui.org/classic-tbc-addons.php?id={}&changelog\",\n\n id\n\n ),\n\n }\n\n}\n\n\n\npub(crate) async fn batch_fetch_repo_packages(\n\n flavor: Flavor,\n", "file_path": "crates/core/src/repository/backend/tukui.rs", "rank": 10, "score": 182010.183085486 }, { "content": "/// Return the tukui API endpoint.\n\nfn api_endpoint(id: &str, flavor: &Flavor) -> String {\n\n format!(\n\n \"https://hub.wowup.io/tukui/{}/{}\",\n\n format_flavor(flavor),\n\n id\n\n )\n\n}\n\n\n", "file_path": "crates/core/src/repository/backend/tukui.rs", "rank": 11, "score": 182010.183085486 }, { "content": "pub fn spawn_sys_tray(enabled: bool, start_closed_to_tray: bool) {\n\n thread::spawn(move || {\n\n let (sender, receiver) = 
sync_channel(1);\n\n let _ = TRAY_SENDER.set(sender);\n\n\n\n // Stores the window handle so we can post messages to its queue\n\n let mut window: Option<WindowHandle> = None;\n\n\n\n // Spawn tray initially if enabled\n\n if enabled {\n\n unsafe { create_window(false, start_closed_to_tray) };\n\n }\n\n\n\n // Make GUI visible if we don't start to tray\n\n if !start_closed_to_tray {\n\n GUI_VISIBLE.store(true, Ordering::Relaxed);\n\n }\n\n\n\n while let Ok(msg) = receiver.recv() {\n\n match msg {\n", "file_path": "src/tray/mod.rs", "rank": 12, "score": 179062.44580376116 }, { "content": "/// Deletes all saved varaible files correlating to `[AddonFolder]`.\n\npub fn delete_saved_variables(addon_folders: &[AddonFolder], wtf_path: &Path) -> Result<()> {\n\n for entry in WalkDir::new(&wtf_path)\n\n .into_iter()\n\n .filter_map(std::result::Result::ok)\n\n {\n\n let path = entry.path();\n\n let parent_name = path\n\n .parent()\n\n .and_then(|a| a.file_name())\n\n .and_then(|a| a.to_str());\n\n\n\n if parent_name == Some(\"SavedVariables\") {\n\n let file_name = path\n\n .file_stem()\n\n .and_then(|a| a.to_str())\n\n .map(|a| a.trim_end_matches(\".bak\"));\n\n\n\n // NOTE: Will reject \"Foobar_<invalid utf8>\".\n\n if let Some(file_name_str) = file_name {\n\n for folder in addon_folders {\n", "file_path": "crates/core/src/fs/addon.rs", "rank": 13, "score": 164996.89991480674 }, { "content": "fn save_column_configs(ajour: &mut Ajour) {\n\n let my_addons_columns: Vec<_> = ajour\n\n .header_state\n\n .columns\n\n .iter()\n\n .map(ColumnConfigV2::from)\n\n .collect();\n\n\n\n let catalog_columns: Vec<_> = ajour\n\n .catalog_header_state\n\n .columns\n\n .iter()\n\n .map(ColumnConfigV2::from)\n\n .collect();\n\n\n\n let aura_columns: Vec<_> = ajour\n\n .aura_header_state\n\n .columns\n\n .iter()\n\n .map(ColumnConfigV2::from)\n", "file_path": "src/gui/update.rs", "rank": 14, "score": 162274.47676609724 }, { "content": "/// Returns a localized `timeago::Formatter`.\n\n/// If 
user has chosen a language whic his not supported by `timeago` we fallback to english.\n\npub fn localized_timeago_formatter() -> timeago::Formatter<Box<dyn timeago::Language>> {\n\n let lang = LANG.get().expect(\"LANG not set\").read().unwrap();\n\n let isolang = isolang::Language::from_locale(&lang).unwrap();\n\n\n\n // this step might fail if timeago does not support the chosen language.\n\n // In that case we fallback to `en_US`.\n\n if let Some(timeago_lang) = timeago::from_isolang(isolang) {\n\n timeago::Formatter::with_language(timeago_lang)\n\n } else {\n\n timeago::Formatter::with_language(Box::new(timeago::English))\n\n }\n\n}\n", "file_path": "src/localization.rs", "rank": 15, "score": 152427.01449726295 }, { "content": "fn sort_auras(auras: &mut [Aura], sort_direction: SortDirection, column_key: AuraColumnKey) {\n\n match (column_key, sort_direction) {\n\n (AuraColumnKey::Title, SortDirection::Asc) => {\n\n auras.sort_by(|a, b| a.name().to_lowercase().cmp(&b.name().to_lowercase()));\n\n }\n\n (AuraColumnKey::Title, SortDirection::Desc) => {\n\n auras.sort_by(|a, b| {\n\n a.name()\n\n .to_lowercase()\n\n .cmp(&b.name().to_lowercase())\n\n .reverse()\n\n });\n\n }\n\n (AuraColumnKey::LocalVersion, SortDirection::Asc) => {\n\n auras.sort_by(|a, b| {\n\n a.installed_symver()\n\n .cmp(&b.installed_symver())\n\n .then_with(|| a.name().cmp(&b.name()))\n\n });\n\n }\n", "file_path": "src/gui/update.rs", "rank": 16, "score": 148539.55357054572 }, { "content": "pub fn update_all_addons() -> Result<()> {\n\n log::info!(\"Checking for addon updates...\");\n\n\n\n task::block_on(async {\n\n let config = load_config().await?;\n\n let global_release_channel = config.addons.global_release_channel;\n\n\n\n let fingerprint_cache: Arc<Mutex<_>> =\n\n Arc::new(Mutex::new(load_fingerprint_cache().await?));\n\n\n\n let addon_cache: Arc<Mutex<_>> = Arc::new(Mutex::new(load_addon_cache().await?));\n\n\n\n let mut addons_to_update = vec![];\n\n\n\n // Update addons for known 
flavors\n\n let flavors = config.wow.directories.keys().collect::<Vec<_>>();\n\n for flavor in flavors {\n\n // Returns None if no path is found\n\n let addon_directory = match config.get_addon_directory_for_flavor(flavor) {\n\n Some(path) => path,\n", "file_path": "src/command/update_addons.rs", "rank": 17, "score": 146277.107031553 }, { "content": "fn color_to_hex(color: &iced_native::Color) -> String {\n\n let mut color_str = String::from(\"#\");\n\n\n\n let iced_native::Color { r, g, b, .. } = color;\n\n color_str.push_str(&format!(\"{:02X}\", (r * 255.0) as u8));\n\n color_str.push_str(&format!(\"{:02X}\", (g * 255.0) as u8));\n\n color_str.push_str(&format!(\"{:02X}\", (b * 255.0) as u8));\n\n\n\n color_str\n\n}\n\n\n\nimpl PartialEq for Theme {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.name == other.name\n\n }\n\n}\n\n\n\nimpl PartialOrd for Theme {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.name.cmp(&other.name))\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 18, "score": 145744.42376840452 }, { "content": "/// Finds a case sensitive path from an insensitive path\n\n/// Useful if, say, a WoW addon points to a local path in a different case but you're not on Windows\n\nfn find_file<P>(path: P) -> Option<PathBuf>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut current = path.as_ref();\n\n let mut to_finds = Vec::new();\n\n\n\n // Find first parent that exists\n\n while !current.exists() {\n\n to_finds.push(current.file_name()?);\n\n current = current.parent()?;\n\n }\n\n\n\n // Match to finds\n\n let mut current = current.to_path_buf();\n\n to_finds.reverse();\n\n for to_find in to_finds {\n\n let mut children = current.read_dir().ok()?;\n\n let lower = to_find.to_str()?.to_ascii_lowercase();\n\n let found = children\n", "file_path": "crates/core/src/parse.rs", "rank": 19, "score": 145539.7757550978 }, { "content": "fn str_to_flavor(s: &str) -> Result<Flavor, &'static str> {\n\n match s {\n\n \"retail\" => 
Ok(Flavor::Retail),\n\n \"beta\" => Ok(Flavor::RetailBeta),\n\n \"ptr\" => Ok(Flavor::RetailPtr),\n\n \"classic_tbc\" => Ok(Flavor::ClassicTbc),\n\n \"classic_era\" => Ok(Flavor::ClassicEra),\n\n \"classic_ptr\" => Ok(Flavor::ClassicPtr),\n\n \"classic_beta\" => Ok(Flavor::ClassicBeta),\n\n _ => Err(\"valid values are ['retail','ptr','beta','classic_tbc','classic_era','classic_ptr','classic_beta']\"),\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum BackupFolder {\n\n All,\n\n AddOns,\n\n Wtf,\n\n Config,\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 20, "score": 143640.53330392778 }, { "content": "pub fn update_both() -> Result<()> {\n\n update_all_addons()?;\n\n\n\n update_all_auras()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/command/mod.rs", "rank": 21, "score": 141792.3310305357 }, { "content": "#[allow(clippy::unnecessary_operation)]\n\nfn setup_logger(is_cli: bool, is_debug: bool) -> Result<()> {\n\n let mut logger = fern::Dispatch::new()\n\n .format(|out, message, record| {\n\n out.finish(format_args!(\n\n \"{} [{}][{}] {}\",\n\n chrono::Local::now().format(\"%H:%M:%S%.3f\"),\n\n record.target(),\n\n record.level(),\n\n message\n\n ))\n\n })\n\n .level(log::LevelFilter::Off)\n\n .level_for(\"panic\", log::LevelFilter::Error)\n\n .level_for(\"ajour\", log::LevelFilter::Trace);\n\n\n\n if !is_cli {\n\n logger = logger.level_for(\"ajour_core\", log::LevelFilter::Trace);\n\n }\n\n\n\n if is_cli || is_debug {\n", "file_path": "src/main.rs", "rank": 22, "score": 141317.95517716644 }, { "content": "fn hex_to_color(hex: &str) -> Option<iced_native::Color> {\n\n if hex.len() == 7 {\n\n let hash = &hex[0..1];\n\n let r = u8::from_str_radix(&hex[1..3], 16);\n\n let g = u8::from_str_radix(&hex[3..5], 16);\n\n let b = u8::from_str_radix(&hex[5..7], 16);\n\n\n\n return match (hash, r, g, b) {\n\n (\"#\", Ok(r), Ok(g), Ok(b)) => Some(iced_native::Color {\n\n r: r as f32 / 255.0,\n\n g: g as f32 / 255.0,\n\n b: b as f32 / 255.0,\n\n a: 1.0,\n\n }),\n\n _ => 
None,\n\n };\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 23, "score": 139712.4864591718 }, { "content": "#[derive(Debug, Clone)]\n\nstruct MaybeAuraDisplay(Option<AuraDisplay>);\n\n\n\nimpl MaybeAuraDisplay {\n\n fn into_inner(self) -> Option<AuraDisplay> {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<'lua> FromLua<'lua> for MaybeAuraDisplay {\n\n fn from_lua(lua_value: Value<'lua>, _lua: &'lua Lua) -> Result<Self, mlua::Error> {\n\n if let Value::Table(table) = lua_value {\n\n if let Some(url) = table.get::<_, Option<String>>(\"url\")? {\n\n if let Ok(uri) = url.parse::<http::Uri>() {\n\n let mut path = uri.path().split_terminator('/');\n\n path.next();\n\n\n\n let slug = path.next();\n\n let version = path.next().map(str::parse::<u16>).and_then(Result::ok);\n\n\n\n if let Some(slug) = slug {\n", "file_path": "crates/weak_auras/src/lib.rs", "rank": 24, "score": 138673.07955420352 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn menu_container<'a>(\n\n color_palette: ColorPalette,\n\n flavor: Flavor,\n\n update_all_button_state: &'a mut button::State,\n\n refresh_button_state: &'a mut button::State,\n\n addons_search_state: &'a mut AddonsSearchState,\n\n state: &HashMap<Mode, State>,\n\n addons: &[Addon],\n\n config: &Config,\n\n) -> Container<'a, Message> {\n\n // MyAddons state.\n\n let state = state\n\n .get(&Mode::MyAddons(flavor))\n\n .cloned()\n\n .unwrap_or_default();\n\n\n\n // A row contain general settings.\n\n let mut settings_row = Row::new().align_items(Align::Center);\n\n\n\n let mut update_all_button = Button::new(\n", "file_path": "src/gui/element/my_addons.rs", "rank": 25, "score": 138630.49095714086 }, { "content": "fn handle_self_update_temp(cleanup_path: &Path) -> Result<()> {\n\n #[cfg(not(target_os = \"linux\"))]\n\n let current_bin = env::current_exe()?;\n\n\n\n #[cfg(target_os = \"linux\")]\n\n let current_bin =\n\n PathBuf::from(env::var(\"APPIMAGE\").context(\"error getting APPIMAGE env 
variable\")?);\n\n\n\n // Fix for self updating pre 0.5.4 to >= 0.5.4\n\n //\n\n // Pre 0.5.4, `cleanup_path` is actually the file name of the main bin name that\n\n // got passed via the CLI in the self update process. We want to rename the\n\n // current bin to that bin name. This was passed as a string of just the file\n\n // name, so we want to make an actual full path out of it first.\n\n if current_bin\n\n .file_name()\n\n .unwrap_or_default()\n\n .to_str()\n\n .unwrap_or_default()\n\n .starts_with(\"tmp_\")\n", "file_path": "src/main.rs", "rank": 26, "score": 136630.39105804017 }, { "content": "pub fn titles_row_header<'a>(\n\n color_palette: ColorPalette,\n\n addons: &[Addon],\n\n header_state: &'a mut header::State,\n\n column_state: &'a mut [ColumnState],\n\n previous_column_key: Option<ColumnKey>,\n\n previous_sort_direction: Option<SortDirection>,\n\n) -> Header<'a, Message> {\n\n // A row containing titles above the addon rows.\n\n let mut row_titles = vec![];\n\n\n\n for column in column_state.iter_mut().filter(|c| !c.hidden) {\n\n let column_key = column.key;\n\n\n\n let row_title = row_title(\n\n column_key,\n\n previous_column_key,\n\n previous_sort_direction,\n\n &column.key.title(),\n\n );\n", "file_path": "src/gui/element/my_addons.rs", "rank": 27, "score": 134964.6811721555 }, { "content": "/// Deletes an Addon and all dependencies from disk.\n\npub fn delete_addons(addon_folders: &[AddonFolder]) -> Result<()> {\n\n for folder in addon_folders {\n\n let path = &folder.path;\n\n if path.exists() {\n\n remove_dir_all(path)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/core/src/fs/addon.rs", "rank": 28, "score": 134197.80517451777 }, { "content": "fn row_title<T: PartialEq>(\n\n column_key: T,\n\n previous_column_key: Option<T>,\n\n previous_sort_direction: Option<SortDirection>,\n\n title: &str,\n\n) -> String {\n\n if Some(column_key) == previous_column_key {\n\n match previous_sort_direction {\n\n Some(SortDirection::Asc) => 
format!(\"{} ▲\", title),\n\n Some(SortDirection::Desc) => format!(\"{} ▼\", title),\n\n _ => title.to_string(),\n\n }\n\n } else {\n\n title.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/gui/element/my_addons.rs", "rank": 29, "score": 132008.0350956434 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn data_row_container<'a, 'b>(\n\n color_palette: ColorPalette,\n\n addon: &'a mut Addon,\n\n is_addon_expanded: bool,\n\n expand_type: &'a ExpandType,\n\n config: &Config,\n\n column_config: &'b [(ColumnKey, Length, bool)],\n\n is_odd: Option<bool>,\n\n) -> TableRow<'a, Message> {\n\n let default_height = Length::Units(26);\n\n let default_row_height = 26;\n\n\n\n let mut row_containers = vec![];\n\n\n\n let author = addon.author().map(str::to_string);\n\n let game_version = addon.game_version().map(str::to_string);\n\n let notes = addon.notes().map(str::to_string);\n\n let website_url = addon.website_url().map(str::to_string);\n\n let changelog_url = addon.changelog_url(config.addons.global_release_channel);\n\n let repository_kind = addon.repository_kind();\n", "file_path": "src/gui/element/my_addons.rs", "rank": 30, "score": 130705.52568282167 }, { "content": "/// Takes a `&str` and formats it into a proper\n\n/// World of Warcraft release version.\n\n///\n\n/// Eg. 
90001 would be 9.0.1.\n\npub fn format_interface_into_game_version(interface: &str) -> String {\n\n if interface.len() == 5 {\n\n let major = interface[..1].parse::<u8>();\n\n let minor = interface[1..3].parse::<u8>();\n\n let patch = interface[3..5].parse::<u8>();\n\n if let (Ok(major), Ok(minor), Ok(patch)) = (major, minor, patch) {\n\n return format!(\"{}.{}.{}\", major, minor, patch);\n\n }\n\n }\n\n\n\n interface.to_owned()\n\n}\n\n\n\n/// Takes a `&str` and strips any non-digit.\n\n/// This is used to unify and compare addon versions:\n\n///\n\n/// A string looking like 213r323 would return 213323.\n\n/// A string looking like Rematch_4_10_15.zip would return 41015.\n\npub(crate) fn strip_non_digits(string: &str) -> String {\n\n let re = Regex::new(r\"[\\D]\").unwrap();\n", "file_path": "crates/core/src/utility.rs", "rank": 31, "score": 126904.60603991887 }, { "content": "/// Defines a serializable struct that should persist on the filesystem inside the\n\n/// Ajour config directory.\n\npub trait PersistentData: DeserializeOwned + Serialize {\n\n /// Only method required to implement PersistentData on an object. Always relative to\n\n /// the config folder for Ajour.\n\n fn relative_path() -> PathBuf;\n\n\n\n /// Returns the full file path. 
Will create any parent directories that don't\n\n /// exist.\n\n fn path() -> Result<PathBuf> {\n\n let path = config_dir().join(Self::relative_path());\n\n\n\n if let Some(dir) = path.parent() {\n\n std::fs::create_dir_all(dir)?;\n\n }\n\n\n\n Ok(path)\n\n }\n\n\n\n /// Load from `PersistentData::path()`.\n\n fn load() -> Result<Self> {\n\n let path = Self::path()?;\n", "file_path": "crates/core/src/fs/save.rs", "rank": 32, "score": 122261.04209201856 }, { "content": "/// Remove a file, retrying if the operation fails because of permissions\n\n///\n\n/// Will retry for ~30 seconds with longer and longer delays between each, to allow for virus scan\n\n/// and other automated operations to complete.\n\npub fn remove_file<P>(path: P) -> io::Result<()>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n // 21 Fibonacci steps starting at 1 ms is ~28 seconds total\n\n // See https://github.com/rust-lang/rustup/pull/1873 where this was used by Rustup to work around\n\n // virus scanning file locks\n\n let path = path.as_ref();\n\n\n\n retry(\n\n Fibonacci::from_millis(1).take(21),\n\n || match fs::remove_file(path) {\n\n Ok(_) => OperationResult::Ok(()),\n\n Err(e) => match e.kind() {\n\n io::ErrorKind::PermissionDenied => OperationResult::Retry(e),\n\n _ => OperationResult::Err(e),\n\n },\n\n },\n\n )\n\n .map_err(|e| match e {\n", "file_path": "crates/core/src/utility.rs", "rank": 33, "score": 120148.22972486755 }, { "content": "struct Default;\n\n\n\nimpl StyleSheet for Default {\n\n fn style(&self) -> Style {\n\n Style {\n\n text_color: None,\n\n background: None,\n\n border_radius: 0.0,\n\n border_width: 0.0,\n\n border_color: Color::TRANSPARENT,\n\n offset_right: 0.0,\n\n offset_left: 0.0,\n\n }\n\n }\n\n\n\n fn hovered(&self) -> Style {\n\n Style {\n\n background: Some(Background::Color(Color::from_rgb(0.90, 0.90, 0.90))),\n\n ..self.style()\n\n }\n", "file_path": "crates/widgets/src/style/table_row.rs", "rank": 34, "score": 118990.15634848573 }, { "content": "pub fn main() 
{\n\n let opts_result = cli::get_opts();\n\n\n\n #[cfg(debug_assertions)]\n\n let is_debug = true;\n\n #[cfg(not(debug_assertions))]\n\n let is_debug = false;\n\n\n\n // If this is a clap error, we map to None since we are going to exit and display\n\n // an error message anyway and this value won't matter. If it's not an error,\n\n // the underlying `command` will drive this variable. If a `command` is passed\n\n // on the command line, Ajour functions as a CLI instead of launching the GUI.\n\n let is_cli = opts_result\n\n .as_ref()\n\n .map(|o| &o.command)\n\n .unwrap_or(&None)\n\n .is_some();\n\n\n\n // This function validates whether or not we need to exit and print any message\n\n // due to arguments passed on the command line. If not, it will return a\n", "file_path": "src/main.rs", "rank": 35, "score": 117011.8542097368 }, { "content": "// Newtype on iced::Color so we can impl Deserialzer for it\n\nstruct Color(iced_native::Color);\n\n\n\nmod serde_color {\n\n use super::{color_to_hex, hex_to_color, Color};\n\n use serde::de::{self, Error, Unexpected, Visitor};\n\n use serde::ser;\n\n use std::fmt;\n\n\n\n pub(crate) fn deserialize<'de, D>(deserializer: D) -> Result<iced_native::Color, D::Error>\n\n where\n\n D: de::Deserializer<'de>,\n\n {\n\n struct ColorVisitor;\n\n\n\n impl<'de> Visitor<'de> for ColorVisitor {\n\n type Value = Color;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a hex string in the format of '#09ACDF'\")\n\n }\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 36, "score": 116542.53470892407 }, { "content": "pub fn backup(\n\n backup_folder: BackupFolder,\n\n destination: PathBuf,\n\n flavors: Vec<Flavor>,\n\n compression_format: CompressionFormat,\n\n) -> Result<()> {\n\n task::block_on(async {\n\n let config = load_config().await?;\n\n\n\n let flavors = if flavors.is_empty() {\n\n Flavor::ALL.to_vec()\n\n } else {\n\n flavors\n\n };\n\n\n\n if !destination.exists() {\n\n 
create_dir(destination.clone())?;\n\n }\n\n\n\n if !destination.is_dir() {\n", "file_path": "src/command/backup.rs", "rank": 37, "score": 113967.73781034716 }, { "content": "fn process_already_running() -> bool {\n\n let old_process = if let Ok(process) = Process::load() {\n\n process\n\n } else {\n\n return false;\n\n };\n\n\n\n unsafe {\n\n let current_pid = GetCurrentProcessId();\n\n\n\n // In case new process somehow got recycled PID of old process\n\n if current_pid == old_process.pid {\n\n return false;\n\n }\n\n\n\n let handle = OpenProcess(\n\n SYNCHRONIZE | PROCESS_QUERY_LIMITED_INFORMATION,\n\n 0,\n\n old_process.pid,\n\n );\n", "file_path": "src/process.rs", "rank": 38, "score": 111690.13748585152 }, { "content": "pub fn avoid_multiple_instances() {\n\n if process_already_running() {\n\n log::info!(\"Another instance of Ajour is already running. Exiting...\");\n\n std::process::exit(0);\n\n } else {\n\n // Otherwise this is the only instance. Save info about this process to the\n\n // pid file so future launches of Ajour can detect this running process.\n\n save_current_process_file();\n\n }\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 39, "score": 111143.66774232595 }, { "content": "#[allow(unused_variables)]\n\npub fn validate_opts_or_exit(\n\n opts_result: Result<Opts, clap::Error>,\n\n is_cli: bool,\n\n is_debug: bool,\n\n) -> Opts {\n\n // If an error, we need to setup the AttachConsole fix for Windows release\n\n // so we can exit and display the error message to the user.\n\n let is_opts_error = opts_result.is_err();\n\n\n\n // Workaround to output to console even though we compile with windows_subsystem = \"windows\"\n\n // in release mode\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n if (is_cli || is_opts_error) && !is_debug {\n\n use winapi::um::wincon::{AttachConsole, ATTACH_PARENT_PROCESS};\n\n\n\n unsafe {\n\n AttachConsole(ATTACH_PARENT_PROCESS);\n\n }\n\n }\n", "file_path": "src/cli.rs", "rank": 40, "score": 111143.66774232595 }, { 
"content": "/// Ajour user-agent.\n\nfn user_agent() -> String {\n\n format!(\"ajour/{}\", env!(\"CARGO_PKG_VERSION\"))\n\n}\n\n\n\n/// Generic request function.\n\npub async fn request_async<T: ToString>(\n\n url: T,\n\n headers: Vec<(&str, &str)>,\n\n timeout: Option<u64>,\n\n) -> Result<Response<isahc::AsyncBody>, DownloadError> {\n\n // Sometimes a download url has a space.\n\n let url = url.to_string().replace(\" \", \"%20\");\n\n\n\n let mut request = Request::builder().uri(url);\n\n\n\n for (name, value) in headers {\n\n request = request.header(name, value);\n\n }\n\n\n\n request = request.header(\"user-agent\", &user_agent());\n", "file_path": "crates/core/src/network.rs", "rank": 41, "score": 108961.16579458308 }, { "content": "pub fn handle_message(ajour: &mut Ajour, message: Message) -> Result<Command<Message>> {\n\n match message {\n\n Message::CachesLoaded(result) => {\n\n log::debug!(\"Message::CachesLoaded(error: {})\", result.is_err());\n\n\n\n if let Ok((fingerprint_cache, addon_cache)) = result {\n\n ajour.fingerprint_cache = Some(Arc::new(Mutex::new(fingerprint_cache)));\n\n ajour.addon_cache = Some(Arc::new(Mutex::new(addon_cache)));\n\n }\n\n\n\n return Ok(Command::perform(async {}, Message::Parse));\n\n }\n\n Message::Parse(_) => {\n\n log::debug!(\"Message::Parse\");\n\n\n\n // Begin to parse addon folder(s).\n\n let mut commands = vec![];\n\n\n\n // If a backup directory is selected, find the latest backup\n\n if let Some(dir) = &ajour.config.backup_directory {\n", "file_path": "src/gui/update.rs", "rank": 42, "score": 108711.56777236698 }, { "content": "pub fn data_container<'a>(\n\n color_palette: ColorPalette,\n\n release: &Option<Release>,\n\n scrollable_state: &'a mut scrollable::State,\n\n website_button_state: &'a mut button::State,\n\n donation_button_state: &'a mut button::State,\n\n) -> Container<'a, Message> {\n\n let ajour_title = Text::new(localized_string(\"ajour\")).size(DEFAULT_HEADER_FONT_SIZE);\n\n let 
ajour_title_container =\n\n Container::new(ajour_title).style(style::BrightBackgroundContainer(color_palette));\n\n\n\n let changelog_title_text = Text::new(if let Some(release) = release {\n\n let mut vars = HashMap::new();\n\n // TODO (casperstorm): change \"addon\" to \"tag\" or \"version\".\n\n vars.insert(\"addon\".to_string(), &release.tag_name);\n\n let fmt = localized_string(\"changelog-for\");\n\n strfmt(&fmt, &vars).unwrap()\n\n } else {\n\n localized_string(\"changelog\")\n\n })\n", "file_path": "src/gui/element/about.rs", "rank": 43, "score": 105185.581630793 }, { "content": "type Etag = Option<String>;\n\n\n\nasync fn get_catalog_addons_from(\n\n url: &str,\n\n cached_etag: Etag,\n\n) -> Result<Option<(Etag, Vec<CatalogAddon>)>, DownloadError> {\n\n let mut headers = vec![];\n\n if let Some(etag) = cached_etag.as_deref() {\n\n headers.push((\"If-None-Match\", etag));\n\n }\n\n\n\n let mut response = request_async(url, headers, None).await?;\n\n\n\n match response.status().as_u16() {\n\n 200 => {\n\n log::debug!(\"Downloaded latest catalog from {}\", url);\n\n\n\n let etag = response\n\n .headers()\n\n .get(\"etag\")\n", "file_path": "crates/core/src/catalog.rs", "rank": 44, "score": 103708.7409150051 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn menu_container<'a>(\n\n color_palette: ColorPalette,\n\n flavor: Flavor,\n\n update_all_button_state: &'a mut button::State,\n\n refresh_button_state: &'a mut button::State,\n\n state: &HashMap<Mode, State>,\n\n num_auras: usize,\n\n updates_available: bool,\n\n is_updating: bool,\n\n updates_queued: bool,\n\n accounts_picklist_state: &'a mut pick_list::State<String>,\n\n accounts: &'a [String],\n\n chosen_account: Option<String>,\n\n) -> Container<'a, Message> {\n\n // MyWeakAuras state.\n\n let state = state\n\n .get(&Mode::MyWeakAuras(flavor))\n\n .cloned()\n\n .unwrap_or_default();\n\n\n", "file_path": "src/gui/element/my_weakauras.rs", "rank": 45, "score": 102558.53082751094 }, { 
"content": "#[allow(clippy::too_many_arguments)]\n\npub fn data_container<'a>(\n\n color_palette: ColorPalette,\n\n mode: &Mode,\n\n state: &HashMap<Mode, State>,\n\n error: &Option<anyhow::Error>,\n\n config: &Config,\n\n updatable_addons: usize,\n\n updatable_wagos: usize,\n\n settings_button_state: &'a mut button::State,\n\n about_button_state: &'a mut button::State,\n\n catalog_mode_btn_state: &'a mut button::State,\n\n install_mode_btn_state: &'a mut button::State,\n\n self_update_state: &'a mut SelfUpdateState,\n\n flavor_picklist_state: &'a mut pick_list::State<Flavor>,\n\n weak_auras_is_installed: bool,\n\n) -> Container<'a, Message> {\n\n let flavor = config.wow.flavor;\n\n let mut valid_flavors = config\n\n .wow\n\n .directories\n", "file_path": "src/gui/element/menu.rs", "rank": 46, "score": 102558.53082751094 }, { "content": "// TODO (casperstorm): I would like to make this only handle title and description.\n\npub fn data_container<'a>(\n\n color_palette: ColorPalette,\n\n title: &str,\n\n description: &str,\n\n onboarding_directory_btn_state: Option<&'a mut button::State>,\n\n) -> Container<'a, Message> {\n\n let title = Text::new(title)\n\n .size(DEFAULT_FONT_SIZE)\n\n .width(Length::Fill)\n\n .horizontal_alignment(HorizontalAlignment::Center);\n\n let title_container = Container::new(title)\n\n .width(Length::Fill)\n\n .style(style::BrightBackgroundContainer(color_palette));\n\n\n\n let description = Text::new(description)\n\n .size(DEFAULT_FONT_SIZE)\n\n .width(Length::Fill)\n\n .horizontal_alignment(HorizontalAlignment::Center);\n\n let description_container = Container::new(description)\n\n .width(Length::Fill)\n", "file_path": "src/gui/element/status.rs", "rank": 47, "score": 102558.53082751094 }, { "content": "/// Get `Default` and save it.\n\nfn get_default_and_save<T: PersistentData + Default>() -> Result<T> {\n\n let data = Default::default();\n\n\n\n <T as PersistentData>::save(&data)?;\n\n\n\n Ok(data)\n\n}\n", "file_path": 
"crates/core/src/fs/save.rs", "rank": 48, "score": 100834.42498965027 }, { "content": "#[derive(Debug)]\n\nstruct TrayState {\n\n gui_handle: Option<HWND>,\n\n gui_hidden: bool,\n\n about_shown: bool,\n\n close_gui: bool,\n\n show_balloon: bool,\n\n}\n\n\n\nunsafe impl Send for TrayState {}\n\nunsafe impl Sync for TrayState {}\n\n\n\npub struct WindowHandle(HWND);\n\n\n\nunsafe impl Send for WindowHandle {}\n\nunsafe impl Sync for WindowHandle {}\n\n\n\n#[macro_export]\n\nmacro_rules! str_to_wide {\n\n ($str:expr) => {{\n\n $str.encode_utf16()\n\n .chain(std::iter::once(0))\n\n .collect::<Vec<_>>()\n\n }};\n\n}\n\n\n", "file_path": "src/tray/mod.rs", "rank": 49, "score": 100151.83340302182 }, { "content": "pub fn titles_row_header<'a>(\n\n color_palette: ColorPalette,\n\n auras: &[Aura],\n\n header_state: &'a mut header::State,\n\n column_state: &'a mut [AuraColumnState],\n\n previous_column_key: Option<AuraColumnKey>,\n\n previous_sort_direction: Option<SortDirection>,\n\n) -> Header<'a, Message> {\n\n // A row containing titles above the addon rows.\n\n let mut row_titles = vec![];\n\n\n\n for column in column_state.iter_mut().filter(|c| !c.hidden) {\n\n let column_key = column.key;\n\n\n\n let row_title = row_title(\n\n column_key,\n\n previous_column_key,\n\n previous_sort_direction,\n\n &column.key.title(),\n\n );\n", "file_path": "src/gui/element/my_weakauras.rs", "rank": 50, "score": 100108.57686015975 }, { "content": "pub fn update_all_auras() -> Result<()> {\n\n log::info!(\"Checking for WeakAura and Plater updates...\");\n\n\n\n task::block_on(async {\n\n let config = load_config().await?;\n\n\n\n let mut flavors_setup = 0usize;\n\n\n\n let flavors = config.wow.directories.keys().collect::<Vec<_>>();\n\n for flavor in flavors {\n\n if let Some(account) = config.weak_auras_account.get(flavor).cloned() {\n\n flavors_setup += 1;\n\n\n\n log::info!(\n\n \"{} - Parsing WeakAura and Plater for account {}\",\n\n flavor,\n\n &account\n\n );\n\n\n\n let wtf_path = 
match config.get_wtf_directory_for_flavor(flavor) {\n", "file_path": "src/command/update_weakauras.rs", "rank": 51, "score": 100108.57686015975 }, { "content": "pub fn titles_row_header<'a>(\n\n color_palette: ColorPalette,\n\n catalog: &Catalog,\n\n header_state: &'a mut header::State,\n\n column_state: &'a mut [CatalogColumnState],\n\n previous_column_key: Option<CatalogColumnKey>,\n\n previous_sort_direction: Option<SortDirection>,\n\n) -> Header<'a, Message> {\n\n // A row containing titles above the addon rows.\n\n let mut row_titles = vec![];\n\n\n\n for column in column_state.iter_mut().filter(|c| !c.hidden) {\n\n let column_key = column.key;\n\n\n\n let row_title = row_title(\n\n column_key,\n\n previous_column_key,\n\n previous_sort_direction,\n\n &column.key.title(),\n\n );\n", "file_path": "src/gui/element/catalog.rs", "rank": 52, "score": 100108.57686015975 }, { "content": "fn sort_addons(\n\n addons: &mut [Addon],\n\n global_release_channel: GlobalReleaseChannel,\n\n sort_direction: SortDirection,\n\n column_key: ColumnKey,\n\n) {\n\n match (column_key, sort_direction) {\n\n (ColumnKey::Title, SortDirection::Asc) => {\n\n addons.sort_by(|a, b| a.title().to_lowercase().cmp(&b.title().to_lowercase()));\n\n }\n\n (ColumnKey::Title, SortDirection::Desc) => {\n\n addons.sort_by(|a, b| {\n\n a.title()\n\n .to_lowercase()\n\n .cmp(&b.title().to_lowercase())\n\n .reverse()\n\n .then_with(|| {\n\n a.relevant_release_package(global_release_channel)\n\n .cmp(&b.relevant_release_package(global_release_channel))\n\n })\n", "file_path": "src/gui/update.rs", "rank": 53, "score": 100014.51058091066 }, { "content": "pub fn data_container<'a, 'b>(\n\n color_palette: ColorPalette,\n\n scrollable_state: &'a mut scrollable::State,\n\n config: &Config,\n\n theme_state: &'a mut ThemeState,\n\n scale_state: &'a mut ScaleState,\n\n backup_state: &'a mut BackupState,\n\n default_backup_compression_format: &'a mut pick_list::State<CompressionFormat>,\n\n column_settings: &'a mut 
ColumnSettings,\n\n column_config: &'b [(ColumnKey, Length, bool)],\n\n catalog_column_settings: &'a mut CatalogColumnSettings,\n\n catalog_column_config: &'b [(CatalogColumnKey, Length, bool)],\n\n open_config_dir_button_state: &'a mut button::State,\n\n self_update_channel_state: &'a mut SelfUpdateChannelState,\n\n default_addon_release_channel_picklist_state: &'a mut pick_list::State<GlobalReleaseChannel>,\n\n reset_columns_button_state: &'a mut button::State,\n\n localization_picklist_state: &'a mut pick_list::State<Language>,\n\n wow_directories: &'a mut Vec<WowDirectoryState>,\n\n) -> Container<'a, Message> {\n\n let mut scrollable = Scrollable::new(scrollable_state)\n", "file_path": "src/gui/element/settings.rs", "rank": 54, "score": 98299.37533817711 }, { "content": "fn row_title<T: PartialEq>(\n\n column_key: T,\n\n previous_column_key: Option<T>,\n\n previous_sort_direction: Option<SortDirection>,\n\n title: &str,\n\n) -> String {\n\n if Some(column_key) == previous_column_key {\n\n match previous_sort_direction {\n\n Some(SortDirection::Asc) => format!(\"{} ▲\", title),\n\n Some(SortDirection::Desc) => format!(\"{} ▼\", title),\n\n _ => title.to_string(),\n\n }\n\n } else {\n\n title.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/gui/element/catalog.rs", "rank": 55, "score": 98288.49499721653 }, { "content": "fn row_title<T: PartialEq>(\n\n column_key: T,\n\n previous_column_key: Option<T>,\n\n previous_sort_direction: Option<SortDirection>,\n\n title: &str,\n\n) -> String {\n\n if Some(column_key) == previous_column_key {\n\n match previous_sort_direction {\n\n Some(SortDirection::Asc) => format!(\"{} ▲\", title),\n\n Some(SortDirection::Desc) => format!(\"{} ▼\", title),\n\n _ => title.to_string(),\n\n }\n\n } else {\n\n title.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/gui/element/my_weakauras.rs", "rank": 56, "score": 98288.49499721653 }, { "content": "fn sort_catalog_addons(\n\n addons: &mut [CatalogRow],\n\n sort_direction: 
SortDirection,\n\n column_key: CatalogColumnKey,\n\n flavor: &Flavor,\n\n) {\n\n match (column_key, sort_direction) {\n\n (CatalogColumnKey::Title, SortDirection::Asc) => {\n\n addons.sort_by(|a, b| a.addon.name.cmp(&b.addon.name));\n\n }\n\n (CatalogColumnKey::Title, SortDirection::Desc) => {\n\n addons.sort_by(|a, b| a.addon.name.cmp(&b.addon.name).reverse());\n\n }\n\n (CatalogColumnKey::Description, SortDirection::Asc) => {\n\n addons.sort_by(|a, b| a.addon.summary.cmp(&b.addon.summary));\n\n }\n\n (CatalogColumnKey::Description, SortDirection::Desc) => {\n\n addons.sort_by(|a, b| a.addon.summary.cmp(&b.addon.summary).reverse());\n\n }\n\n (CatalogColumnKey::Source, SortDirection::Asc) => {\n", "file_path": "src/gui/update.rs", "rank": 57, "score": 97203.46837598109 }, { "content": "fn build_addons(\n\n flavor: Flavor,\n\n repo_packages: &mut Vec<RepositoryPackage>,\n\n addon_folders: &mut Vec<AddonFolder>,\n\n cache_entries: &[AddonCacheEntry],\n\n) -> Vec<Addon> {\n\n let cached_addons: Vec<_> = cache_entries\n\n .iter()\n\n .filter_map(|e| {\n\n // Get and remove any matching repo package\n\n let repo_idx = repo_packages\n\n .iter()\n\n .position(|r| r.id == e.repository_id && r.kind == e.repository)?;\n\n\n\n let mut repo_package = repo_packages.remove(repo_idx);\n\n\n\n // Set the file id / version from the cache entry. 
This is needed to properly\n\n // validate the installed version against the remote version\n\n match &e.external_release_id {\n\n Some(ExternalReleaseId::FileId(file_id)) => {\n", "file_path": "crates/core/src/parse.rs", "rank": 58, "score": 97203.46837598109 }, { "content": "#[derive(Debug, Clone)]\n\nstruct AuraDisplay {\n\n url: String,\n\n slug: String,\n\n version: Option<u16>,\n\n version_string: Option<String>,\n\n parent: Option<String>,\n\n id: String,\n\n uid: Option<String>,\n\n ignore_updates: bool,\n\n skip_version: Option<u16>,\n\n kind: AuraDisplayKind,\n\n}\n\n\n", "file_path": "crates/weak_auras/src/lib.rs", "rank": 59, "score": 96051.71906856704 }, { "content": "pub fn data_row_container<'a, 'b>(\n\n color_palette: ColorPalette,\n\n aura: &'a Aura,\n\n column_config: &'b [(AuraColumnKey, Length, bool)],\n\n is_odd: Option<bool>,\n\n) -> TableRow<'a, Message> {\n\n let default_height = Length::Units(26);\n\n let default_row_height = 26;\n\n\n\n let mut row_containers = vec![];\n\n\n\n if let Some((idx, width)) = column_config\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(idx, (key, width, hidden))| {\n\n if *key == AuraColumnKey::Title && !hidden {\n\n Some((idx, width))\n\n } else {\n\n None\n\n }\n", "file_path": "src/gui/element/my_weakauras.rs", "rank": 60, "score": 95849.4213708259 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn data_row_container<'a, 'b>(\n\n color_palette: ColorPalette,\n\n config: &Config,\n\n addon: &'a mut CatalogRow,\n\n column_config: &'b [(CatalogColumnKey, Length, bool)],\n\n installed_for_flavor: bool,\n\n install_addon: Option<&InstallAddon>,\n\n is_odd: Option<bool>,\n\n) -> TableRow<'a, Message> {\n\n let default_height = Length::Units(26);\n\n let default_row_height = 26;\n\n\n\n let mut row_containers = vec![];\n\n\n\n let addon_data = &addon.addon;\n\n let install_button_state = &mut addon.install_button_state;\n\n\n\n let flavor_exists_for_addon = addon_data\n\n .game_versions\n\n .iter()\n", 
"file_path": "src/gui/element/catalog.rs", "rank": 61, "score": 95849.4213708259 }, { "content": "struct CompanionAddon {}\n\n\n\nimpl CompanionAddon {\n\n async fn exists(addon_dir: impl AsRef<Path>) -> bool {\n\n let companion_folder = addon_dir.as_ref().join(ADDON_NAME);\n\n let toc_file = companion_folder.join(TOC_NAME);\n\n let init_file = companion_folder.join(\"init.lua\");\n\n let data_file = companion_folder.join(\"data.lua\");\n\n\n\n companion_folder.is_dir().await\n\n && toc_file.is_file().await\n\n && init_file.is_file().await\n\n && data_file.is_file().await\n\n }\n\n\n\n async fn create(addon_dir: impl AsRef<Path>) -> Result<(), Error> {\n\n let companion_folder = addon_dir.as_ref().join(ADDON_NAME);\n\n let toc_file = companion_folder.join(TOC_NAME);\n\n let init_file = companion_folder.join(\"init.lua\");\n\n let data_file = companion_folder.join(\"data.lua\");\n", "file_path": "crates/weak_auras/src/companion.rs", "rank": 62, "score": 95496.70883901534 }, { "content": "fn query_and_sort_catalog(ajour: &mut Ajour) {\n\n if let Some(catalog) = &ajour.catalog {\n\n let query = ajour\n\n .catalog_search_state\n\n .query\n\n .as_ref()\n\n .map(|s| s.to_lowercase());\n\n let flavor = &ajour.config.wow.flavor;\n\n let source = &ajour.config.catalog_source;\n\n let category = &ajour.catalog_search_state.category;\n\n let result_size = ajour.catalog_search_state.result_size.as_usize();\n\n\n\n // Increase penalty for gaps between matching characters\n\n let fuzzy_match_config = SkimScoreConfig {\n\n gap_start: -12,\n\n gap_extension: -6,\n\n ..Default::default()\n\n };\n\n let fuzzy_matcher = SkimMatcherV2::default().score_config(fuzzy_match_config);\n\n\n", "file_path": "src/gui/update.rs", "rank": 63, "score": 93967.79447253677 }, { "content": "/// Log any errors\n\npub fn log_error(error: &anyhow::Error) {\n\n log::error!(\"{}\", error);\n\n\n\n let mut causes = error.chain();\n\n // Remove first entry since it's same as top level error\n\n 
causes.next();\n\n\n\n for cause in causes {\n\n log::error!(\"caused by: {}\", cause);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 64, "score": 92653.20455595876 }, { "content": "/// Return the wowi API endpoint.\n\nfn api_endpoint(ids: &str) -> String {\n\n format!(\"{}/{}.json\", API_ENDPOINT, ids)\n\n}\n\n\n\n/// Returns the addon website url.\n\npub(crate) fn addon_url(id: &str) -> String {\n\n format!(\"{}{}\", ADDON_URL, id)\n\n}\n\n\n\npub(crate) async fn batch_fetch_repo_packages(\n\n flavor: Flavor,\n\n wowi_ids: &[String],\n\n) -> Result<Vec<RepositoryPackage>, DownloadError> {\n\n let mut wowi_repo_packages = vec![];\n\n\n\n if wowi_ids.is_empty() {\n\n return Ok(wowi_repo_packages);\n\n }\n\n\n\n let wowi_packages = wowi::fetch_remote_packages(&wowi_ids).await?;\n", "file_path": "crates/core/src/repository/backend/wowi.rs", "rank": 65, "score": 89744.61179329036 }, { "content": "pub fn get_opts() -> Result<Opts, clap::Error> {\n\n let args = env::args_os();\n\n\n\n Opts::from_iter_safe(args)\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 66, "score": 87875.97979191643 }, { "content": "/// Helper function to split a comma separated string into `Vec<String>`.\n\nfn split_dependencies_into_vec(value: &str) -> Vec<String> {\n\n if value.is_empty() {\n\n return vec![];\n\n }\n\n\n\n value\n\n .split([','].as_ref())\n\n .map(|s| s.trim().to_string())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_toc_title() {\n\n let title = RE_TOC_TITLE.replace_all(\"Atlas |cFF0099FF[Foobar]|r\", \"$1\");\n\n assert_eq!(title, \"Atlas [Foobar]\");\n\n\n", "file_path": "crates/core/src/parse.rs", "rank": 67, "score": 86529.08597432083 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\nuse std::path::PathBuf;\n\n\n\n/// Struct for settings related to World of Warcraft.\n\n#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq)]\n\n#[serde(default)]\n\npub struct 
Wow {\n\n #[serde(default)]\n\n #[allow(deprecated)]\n\n pub directory: Option<PathBuf>,\n\n\n\n #[serde(default)]\n\n pub directories: HashMap<Flavor, PathBuf>,\n\n\n\n #[serde(default)]\n\n pub flavor: Flavor,\n\n}\n\n\n\nimpl Default for Wow {\n", "file_path": "crates/core/src/config/wow.rs", "rank": 68, "score": 78582.18234666123 }, { "content": " fn default() -> Self {\n\n Wow {\n\n directory: None,\n\n directories: HashMap::new(),\n\n flavor: Flavor::Retail,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Hash, PartialOrd, Ord)]\n\npub enum Flavor {\n\n #[serde(alias = \"retail\", alias = \"wow_retail\")]\n\n Retail,\n\n #[serde(alias = \"RetailPTR\")]\n\n RetailPtr,\n\n RetailBeta,\n\n #[serde(\n\n alias = \"Classic\",\n\n alias = \"classic\",\n\n alias = \"wow_classic\",\n", "file_path": "crates/core/src/config/wow.rs", "rank": 69, "score": 78570.68514336775 }, { "content": " Flavor::ClassicTbc,\n\n Flavor::ClassicPtr,\n\n Flavor::ClassicBeta,\n\n ];\n\n\n\n /// Returns flavor `String` in CurseForge format\n\n pub(crate) fn curse_format(self) -> String {\n\n match self {\n\n Flavor::Retail | Flavor::RetailPtr | Flavor::RetailBeta => \"wow_retail\".to_owned(),\n\n Flavor::ClassicTbc | Flavor::ClassicPtr | Flavor::ClassicBeta => {\n\n \"wow_burning_crusade\".to_owned()\n\n }\n\n Flavor::ClassicEra => \"wow_classic\".to_owned(),\n\n }\n\n }\n\n\n\n /// Returns flavor `String` in WowUp.Hub format\n\n pub(crate) fn hub_format(self) -> String {\n\n match self {\n\n Flavor::Retail | Flavor::RetailPtr | Flavor::RetailBeta => \"retail\".to_owned(),\n", "file_path": "crates/core/src/config/wow.rs", "rank": 70, "score": 78552.75587997238 }, { "content": " Flavor::RetailPtr => \"_ptr_\".to_owned(),\n\n Flavor::RetailBeta => \"_beta_\".to_owned(),\n\n Flavor::ClassicEra => \"_classic_era_\".to_owned(),\n\n Flavor::ClassicTbc => \"_classic_\".to_owned(),\n\n Flavor::ClassicPtr => \"_classic_ptr_\".to_owned(),\n\n 
Flavor::ClassicBeta => \"_classic_beta_\".to_owned(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Flavor {\n\n fn default() -> Flavor {\n\n Flavor::Retail\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Flavor {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n", "file_path": "crates/core/src/config/wow.rs", "rank": 71, "score": 78552.06226627188 }, { "content": " Flavor::ClassicTbc | Flavor::ClassicPtr | Flavor::ClassicBeta => {\n\n \"burningCrusade\".to_owned()\n\n }\n\n Flavor::ClassicEra => \"classic\".to_owned(),\n\n }\n\n }\n\n\n\n /// Returns `Flavor` which self relates to.\n\n pub fn base_flavor(self) -> Flavor {\n\n match self {\n\n Flavor::Retail | Flavor::RetailPtr | Flavor::RetailBeta => Flavor::Retail,\n\n Flavor::ClassicTbc | Flavor::ClassicPtr | Flavor::ClassicBeta => Flavor::ClassicTbc,\n\n Flavor::ClassicEra => Flavor::ClassicEra,\n\n }\n\n }\n\n\n\n /// Returns `String` which correlate to the folder on disk.\n\n pub(crate) fn folder_name(self) -> String {\n\n match self {\n\n Flavor::Retail => \"_retail_\".to_owned(),\n", "file_path": "crates/core/src/config/wow.rs", "rank": 72, "score": 78546.94053289635 }, { "content": " alias = \"classic_era\"\n\n )]\n\n ClassicEra,\n\n #[serde(\n\n alias = \"wow_burning_crusade\",\n\n alias = \"burningCrusade\",\n\n alias = \"burning_crusade\"\n\n )]\n\n ClassicTbc,\n\n #[serde(alias = \"ClassicPTR\")]\n\n ClassicPtr,\n\n ClassicBeta,\n\n}\n\n\n\nimpl Flavor {\n\n pub const ALL: [Flavor; 7] = [\n\n Flavor::Retail,\n\n Flavor::RetailPtr,\n\n Flavor::RetailBeta,\n\n Flavor::ClassicEra,\n", "file_path": "crates/core/src/config/wow.rs", "rank": 73, "score": 78544.61527638693 }, { "content": " \"{}\",\n\n match self {\n\n Flavor::Retail => \"Retail\",\n\n Flavor::RetailPtr => \"Retail PTR\",\n\n Flavor::RetailBeta => \"Retail Beta\",\n\n Flavor::ClassicEra => \"Classic Era\",\n\n Flavor::ClassicTbc => \"Classic TBC\",\n\n Flavor::ClassicBeta => \"Classic Beta\",\n\n 
Flavor::ClassicPtr => \"Classic PTR\",\n\n }\n\n )\n\n }\n\n}\n", "file_path": "crates/core/src/config/wow.rs", "rank": 74, "score": 78539.38690107792 }, { "content": "use super::Flavor;\n\nuse crate::repository::{GlobalReleaseChannel, ReleaseChannel};\n\nuse de::de_ignored;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n/// Struct for addons specific settings.\n\n#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq)]\n\npub struct Addons {\n\n #[serde(default)]\n\n pub global_release_channel: GlobalReleaseChannel,\n\n\n\n #[serde(default, deserialize_with = \"de_ignored\")]\n\n pub ignored: HashMap<Flavor, Vec<String>>,\n\n\n\n #[serde(default)]\n\n pub release_channels: HashMap<Flavor, HashMap<String, ReleaseChannel>>,\n\n\n\n #[serde(default)]\n\n pub delete_saved_variables: bool,\n", "file_path": "crates/core/src/config/addons.rs", "rank": 75, "score": 78015.02300291258 }, { "content": "}\n\n\n\nimpl Default for Addons {\n\n fn default() -> Self {\n\n Addons {\n\n global_release_channel: GlobalReleaseChannel::Stable,\n\n ignored: HashMap::new(),\n\n release_channels: HashMap::new(),\n\n delete_saved_variables: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nmod de {\n\n use crate::config::Flavor;\n\n use serde::{\n\n de::{self, MapAccess, SeqAccess, Visitor},\n\n Deserialize, Deserializer,\n\n };\n\n use std::collections::HashMap;\n", "file_path": "crates/core/src/config/addons.rs", "rank": 76, "score": 78006.86268247313 }, { "content": " use std::fmt;\n\n\n\n pub(crate) fn de_ignored<'de, D>(\n\n deserializer: D,\n\n ) -> Result<HashMap<Flavor, Vec<String>>, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n struct DeIgnored;\n\n\n\n impl<'de> Visitor<'de> for DeIgnored {\n\n type Value = HashMap<Flavor, Vec<String>>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"Vec<String> or HashMap<Flavor, Vec<String>>\")\n\n }\n\n\n\n fn visit_seq<A>(self, mut seq: A) -> 
Result<Self::Value, A::Error>\n\n where\n\n A: SeqAccess<'de>,\n", "file_path": "crates/core/src/config/addons.rs", "rank": 77, "score": 78002.79530188543 }, { "content": " {\n\n let mut map = HashMap::new();\n\n let mut ignored = vec![];\n\n\n\n while let Ok(Some(value)) = seq.next_element::<String>() {\n\n ignored.push(value);\n\n }\n\n\n\n map.insert(Flavor::Retail, ignored.clone());\n\n map.insert(Flavor::ClassicTbc, ignored);\n\n\n\n Ok(map)\n\n }\n\n\n\n fn visit_map<A>(self, map: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: MapAccess<'de>,\n\n {\n\n Deserialize::deserialize(de::value::MapAccessDeserializer::new(map))\n\n }\n\n }\n\n\n\n deserializer.deserialize_any(DeIgnored)\n\n }\n\n}\n", "file_path": "crates/core/src/config/addons.rs", "rank": 78, "score": 77993.42431446146 }, { "content": "use crate::fs;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::cmp::Ordering;\n\n\n\npub async fn load_user_themes() -> Vec<Theme> {\n\n log::debug!(\"loading user themes\");\n\n\n\n fs::load_user_themes().await\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct Theme {\n\n pub name: String,\n\n pub palette: ColorPalette,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Deserialize, Serialize)]\n\npub struct BaseColors {\n\n #[serde(with = \"serde_color\")]\n\n pub background: iced_native::Color,\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 90, "score": 77202.92794252922 }, { "content": " #[serde(with = \"serde_color\")]\n\n pub secondary: iced_native::Color,\n\n #[serde(with = \"serde_color\")]\n\n pub surface: iced_native::Color,\n\n #[serde(with = \"serde_color\")]\n\n pub error: iced_native::Color,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Deserialize, Serialize)]\n\npub struct ColorPalette {\n\n pub base: BaseColors,\n\n pub normal: NormalColors,\n\n pub bright: BrightColors,\n\n}\n\n\n\nimpl Theme {\n\n pub fn all() -> Vec<(String, Theme)> {\n\n vec![\n\n (\"Alliance\".to_string(), Theme::alliance()),\n\n 
(\"Ayu\".to_string(), Theme::ayu()),\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 94, "score": 77196.19710758499 }, { "content": " #[serde(with = \"serde_color\")]\n\n pub foreground: iced_native::Color,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Deserialize, Serialize)]\n\npub struct NormalColors {\n\n #[serde(with = \"serde_color\")]\n\n pub primary: iced_native::Color,\n\n #[serde(with = \"serde_color\")]\n\n pub secondary: iced_native::Color,\n\n #[serde(with = \"serde_color\")]\n\n pub surface: iced_native::Color,\n\n #[serde(with = \"serde_color\")]\n\n pub error: iced_native::Color,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Deserialize, Serialize)]\n\npub struct BrightColors {\n\n #[serde(with = \"serde_color\")]\n\n pub primary: iced_native::Color,\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 97, "score": 77188.58626086822 }, { "content": " {\n\n serializer.serialize_str(&color_to_hex(color))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{serde_color::deserialize, Theme};\n\n use serde::de::value::{Error, StrDeserializer};\n\n use serde::de::IntoDeserializer;\n\n\n\n #[test]\n\n fn test_hex_color_deser() {\n\n let colors = [\n\n \"AABBCC\", \"AABBCG\", \"#AABBCG\", \"#AABB091\", \"#AABBCC\", \"#AABB09\",\n\n ];\n\n\n\n for (idx, color_str) in colors.iter().enumerate() {\n\n let deserializer: StrDeserializer<Error> = color_str.into_deserializer();\n\n\n", "file_path": "crates/core/src/theme/mod.rs", "rank": 99, "score": 77184.20260493274 } ]
Rust
src/parsers/apache2/mod.rs
u-siem/usiem-apache-httpd
c19d21c8fe0bacb86ff6eebf9c4e0e708e7d8714
use chrono::prelude::{TimeZone, Utc}; use std::borrow::Cow; use usiem::components::common::LogParsingError; use usiem::events::common::{HttpMethod, WebProtocol}; use usiem::events::field::{SiemField, SiemIp}; use usiem::events::webserver::{WebServerEvent, WebServerOutcome}; use usiem::events::{SiemEvent, SiemLog}; pub fn parse_log_combinedio(log: SiemLog) -> Result<SiemLog, LogParsingError> { let log_line = log.message(); let start_log_pos = match log_line.find("\"") { Some(val) => val, None => return Err(LogParsingError::NoValidParser(log)), }; let log_header = &log_line[..start_log_pos]; let (pre_data, event_created) = match log_header.find('[') { Some(v1) => match log_header.find(']') { Some(v2) => { if (v2 - v1) > 27 || (v2 - v1) < 24 { return Err(LogParsingError::NoValidParser(log)); } else { (&log_header[..v1-1],&log_header[v1 + 1..v2]) } } None => return Err(LogParsingError::NoValidParser(log)), }, None => return Err(LogParsingError::NoValidParser(log)), }; let event_created = match Utc.datetime_from_str(event_created, "%d/%b/%Y:%H:%M:%S %z") { Ok(timestamp) => timestamp.timestamp_millis(), Err(_err) => return Err(LogParsingError::NoValidParser(log)), }; let log_content = &log_line[start_log_pos..]; let fields = extract_fields(log_content); let pre_fields = extract_fields(pre_data); let (http_method, url, version) = match fields.get(0) { Some(v) => match extract_http_content(v) { Ok((method, url, version)) => (method, url, version), Err(_) => return Err(LogParsingError::NoValidParser(log)), }, None => return Err(LogParsingError::NoValidParser(log)), }; let (http_protocol, http_version) = match version.find('/') { Some(p) => (parse_http_protocol(&version[..p]), &version[(p+1)..]), None => (parse_http_protocol(version), ""), }; let http_code = match fields.get(1) { Some(v) => match v.parse::<u32>() { Ok(v) => v, Err(_) => return Err(LogParsingError::NoValidParser(log)), }, None => return Err(LogParsingError::NoValidParser(log)), }; let referer = match 
fields.get(3) { Some(v) => { if *v == "-" { "" }else{ v } }, None => return Err(LogParsingError::NoValidParser(log)), }; let user_agent = match fields.get(4) { Some(v) => { if *v == "-" { "" }else{ v } }, None => return Err(LogParsingError::NoValidParser(log)), }; let in_bytes = match fields.get(5) { Some(v) => match v.parse::<u32>() { Ok(v) => v, Err(_) => 0, }, None => 0, }; let out_bytes = match fields.get(6) { Some(v) => match v.parse::<u32>() { Ok(v) => v, Err(_) => 0, }, None => 0, }; let (url_path, url_query, url_extension) = extract_url_parts(url); let pre_size = pre_fields.len(); let user_name = match pre_fields.get(pre_size - 1) { Some(v) => match *v { "-" => Cow::Borrowed(""), _ => Cow::Owned(v.to_string()) }, None => return Err(LogParsingError::NoValidParser(log)), }; let (source_ip, source_host) = match pre_fields.get(pre_size - 3) { Some(v) => match SiemIp::from_ip_str(v) { Ok(ip) => (ip, (*v).to_string()), Err(_) => (SiemIp::V4(0), (*v).to_string()) }, None => return Err(LogParsingError::NoValidParser(log)), }; let (destination_ip, destination_host) = if pre_size >= 4 { match pre_fields.get(pre_size - 4) { Some(v) => match SiemIp::from_ip_str(v) { Ok(ip) => (Some(ip), Some(v.to_string())), Err(_) => (None, Some(v.to_string())) }, None => (None,None) } }else{ (None,None) }; let http_version = match http_version { "" => None, _ => Some(SiemField::from_str(http_version.to_string())) }; let mut log = SiemLog::new( log_line.to_string(), log.event_received(), log.origin().clone(), ); log.set_category(Cow::Borrowed("Web Server")); log.set_product(Cow::Borrowed("Apache")); log.set_service(Cow::Borrowed("Web Server")); let outcome = if http_code < 400 { WebServerOutcome::ALLOW }else{ WebServerOutcome::BLOCK }; log.set_event(SiemEvent::WebServer(WebServerEvent { source_ip, destination_ip, destination_port: 80, in_bytes, out_bytes, http_code, http_method: parse_http_method(http_method), duration: 0.0, user_agent: Cow::Owned(user_agent.to_string()), url_full: 
Cow::Owned(url.to_string()), url_domain: Cow::Borrowed(""), url_path: Cow::Owned(url_path.to_string()), url_query: Cow::Owned(url_query.to_string()), url_extension: Cow::Owned(url_extension.to_string()), protocol: http_protocol, user_name, mime_type: Cow::Borrowed(""), outcome })); log.set_event_created(event_created); log.add_field("source.hostname", SiemField::from_str(source_host)); match http_version { Some(v) => {log.add_field("http.version", v);}, None => {} }; match destination_host { Some(v) => { log.add_field("destination.hostname", SiemField::from_str(v)); }, None => {} }; match referer { "" => {} _ => { log.add_field( "http.request.referrer", SiemField::Text(Cow::Owned(referer.to_string())), ); } }; Ok(log) } pub fn parse_http_method(method: &str) -> HttpMethod { match method { "GET" => HttpMethod::GET, "HEAD" => HttpMethod::HEAD, "POST" => HttpMethod::POST, "PUT" => HttpMethod::PUT, "PATCH" => HttpMethod::PATCH, "OPTIONS" => HttpMethod::OPTIONS, "CONNECT" => HttpMethod::CONNECT, _ => HttpMethod::UNKNOWN(method.to_uppercase()), } } pub fn parse_http_protocol(version: &str) -> WebProtocol { let proto = match version.find('/') { Some(p) => &version[..p], None => version, }; match proto { "HTTP" => WebProtocol::HTTP, "WS" => WebProtocol::WS, "WSS" => WebProtocol::WSS, "FTP" => WebProtocol::FTP, _ => WebProtocol::UNKNOWN(proto.to_uppercase()), } } pub fn extract_http_content<'a>( message: &'a str, ) -> Result<(&'a str, &'a str, &'a str), &'static str> { let mut splited = message.split(' '); let method = match splited.next() { Some(mt) => mt, None => return Err("No method"), }; let url = match splited.next() { Some(mt) => mt, None => return Err("No URL"), }; let version = match splited.next() { Some(mt) => mt, None => return Err("No version"), }; Ok((method, url, version)) } pub fn extract_url_parts<'a>(url: &'a str) -> (&'a str, &'a str, &'a str) { let pos = match url.find('?') { Some(v) => v, None => url.len(), }; let path = &url[..pos]; let query = 
&url[pos..]; let extension = match path.rfind('.') { Some(v) => { if (path.len() - v) > 8 { "" } else { &path[v+1..] } } None => "", }; (path, query, extension) } pub fn extract_fields<'a>(message: &'a str) -> Vec<&'a str> { let mut field_map = Vec::with_capacity(80); let mut start_field = 0; let mut is_string = false; for (i, c) in message.char_indices() { if c == '"' { if is_string { if start_field != i { field_map.push(&message[start_field..i]); } start_field = i + 1; } else { start_field = i + 1; } is_string = !is_string; } else if !is_string && c == ' ' { if start_field != i { field_map.push(&message[start_field..i]); } start_field = i + 1; } } field_map.push(&message[start_field..]); field_map } #[cfg(test)] mod filterlog_tests { use super::{extract_fields, parse_log_combinedio}; use usiem::events::field::{SiemIp, SiemField}; use usiem::events::SiemLog; use usiem::events::field_dictionary; #[test] fn test_extract_fields() { let log = "\"GET / HTTP/1.1\" 304 - \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0\" 465 164"; let map = extract_fields(log); assert_eq!(map.get(0), Some(&"GET / HTTP/1.1")); assert_eq!(map.get(1), Some(&"304")); assert_eq!(map.get(2), Some(&"-")); assert_eq!(map.get(3), Some(&"-")); assert_eq!( map.get(4), Some(&"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0") ); assert_eq!(map.get(5), Some(&"465")); assert_eq!(map.get(6), Some(&"164")); } #[test] fn test_parse_combinedio() { let log = "172.17.0.1 - - [23/Feb/2021:20:39:35 +0000] \"GET / HTTP/1.1\" 304 - \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0\" 465 164"; let log = SiemLog::new(log.to_string(), 0, SiemIp::V4(0)); let siem_log = parse_log_combinedio(log); match siem_log { Ok(log) => { assert_eq!(log.service(), "Web Server"); assert_eq!(log.field(field_dictionary::HTTP_REQUEST_METHOD), Some(&SiemField::from_str("GET"))); 
assert_eq!(log.field(field_dictionary::HTTP_RESPONSE_STATUS_CODE), Some(&SiemField::U32(304))); assert_eq!(log.field(field_dictionary::SOURCE_BYTES), Some(&SiemField::U32(164))); assert_eq!(log.field(field_dictionary::DESTINATION_BYTES), Some(&SiemField::U32(465))); assert_eq!(log.field(field_dictionary::URL_PATH), Some(&SiemField::from_str("/"))); assert_eq!(log.field("user_agent.original"), Some(&SiemField::from_str("Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0"))); assert_eq!(log.field(field_dictionary::SOURCE_IP), Some(&SiemField::IP(SiemIp::from_ip_str("172.17.0.1").unwrap()))); assert_eq!(log.field("http.version"), Some(&SiemField::from_str("1.1"))); } Err(_) => assert_eq!(1, 0), } } }
use chrono::prelude::{TimeZone, Utc};
use std::borrow::Cow;
use usiem::components::common::LogParsingError;
use usiem::events::common::{HttpMethod, WebProtocol};
use usiem::events::field::{SiemField, SiemIp};
use usiem::events::webserver::{WebServerEvent, WebServerOutcome};
use usiem::events::{SiemEvent, SiemLog};

/// Parses an Apache "combined I/O" access-log line into a `SiemEvent::WebServer`.
///
/// Expected shape (see the tests below):
/// `host ident user [dd/Mon/yyyy:HH:MM:SS +zzzz] "METHOD /url HTTP/1.1" code - "referer" "user-agent" in_bytes out_bytes`
///
/// On success returns a rebuilt `SiemLog` carrying the web-server event plus extra
/// fields (`source.hostname`, `http.version`, `http.request.referrer`, …).
/// Returns `LogParsingError::NoValidParser(log)` (giving the log back) whenever the
/// line does not match, so other parsers can try it.
pub fn parse_log_combinedio(log: SiemLog) -> Result<SiemLog, LogParsingError> {
    let log_line = log.message();
    // The first '"' opens the quoted request line; everything before it is the
    // "pre" header (host/ident/user + bracketed timestamp).
    let start_log_pos = match log_line.find("\"") {
        Some(val) => val,
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    let log_header = &log_line[..start_log_pos];
    let (pre_data, event_created) = match log_header.find('[') {
        Some(v1) => match log_header.find(']') {
            Some(v2) => {
                // Sanity check: the bracketed timestamp span must be 24..=27 chars,
                // the plausible widths of "dd/Mon/yyyy:HH:MM:SS +zzzz".
                if (v2 - v1) > 27 || (v2 - v1) < 24 {
                    return Err(LogParsingError::NoValidParser(log));
                } else {
                    // NOTE(review): `v1 - 1` underflows (usize) and panics if the
                    // line starts with '['; it also assumes exactly one char
                    // (a space) before '[' — confirm and consider `..v1` + trim.
                    (&log_header[..v1-1],&log_header[v1 + 1..v2])
                }
            }
            None => return Err(LogParsingError::NoValidParser(log)),
        },
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    // Apache CLF timestamp, e.g. "23/Feb/2021:20:39:35 +0000" -> epoch millis.
    let event_created = match Utc.datetime_from_str(event_created, "%d/%b/%Y:%H:%M:%S %z") {
        Ok(timestamp) => timestamp.timestamp_millis(),
        Err(_err) => return Err(LogParsingError::NoValidParser(log)),
    };
    let log_content = &log_line[start_log_pos..];
    // Quote-aware tokenization (see `extract_fields`): field 0 is the request
    // line, 1 = status, 2 = resp size ("-" here), 3 = referer, 4 = UA, 5/6 = bytes.
    let fields = extract_fields(log_content);
    let pre_fields = extract_fields(pre_data);
    let (http_method, url, version) = match fields.get(0) {
        Some(v) => match extract_http_content(v) {
            Ok((method, url, version)) => (method, url, version),
            Err(_) => return Err(LogParsingError::NoValidParser(log)),
        },
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    // "HTTP/1.1" -> (protocol enum, "1.1"); bare protocol -> empty version.
    let (http_protocol, http_version) = match version.find('/') {
        Some(p) => (parse_http_protocol(&version[..p]), &version[(p+1)..]),
        None => (parse_http_protocol(version), ""),
    };
    let http_code = match fields.get(1) {
        Some(v) => match v.parse::<u32>() {
            Ok(v) => v,
            Err(_) => return Err(LogParsingError::NoValidParser(log)),
        },
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    // "-" means absent for referer and user agent.
    let referer = match fields.get(3) {
        Some(v) => {
            if *v == "-" {
                ""
            } else {
                v
            }
        },
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    let user_agent = match fields.get(4) {
        Some(v) => {
            if *v == "-" {
                ""
            } else {
                v
            }
        },
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    // Byte counters are best-effort: unparsable or missing values become 0.
    let in_bytes = match fields.get(5) {
        Some(v) => match v.parse::<u32>() {
            Ok(v) => v,
            Err(_) => 0,
        },
        None => 0,
    };
    let out_bytes = match fields.get(6) {
        Some(v) => match v.parse::<u32>() {
            Ok(v) => v,
            Err(_) => 0,
        },
        None => 0,
    };
    let (url_path, url_query, url_extension) = extract_url_parts(url);
    // Pre-header fields are addressed from the END so an optional leading vhost
    // still lines up: last = user, last-2 = client host, last-3 = vhost (if any).
    // NOTE(review): `pre_size - 1` / `pre_size - 3` underflow and panic when the
    // pre-header yields fewer than 3 fields — TODO confirm this cannot happen
    // after the earlier format checks.
    let pre_size = pre_fields.len();
    let user_name = match pre_fields.get(pre_size - 1) {
        Some(v) => match *v {
            "-" => Cow::Borrowed(""),
            _ => Cow::Owned(v.to_string())
        },
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    let (source_ip, source_host) = match pre_fields.get(pre_size - 3) {
        Some(v) => match SiemIp::from_ip_str(v) {
            // Not an IP literal: keep it as hostname, use 0.0.0.0 as placeholder IP.
            Ok(ip) => (ip, (*v).to_string()),
            Err(_) => (SiemIp::V4(0), (*v).to_string())
        },
        None => return Err(LogParsingError::NoValidParser(log)),
    };
    let (destination_ip, destination_host) = if pre_size >= 4 {
        match pre_fields.get(pre_size - 4) {
            Some(v) => match SiemIp::from_ip_str(v) {
                Ok(ip) => (Some(ip), Some(v.to_string())),
                Err(_) => (None, Some(v.to_string()))
            },
            None => (None,None)
        }
    }else{
        (None,None)
    };
    let http_version = match http_version {
        "" => None,
        _ => Some(SiemField::from_str(http_version.to_string()))
    };
    // Rebuild the log (consumes the parsed message) before attaching the event.
    let mut log = SiemLog::new(
        log_line.to_string(),
        log.event_received(),
        log.origin().clone(),
    );
    log.set_category(Cow::Borrowed("Web Server"));
    log.set_product(Cow::Borrowed("Apache"));
    log.set_service(Cow::Borrowed("Web Server"));
    // 4xx/5xx are treated as blocked, everything below 400 as allowed.
    let outcome = if http_code < 400 {
        WebServerOutcome::ALLOW
    }else{
        WebServerOutcome::BLOCK
    };
    // destination_port is hard-coded to 80 — the access log does not carry it.
    log.set_event(SiemEvent::WebServer(WebServerEvent {
        source_ip,
        destination_ip,
        destination_port: 80,
        in_bytes,
        out_bytes,
        http_code,
        http_method: parse_http_method(http_method),
        duration: 0.0,
        user_agent: Cow::Owned(user_agent.to_string()),
        url_full: Cow::Owned(url.to_string()),
        url_domain: Cow::Borrowed(""),
        url_path: Cow::Owned(url_path.to_string()),
        url_query: Cow::Owned(url_query.to_string()),
        url_extension: Cow::Owned(url_extension.to_string()),
        protocol: http_protocol,
        user_name,
        mime_type: Cow::Borrowed(""),
        outcome
    }));
    log.set_event_created(event_created);
    log.add_field("source.hostname", SiemField::from_str(source_host));
    // Optional extras are only added when present.
    match http_version {
        Some(v) => {log.add_field("http.version", v);},
        None => {}
    };
    match destination_host {
        Some(v) => {
            log.add_field("destination.hostname", SiemField::from_str(v));
        },
        None => {}
    };
    match referer {
        "" => {}
        _ => {
            log.add_field(
                "http.request.referrer",
                SiemField::Text(Cow::Owned(referer.to_string())),
            );
        }
    };
    Ok(log)
}

/// Maps a request-line verb to `HttpMethod`.
/// NOTE(review): `DELETE` and `TRACE` have no arm here and fall through to
/// `HttpMethod::UNKNOWN("DELETE"/"TRACE")` — confirm that is intended.
pub fn parse_http_method(method: &str) -> HttpMethod {
    match method {
        "GET" => HttpMethod::GET,
        "HEAD" => HttpMethod::HEAD,
        "POST" => HttpMethod::POST,
        "PUT" => HttpMethod::PUT,
        "PATCH" => HttpMethod::PATCH,
        "OPTIONS" => HttpMethod::OPTIONS,
        "CONNECT" => HttpMethod::CONNECT,
        _ => HttpMethod::UNKNOWN(method.to_uppercase()),
    }
}

/// Maps a protocol token (optionally "NAME/version") to `WebProtocol`.
/// Body continues on a later, non-adjacent line of this scrambled chunk.
pub fn parse_http_protocol(version: &str) -> WebProtocol {
/// Splits an HTTP request line ("GET /path HTTP/1.1") into (method, url, version).
/// Returns a static error string when any of the three space-separated parts is
/// missing; extra trailing parts are ignored.
pub fn extract_http_content<'a>(
    message: &'a str,
) -> Result<(&'a str, &'a str, &'a str), &'static str> {
    let mut splited = message.split(' ');
    let method = match splited.next() {
        Some(mt) => mt,
        None => return Err("No method"),
    };
    let url = match splited.next() {
        Some(mt) => mt,
        None => return Err("No URL"),
    };
    let version = match splited.next() {
        Some(mt) => mt,
        None => return Err("No version"),
    };
    Ok((method, url, version))
}

/// Splits a URL into (path, query, extension), all borrowed from `url`.
/// The query slice includes the leading '?' (empty when there is none).
pub fn extract_url_parts<'a>(url: &'a str) -> (&'a str, &'a str, &'a str) {
    let pos = match url.find('?') {
        Some(v) => v,
        None => url.len(),
    };
    let path = &url[..pos];
    let query = &url[pos..];
    // Extension = text after the LAST '.' in the whole path, accepted only when
    // at most 8 chars long.
    // NOTE(review): `rfind('.')` is not limited to the final path segment, so a
    // dot inside a directory name (e.g. "/a.b/c") yields an "extension"
    // containing '/' — confirm whether that should be filtered.
    let extension = match path.rfind('.') {
        Some(v) => {
            if (path.len() - v) > 8 {
                ""
            } else {
                &path[v+1..]
            }
        }
        None => "",
    };
    (path, query, extension)
}

/// Tokenizes one Apache log line into fields: splits on spaces, except that a
/// double-quoted run ('"…"') is kept together as a single field (quotes stripped).
/// Returns borrowed slices into `message`; the trailing remainder is always
/// pushed, even when empty.
pub fn extract_fields<'a>(message: &'a str) -> Vec<&'a str> {
    // Capacity 80 is a guess at a typical upper bound to avoid regrowth.
    let mut field_map = Vec::with_capacity(80);
    let mut start_field = 0;
    let mut is_string = false; // true while inside a '"'-quoted run
    for (i, c) in message.char_indices() {
        if c == '"' {
            if is_string {
                // Closing quote: emit the quoted content (if non-empty).
                if start_field != i {
                    field_map.push(&message[start_field..i]);
                }
                start_field = i + 1;
            } else {
                // Opening quote: content starts after it.
                start_field = i + 1;
            }
            is_string = !is_string;
        } else if !is_string && c == ' ' {
            // Unquoted space terminates the current field.
            if start_field != i {
                field_map.push(&message[start_field..i]);
            }
            start_field = i + 1;
        }
    }
    // Flush whatever remains after the last separator.
    field_map.push(&message[start_field..]);
    field_map
}

#[cfg(test)]
mod filterlog_tests {
    use super::{extract_fields, parse_log_combinedio};
    use usiem::events::field::{SiemIp, SiemField};
    use usiem::events::SiemLog;
    use usiem::events::field_dictionary;

    // Quoted request line and user agent must each come out as one field.
    #[test]
    fn test_extract_fields() {
        let log = "\"GET / HTTP/1.1\" 304 - \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0\" 465 164";
        let map = extract_fields(log);
        assert_eq!(map.get(0), Some(&"GET / HTTP/1.1"));
        assert_eq!(map.get(1), Some(&"304"));
        assert_eq!(map.get(2), Some(&"-"));
        assert_eq!(map.get(3), Some(&"-"));
        assert_eq!(
            map.get(4),
            Some(&"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0")
        );
        assert_eq!(map.get(5), Some(&"465"));
        assert_eq!(map.get(6), Some(&"164"));
    }

    // End-to-end parse of a combined-I/O line; note bytes mapping:
    // field 5 (465) -> in_bytes -> DESTINATION_BYTES, field 6 (164) -> out_bytes -> SOURCE_BYTES.
    #[test]
    fn test_parse_combinedio() {
        let log = "172.17.0.1 - - [23/Feb/2021:20:39:35 +0000] \"GET / HTTP/1.1\" 304 - \"-\" \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0\" 465 164";
        let log = SiemLog::new(log.to_string(), 0, SiemIp::V4(0));
        let siem_log = parse_log_combinedio(log);
        match siem_log {
            Ok(log) => {
                assert_eq!(log.service(), "Web Server");
                assert_eq!(log.field(field_dictionary::HTTP_REQUEST_METHOD), Some(&SiemField::from_str("GET")));
                assert_eq!(log.field(field_dictionary::HTTP_RESPONSE_STATUS_CODE), Some(&SiemField::U32(304)));
                assert_eq!(log.field(field_dictionary::SOURCE_BYTES), Some(&SiemField::U32(164)));
                assert_eq!(log.field(field_dictionary::DESTINATION_BYTES), Some(&SiemField::U32(465)));
                assert_eq!(log.field(field_dictionary::URL_PATH), Some(&SiemField::from_str("/")));
                assert_eq!(log.field("user_agent.original"), Some(&SiemField::from_str("Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0")));
                assert_eq!(log.field(field_dictionary::SOURCE_IP), Some(&SiemField::IP(SiemIp::from_ip_str("172.17.0.1").unwrap())));
                assert_eq!(log.field("http.version"), Some(&SiemField::from_str("1.1")));
            }
            // Failing the parse fails the test.
            Err(_) => assert_eq!(1, 0),
        }
    }
}
let proto = match version.find('/') { Some(p) => &version[..p], None => version, }; match proto { "HTTP" => WebProtocol::HTTP, "WS" => WebProtocol::WS, "WSS" => WebProtocol::WSS, "FTP" => WebProtocol::FTP, _ => WebProtocol::UNKNOWN(proto.to_uppercase()), } }
function_block-function_prefix_line
[ { "content": "/// Always use JSON format. Easy ato process and with more information.\n\npub fn parse_log_json(mut log: SiemLog) -> Result<SiemLog, LogParsingError> {\n\n let mod_log = match log.event() {\n\n SiemEvent::Unknown => {\n\n //Check JSON and extract\n\n let log_line = log.message();\n\n let start_log_pos = match log_line.find(\"{\") {\n\n Some(val) => val,\n\n None => return Err(LogParsingError::NoValidParser(log)),\n\n };\n\n let mod_log: ModSecurityLog = match serde_json::from_str(&log_line[start_log_pos..]) {\n\n Ok(v) => v,\n\n Err(_) => return Err(LogParsingError::NoValidParser(log)),\n\n };\n\n mod_log\n\n }\n\n SiemEvent::Json(_) => {\n\n let log_line = log.message();\n\n let start_log_pos = match log_line.find(\"{\") {\n\n Some(val) => val,\n\n None => return Err(LogParsingError::NoValidParser(log)),\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 2, "score": 118714.03757745144 }, { "content": "fn get_url(url : &'static str, client : &reqwest::blocking::Client) {\n\n let res = client.get(url).send().unwrap();\n\n if res.status().is_success() {\n\n panic!(\"The URL {} MUST be blocked. Error in configuration\", url);\n\n }\n\n}\n", "file_path": "tests/integration.rs", "rank": 6, "score": 89703.51166226072 }, { "content": "fn get_allowed_url(url : &'static str, client : &reqwest::blocking::Client) {\n\n let res = client.get(url).send().unwrap();\n\n if !res.status().is_success() {\n\n panic!(\"The URL {} MUST NOT be blocked. 
Error in configuration\", url);\n\n }\n\n}", "file_path": "tests/integration.rs", "rank": 7, "score": 87295.94440671199 }, { "content": "fn extract_id(msg: &str) -> Option<u32> {\n\n match msg.find(\"[id \\\"\") {\n\n Some(v) => match msg[v + 5..].find(\"\\\"]\") {\n\n Some(v2) => Some(msg[v + 5..v + 5 + v2].parse::<u32>().unwrap_or(0)),\n\n None => None,\n\n },\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 8, "score": 77051.87894674414 }, { "content": "fn test_success_apache(success_text : &str) {\n\n let log = SiemLog::new(success_text.to_string(), 0, SiemIp::V4(0));\n\n let siem_log = apache2::parse_log_combinedio(log);\n\n match siem_log {\n\n Ok(log) => {\n\n assert_eq!(log.field(field_dictionary::HTTP_REQUEST_METHOD), Some(&SiemField::from_str(\"GET\")));\n\n match log.field(field_dictionary::HTTP_RESPONSE_STATUS_CODE) {\n\n Some(v) => {\n\n match v {\n\n SiemField::U32(v) => {\n\n if *v != 200 && *v != 304{\n\n assert_eq!(*v,0);\n\n }\n\n },\n\n _=> {assert_eq!(1,0);}\n\n }\n\n },\n\n None => {assert_eq!(0,200)}\n\n };\n\n assert_ne!(log.field(field_dictionary::SOURCE_IP), None);\n\n assert_ne!(log.field(field_dictionary::SOURCE_BYTES), None);\n\n assert_ne!(log.field(field_dictionary::DESTINATION_BYTES), None);\n\n assert_ne!(log.field(\"user_agent.original\"), None);\n\n assert_ne!(log.field(\"http.version\"), None);\n\n }\n\n Err(_) => assert_eq!(1, 0),\n\n }\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 10, "score": 57587.934686494074 }, { "content": "fn test_denied_sqli(denied_text : &str) {\n\n let log = SiemLog::new(denied_text.to_string(), 0, SiemIp::V4(0));\n\n match modsecurity::parse_log_json(log) {\n\n Ok(log) => {\n\n assert_eq!(log.service(), \"ModSecurity\");\n\n assert_eq!(\n\n log.field(field_dictionary::HTTP_REQUEST_METHOD),\n\n Some(&SiemField::from_str(\"GET\"))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::HTTP_RESPONSE_STATUS_CODE),\n\n Some(&SiemField::U32(403))\n\n 
);\n\n assert_eq!(\n\n log.field(field_dictionary::RULE_CATEGORY),\n\n Some(&SiemField::from_str(IntrusionCategory::SQL_INJECTION.to_string()))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::DESTINATION_PORT),\n\n Some(&SiemField::U32(80))\n", "file_path": "tests/integration.rs", "rank": 11, "score": 57587.934686494074 }, { "content": "fn test_denied_apache(denied_text : &str) {\n\n let log = SiemLog::new(denied_text.to_string(), 0, SiemIp::V4(0));\n\n match apache2::parse_log_combinedio(log) {\n\n Ok(log) => {\n\n assert_eq!(log.field(field_dictionary::HTTP_REQUEST_METHOD), Some(&SiemField::from_str(\"GET\")));\n\n assert_eq!(log.field(field_dictionary::HTTP_RESPONSE_STATUS_CODE), Some(&SiemField::U32(403)));\n\n },\n\n Err(_) => {\n\n panic!(\"Cannot parse log\")\n\n }\n\n }\n\n}\n", "file_path": "tests/integration.rs", "rank": 12, "score": 57587.934686494074 }, { "content": "fn extract_tags<'a>(msg: &'a str) -> BTreeSet<&'a str> {\n\n let mut tags = BTreeSet::new();\n\n let mut last_pos = 0;\n\n loop {\n\n match msg[last_pos..].find(\"[tag \\\"\") {\n\n Some(v) => match msg[last_pos + v..].find(\"\\\"]\") {\n\n Some(v2) => {\n\n let tag = &msg[(last_pos + v + 6)..last_pos + v + v2];\n\n tags.insert(tag);\n\n last_pos += v + v2;\n\n }\n\n None => {\n\n break;\n\n }\n\n },\n\n None => {\n\n break;\n\n }\n\n }\n\n }\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 13, "score": 54548.806686236676 }, { "content": "fn extract_rule_content(msg: &str) -> String {\n\n let msg_pos = match msg.find(\"[msg \\\"\") {\n\n Some(v) => v,\n\n None => return String::new(),\n\n };\n\n let end_pos = match msg[msg_pos..].find(\"\\\"] \") {\n\n Some(v) => v,\n\n None => return String::new(),\n\n };\n\n let msg = &msg[msg_pos + 6..msg_pos + end_pos];\n\n return msg.to_string();\n\n}\n\n\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 14, "score": 52021.536228133606 }, { "content": "fn map_attack(tags: BTreeSet<&str>, last_cat: IntrusionCategory) -> 
IntrusionCategory {\n\n let mut cat = IntrusionCategory::UNKNOWN;\n\n if last_cat == IntrusionCategory::SQL_INJECTION || tags.contains(\"attack-sqli\") {\n\n cat = IntrusionCategory::SQL_INJECTION;\n\n }\n\n if last_cat == IntrusionCategory::DOS || tags.contains(\"attack-dos\") {\n\n cat = IntrusionCategory::DOS;\n\n }\n\n if last_cat == IntrusionCategory::XSS || tags.contains(\"attack-xss\") {\n\n cat = IntrusionCategory::XSS;\n\n }\n\n if last_cat == IntrusionCategory::REMOTE_EXPLOIT || tags.contains(\"attack-injection-php\") {\n\n cat = IntrusionCategory::REMOTE_EXPLOIT;\n\n }\n\n if last_cat == IntrusionCategory::LOCAL_EXPLOIT || tags.contains(\"attack-lfi\") {\n\n cat = IntrusionCategory::LOCAL_EXPLOIT;\n\n }\n\n if tags.contains(\"attack-rfi\") {\n\n cat = IntrusionCategory::REMOTE_EXPLOIT;\n\n }\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 15, "score": 39574.061802390235 }, { "content": "#[test]\n\nfn test_apache_integration() {\n\n let out_dir = env::var(\"CI_CD\").unwrap_or(String::from(\"\"));\n\n if out_dir == \"\" {\n\n return;\n\n }\n\n println!(\"Starting CI/CD test\");\n\n let client = reqwest::blocking::Client::builder().build().unwrap();\n\n let res = client.get(\"http://127.0.0.1:8080/modsec_log\").send().unwrap();\n\n\n\n if !res.status().is_success() {\n\n panic!(\"ModSecurity must be active\");\n\n }\n\n\n\n let normal_url = \"http://127.0.0.1:8080/\";\n\n get_allowed_url(normal_url, &client);\n\n // HACK PAGE\n\n let sqli_url = \"http://127.0.0.1:8080/sqli.html?a=' or 1=1--\";\n\n get_url(sqli_url, &client);\n\n\n\n\n", "file_path": "tests/integration.rs", "rank": 16, "score": 34955.48620394275 }, { "content": " field_dictionary::EVENT_OUTCOME,\n\n FieldType::TextOptions(event_outcome, \"Outcome of the event\"),\n\n );\n\n let mut http_request_method = BTreeMap::new();\n\n http_request_method.insert(\"GET\", \"The GET method requests that the target resource transfers a representation of its state.\");\n\n 
http_request_method.insert(\"HEAD\", \"The HEAD method requests that the target resource transfers a representation of its state, like for a GET request, but without the representation data enclosed in the response body.\");\n\n http_request_method.insert(\"POST\", \"The POST method requests that the target resource processes the representation enclosed in the request according to the semantics of the target resource.\");\n\n http_request_method.insert(\"PUT\", \"The PUT method requests that the target resource creates or updates its state with the state defined by the representation enclosed in the request.\");\n\n http_request_method.insert(\"PATCH\", \"The PATCH method requests that the target resource modifies its state according to the partial update defined in the representation enclosed in the request.\");\n\n http_request_method.insert(\"OPTIONS\", \"The OPTIONS method requests that the target resource transfers the HTTP methods that it supports.\");\n\n http_request_method.insert(\"CONNECT\", \"The CONNECT method request that the intermediary establishes a TCP/IP tunnel to the origin server identified by the request target.\");\n\n fields.insert(\n\n field_dictionary::HTTP_REQUEST_METHOD,\n\n FieldType::TextOptions(http_request_method, \"HTTP Request method: get, post...\"),\n\n );\n\n let mut web_protocol = BTreeMap::new();\n\n web_protocol.insert(\"HTTP\", \"HyperText Transfer Protocol. HTTP is the underlying protocol used by the World Wide Web. 
\");\n\n web_protocol.insert(\"HTTPS\", \"Secured HTTP protocol\");\n\n web_protocol.insert(\"FTP\", \"The File Transfer Protocol is a standard communication protocol used for the transfer of computer files from a server to a client on a computer network.\");\n\n web_protocol.insert(\"WS\", \"WebSocket is a computer communications protocol, providing full-duplex communication channels over a single TCP connection.\");\n", "file_path": "src/parsers/mod.rs", "rank": 17, "score": 24.394122399738038 }, { "content": " );\n\n let mut http_request_method = BTreeMap::new();\n\n http_request_method.insert(\"GET\", \"The GET method requests that the target resource transfers a representation of its state.\");\n\n http_request_method.insert(\"HEAD\", \"The HEAD method requests that the target resource transfers a representation of its state, like for a GET request, but without the representation data enclosed in the response body.\");\n\n http_request_method.insert(\"POST\", \"The POST method requests that the target resource processes the representation enclosed in the request according to the semantics of the target resource.\");\n\n http_request_method.insert(\"PUT\", \"The PUT method requests that the target resource creates or updates its state with the state defined by the representation enclosed in the request.\");\n\n http_request_method.insert(\"PATCH\", \"The PATCH method requests that the target resource modifies its state according to the partial update defined in the representation enclosed in the request.\");\n\n http_request_method.insert(\"OPTIONS\", \"The OPTIONS method requests that the target resource transfers the HTTP methods that it supports.\");\n\n http_request_method.insert(\"CONNECT\", \"The CONNECT method request that the intermediary establishes a TCP/IP tunnel to the origin server identified by the request target.\");\n\n fields.insert(\n\n field_dictionary::HTTP_REQUEST_METHOD,\n\n FieldType::TextOptions(http_request_method, \"HTTP Request 
method: get, post...\"),\n\n );\n\n let mut web_protocol = BTreeMap::new();\n\n web_protocol.insert(\"HTTP\", \"HyperText Transfer Protocol. HTTP is the underlying protocol used by the World Wide Web. \");\n\n web_protocol.insert(\"HTTPS\", \"Secured HTTP protocol\");\n\n web_protocol.insert(\"FTP\", \"The File Transfer Protocol is a standard communication protocol used for the transfer of computer files from a server to a client on a computer network.\");\n\n web_protocol.insert(\"WS\", \"WebSocket is a computer communications protocol, providing full-duplex communication channels over a single TCP connection.\");\n\n web_protocol.insert(\"WSS\", \"Secured WebSocket protocol\");\n\n fields.insert(\n", "file_path": "src/parsers/mod.rs", "rank": 19, "score": 22.259777365335964 }, { "content": " let source_port = mod_log.transaction.remote_port;\n\n let destination_port = mod_log.transaction.local_port;\n\n let destination_ip = match SiemIp::from_ip_str(&mod_log.transaction.local_address[..]) {\n\n Ok(ip) => ip,\n\n Err(_) => return Err(LogParsingError::NoValidParser(log)),\n\n };\n\n\n\n let user_agent = mod_log\n\n .request\n\n .headers\n\n .get(\"User-Agent\")\n\n .map(|v| Cow::Owned(v.to_string()))\n\n .unwrap_or(Cow::Borrowed(\"\"));\n\n let user_agent = SiemField::from_str(user_agent.to_string());\n\n let (method, url_full, version) = match extract_http_content(&mod_log.request.request_line) {\n\n Ok((method, url_full, version)) => (method, url_full, version),\n\n Err(_) => return Err(LogParsingError::NoValidParser(log)),\n\n };\n\n\n\n let outcome = match mod_log.audit_data.action.intercepted {\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 21, "score": 19.90890627911655 }, { "content": " true => IntrusionOutcome::BLOCKED,\n\n false => IntrusionOutcome::DETECTED,\n\n };\n\n let protocol = parse_http_protocol(version);\n\n let protocol = SiemField::from_str(protocol.to_string());\n\n let method = parse_http_method(method);\n\n let method = 
SiemField::from_str(method.to_string());\n\n let (url_path, url_query, url_extension) = extract_url_parts(url_full);\n\n\n\n let url_path = SiemField::from_str(url_path.to_string());\n\n let url_query = SiemField::from_str(url_query.to_string());\n\n let url_extension = SiemField::from_str(url_extension.to_string());\n\n let url_full = SiemField::from_str(url_full.to_string());\n\n let status_code = mod_log.response.status;\n\n let rule_description = SiemField::from_str(mod_log.audit_data.action.message.to_string());\n\n\n\n let rule_name = {\n\n if mod_log.audit_data.messages.len() > 3 {\n\n mod_log\n\n .audit_data\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 23, "score": 19.006280152976725 }, { "content": " }\n\n /// Check if the parser can parse the log. Must be fast.\n\n fn device_match(&self, log: &SiemLog) -> bool {\n\n let msg = log.message();\n\n match msg.find('[') {\n\n Some(v1) => match msg.find(']') {\n\n Some(v2) => {\n\n if (v2 - v1) > 27 || (v2 - v1) < 24 {\n\n false\n\n } else {\n\n true\n\n }\n\n }\n\n None => false,\n\n },\n\n None => false,\n\n }\n\n }\n\n /// Name of the parser\n\n fn name(&self) -> &str {\n", "file_path": "src/parsers/mod.rs", "rank": 26, "score": 16.02786434166986 }, { "content": "use chrono::prelude::{TimeZone, Utc};\n\nuse std::borrow::Cow;\n\nuse usiem::components::common::LogParsingError;\n\nuse usiem::events::field::{SiemField, SiemIp};\n\nuse usiem::events::intrusion::{IntrusionCategory, IntrusionEvent, IntrusionOutcome};\n\nuse usiem::events::protocol::NetworkProtocol;\n\nuse usiem::events::{SiemEvent, SiemLog};\n\nmod modsec;\n\nuse super::apache2::{\n\n extract_http_content, extract_url_parts, parse_http_method, parse_http_protocol,\n\n};\n\nuse modsec::ModSecurityLog;\n\nuse std::collections::BTreeSet;\n\nuse usiem::events::field_dictionary;\n\n\n\n/// Always use JSON format. 
Easy ato process and with more information.\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 27, "score": 15.655222431692401 }, { "content": "pub mod apache2;\n\npub mod modsecurity;\n\nuse lazy_static::lazy_static;\n\nuse std::collections::BTreeMap;\n\nuse usiem::components::common::{LogParser, LogParsingError};\n\nuse usiem::events::field_dictionary;\n\nuse usiem::events::schema::{FieldSchema, FieldType};\n\nuse usiem::events::SiemLog;\n\n\n\nlazy_static! {\n\n static ref SCHEMA: FieldSchema = FieldSchema {\n\n fields: {\n\n let mut fields = BTreeMap::new();\n\n fields.insert(\n\n field_dictionary::SOURCE_IP,\n\n FieldType::Ip(\"IP of the initiator of a connection\"),\n\n );\n\n fields.insert(\n\n field_dictionary::DESTINATION_IP,\n\n FieldType::Ip(\"IP of the target of a conector\"),\n", "file_path": "src/parsers/mod.rs", "rank": 28, "score": 15.254384991302107 }, { "content": " \"http.request.referrer\",\n\n FieldType::Ip(\"IP or Hostname of the server that sent the log\"),\n\n );\n\n fields.insert(\n\n \"http.version\",\n\n FieldType::Text(\"Customer name for SOC environments. Ex: Contoso\"),\n\n );\n\n fields.insert(\n\n \"source.hostname\",\n\n FieldType::Text(\"Customer name for SOC environments. Ex: Contoso\"),\n\n );\n\n fields.insert(\n\n \"destination.hostname\",\n\n FieldType::Text(\"Customer name for SOC environments. 
Ex: Contoso\"),\n\n );\n\n let mut event_outcome = BTreeMap::new();\n\n event_outcome.insert(\"BLOCK\", \"Connection was blocked\");\n\n event_outcome.insert(\"ALLOW\", \"Connection was allowed\");\n\n event_outcome.insert(\"UNKNOWN\", \"Unknow connection state.\");\n\n fields.insert(\n", "file_path": "src/parsers/mod.rs", "rank": 29, "score": 15.250282733028849 }, { "content": " field_dictionary::NETWORK_PROTOCOL,\n\n FieldType::TextOptions(web_protocol, \"Network protocol: http, ftp, snmp...\"),\n\n );\n\n fields\n\n },\n\n allow_unknown_fields: false,\n\n gdpr: None,\n\n };\n\n static ref MOD_SCHEMA: FieldSchema = FieldSchema {\n\n fields: {\n\n let mut fields = BTreeMap::new();\n\n fields.insert(\n\n field_dictionary::SOURCE_IP,\n\n FieldType::Ip(\"IP of the initiator of a connection\"),\n\n );\n\n fields.insert(\n\n field_dictionary::DESTINATION_IP,\n\n FieldType::Ip(\"IP of the target of a conector\"),\n\n );\n\n fields.insert(\n", "file_path": "src/parsers/mod.rs", "rank": 30, "score": 14.739509421680705 }, { "content": " FieldType::Text(\"HTTP response mime type\"),\n\n );\n\n fields.insert(\n\n field_dictionary::URL_PATH,\n\n FieldType::Text(\"URL path: /api/v1\"),\n\n );\n\n fields.insert(\n\n field_dictionary::URL_QUERY,\n\n FieldType::Text(\"URL query: ?a=b&c=d\"),\n\n );\n\n fields.insert(\"url.extension\", FieldType::Text(\"URL extension: exe, html\"));\n\n fields.insert(\n\n field_dictionary::NETWORK_DURATION,\n\n FieldType::Decimal(\"Duration of the communication\"),\n\n );\n\n fields.insert(field_dictionary::USER_NAME, FieldType::Text(\"User name\"));\n\n fields.insert(\"user_agent.original\", FieldType::Text(\"Full user agent\"));\n\n fields.insert(\n\n \"http.request.referrer\",\n\n FieldType::Ip(\"IP or Hostname of the server that sent the log\"),\n", "file_path": "src/parsers/mod.rs", "rank": 33, "score": 13.507375773049354 }, { "content": " );\n\n assert_eq!(\n\n log.field(field_dictionary::RULE_ID),\n\n Some(&SiemField::U32(942100))\n\n 
);\n\n assert_eq!(\n\n log.field(field_dictionary::RULE_NAME),\n\n Some(&SiemField::from_str(\"SQL Injection Attack Detected via libinjection\"))\n\n );\n\n assert_ne!(\n\n log.field(field_dictionary::SOURCE_IP),\n\n None\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::URL_FULL),\n\n Some(&SiemField::from_str(\"/sqli.html?a=%27%20or%201=1--\"))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::URL_QUERY),\n\n Some(&SiemField::from_str(\"?a=%27%20or%201=1--\"))\n", "file_path": "tests/integration.rs", "rank": 37, "score": 12.764540423065586 }, { "content": " fn device_match(&self, log: &SiemLog) -> bool {\n\n let msg = log.message();\n\n match msg.find('{') {\n\n Some(_) => true,\n\n None => false,\n\n }\n\n }\n\n /// Name of the parser\n\n fn name(&self) -> &str {\n\n \"Apache2ModSecurityParser\"\n\n }\n\n /// Description of the parser\n\n fn description(&self) -> &str {\n\n \"Parser of Apache2 ModSecurity logs. Supports only JSON format.\"\n\n }\n\n /// Get parser schema\n\n fn schema(&self) -> &'static FieldSchema {\n\n &MOD_SCHEMA\n\n }\n\n}\n", "file_path": "src/parsers/mod.rs", "rank": 38, "score": 12.608823280878187 }, { "content": " );\n\n fields.insert(\n\n \"http.version\",\n\n FieldType::Text(\"Customer name for SOC environments. Ex: Contoso\"),\n\n );\n\n fields.insert(\n\n \"source.hostname\",\n\n FieldType::Text(\"Customer name for SOC environments. Ex: Contoso\"),\n\n );\n\n fields.insert(\n\n \"destination.hostname\",\n\n FieldType::Text(\"Customer name for SOC environments. 
Ex: Contoso\"),\n\n );\n\n let mut event_outcome = BTreeMap::new();\n\n event_outcome.insert(\"BLOCK\", \"Connection was blocked\");\n\n event_outcome.insert(\"ALLOW\", \"Connection was allowed\");\n\n event_outcome.insert(\"UNKNOWN\", \"Unknow connection state.\");\n\n fields.insert(\n\n field_dictionary::EVENT_OUTCOME,\n\n FieldType::TextOptions(event_outcome, \"Outcome of the event\"),\n", "file_path": "src/parsers/mod.rs", "rank": 39, "score": 12.424867554763743 }, { "content": " destination_ip,\n\n source_ip,\n\n destination_port,\n\n outcome,\n\n rule_id,\n\n rule_category: category,\n\n rule_name: Cow::Owned(rule_name),\n\n source_port,\n\n network_protocol: NetworkProtocol::TCP,\n\n });\n\n log.set_event(event);\n\n log.add_field(field_dictionary::HTTP_REQUEST_METHOD, method);\n\n log.add_field(\n\n field_dictionary::HTTP_RESPONSE_STATUS_CODE,\n\n SiemField::U32(status_code),\n\n );\n\n log.set_product(Cow::Borrowed(\"Apache2\"));\n\n log.set_service(Cow::Borrowed(\"ModSecurity\"));\n\n log.set_vendor(Cow::Borrowed(\"ModSecurity\"));\n\n log.set_category(Cow::Borrowed(\"WAF\"));\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 40, "score": 12.29347623410965 }, { "content": " };\n\n let mod_log: ModSecurityLog = match serde_json::from_str(&log_line[start_log_pos..]) {\n\n Ok(v) => v,\n\n Err(_) => return Err(LogParsingError::NoValidParser(log)),\n\n };\n\n mod_log\n\n }\n\n _ => return Err(LogParsingError::NoValidParser(log)),\n\n };\n\n\n\n let event_created =\n\n match Utc.datetime_from_str(&mod_log.transaction.time, \"%d/%b/%Y:%H:%M:%S %z\") {\n\n Ok(timestamp) => timestamp.timestamp_millis(),\n\n Err(_err) => return Err(LogParsingError::NoValidParser(log)),\n\n };\n\n\n\n let source_ip = match SiemIp::from_ip_str(&mod_log.transaction.remote_address[..]) {\n\n Ok(ip) => ip,\n\n Err(_) => return Err(LogParsingError::NoValidParser(log)),\n\n };\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 42, "score": 11.707577794530007 }, { 
"content": " Some(&SiemField::U32(36296))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::RULE_ID),\n\n Some(&SiemField::U32(942100))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::RULE_NAME),\n\n Some(&SiemField::from_str(\n\n \"SQL Injection Attack Detected via libinjection\"\n\n ))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::SOURCE_IP),\n\n Some(&SiemField::IP(SiemIp::from_ip_str(\"172.17.0.1\").unwrap()))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::URL_FULL),\n\n Some(&SiemField::from_str(\"/xss.html?default=%27OR%201=1--\"))\n\n );\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 43, "score": 11.624215911682464 }, { "content": " log.add_field(field_dictionary::NETWORK_PROTOCOL, protocol);\n\n log.add_field(field_dictionary::URL_PATH, url_path);\n\n log.add_field(field_dictionary::URL_QUERY, url_query);\n\n log.add_field(\"url.extension\", url_extension);\n\n log.add_field(field_dictionary::URL_FULL, url_full);\n\n log.add_field(\"rule.description\", rule_description);\n\n log.add_field(\"user_agent.original\", user_agent);\n\n log.set_event_created(event_created);\n\n\n\n Ok(log)\n\n}\n\n\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 45, "score": 10.706367511159861 }, { "content": " assert_eq!(\n\n log.field(field_dictionary::HTTP_REQUEST_METHOD),\n\n Some(&SiemField::from_str(\"GET\"))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::HTTP_RESPONSE_STATUS_CODE),\n\n Some(&SiemField::U32(403))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::RULE_CATEGORY),\n\n Some(&SiemField::from_str(\n\n IntrusionCategory::SQL_INJECTION.to_string()\n\n ))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::DESTINATION_PORT),\n\n Some(&SiemField::U32(80))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::SOURCE_PORT),\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 46, "score": 10.256801978755266 }, { "content": " );\n\n assert_eq!(\n\n log.field(field_dictionary::URL_PATH),\n\n 
Some(&SiemField::from_str(\"/sqli.html\"))\n\n );\n\n assert_eq!(\n\n log.field(\"url.extension\"),\n\n Some(&SiemField::from_str(\"html\"))\n\n );\n\n assert_eq!(log.field(\"user_agent.original\"), Some(&SiemField::from_str(\"\")));\n\n }\n\n Err(_) => {\n\n panic!(\"Cannot parse log\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/integration.rs", "rank": 47, "score": 10.234380286085893 }, { "content": " assert_eq!(\n\n log.field(field_dictionary::URL_QUERY),\n\n Some(&SiemField::from_str(\"?default=%27OR%201=1--\"))\n\n );\n\n assert_eq!(\n\n log.field(field_dictionary::URL_PATH),\n\n Some(&SiemField::from_str(\"/xss.html\"))\n\n );\n\n assert_eq!(\n\n log.field(\"url.extension\"),\n\n Some(&SiemField::from_str(\"html\"))\n\n );\n\n assert_eq!(log.field(\"user_agent.original\"), Some(&SiemField::from_str(\"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0\")));\n\n }\n\n Err(_) => assert_eq!(1, 0),\n\n }\n\n }\n\n}\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 48, "score": 10.1570184019718 }, { "content": " field_dictionary::RULE_CATEGORY,\n\n FieldType::TextOptions(web_cat, \"Category of the rule\"),\n\n );\n\n fields.insert(\n\n field_dictionary::RULE_NAME,\n\n FieldType::Text(\"Name of the rule\"),\n\n );\n\n fields\n\n },\n\n allow_unknown_fields: false,\n\n gdpr: None,\n\n };\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Apache2Parser {}\n\nimpl LogParser for Apache2Parser {\n\n /// Parse the log. If it fails it must give a reason why. 
This allow optimization of the parsing process.\n\n fn parse_log(&self, log: SiemLog) -> Result<SiemLog, LogParsingError> {\n\n apache2::parse_log_combinedio(log)\n", "file_path": "src/parsers/mod.rs", "rank": 49, "score": 9.70207321573481 }, { "content": " return tags;\n\n}\n\n\n\n#[cfg(test)]\n\nmod filterlog_tests {\n\n use super::parse_log_json;\n\n use usiem::events::field::{SiemField, SiemIp};\n\n use usiem::events::field_dictionary;\n\n use usiem::events::intrusion::IntrusionCategory;\n\n use usiem::events::SiemLog;\n\n\n\n #[test]\n\n fn test_parse_log_json() {\n\n let log = \"{\\\"transaction\\\":{\\\"time\\\":\\\"21/Feb/2021:23:16:17 +0000\\\",\\\"transaction_id\\\":\\\"YDLpwdKBz7is4x7ElBXe@gAAAEA\\\",\\\"remote_address\\\":\\\"172.17.0.1\\\",\\\"remote_port\\\":36296,\\\"local_address\\\":\\\"172.17.0.2\\\",\\\"local_port\\\":80},\\\"request\\\":{\\\"request_line\\\":\\\"GET /xss.html?default=%27OR%201=1-- HTTP/1.1\\\",\\\"headers\\\":{\\\"Host\\\":\\\"localhost:8080\\\",\\\"User-Agent\\\":\\\"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:85.0) Gecko/20100101 Firefox/85.0\\\",\\\"Accept\\\":\\\"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8\\\",\\\"Accept-Language\\\":\\\"es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3\\\",\\\"Accept-Encoding\\\":\\\"gzip, deflate\\\",\\\"Connection\\\":\\\"keep-alive\\\",\\\"Upgrade-Insecure-Requests\\\":\\\"1\\\",\\\"Cache-Control\\\":\\\"max-age=0\\\"}},\\\"response\\\":{\\\"protocol\\\":\\\"HTTP/1.1\\\",\\\"status\\\":403,\\\"headers\\\":{\\\"Content-Length\\\":\\\"217\\\",\\\"Keep-Alive\\\":\\\"timeout=5, max=100\\\",\\\"Connection\\\":\\\"Keep-Alive\\\",\\\"Content-Type\\\":\\\"text/html; charset=iso-8859-1\\\"},\\\"body\\\":\\\"<!DOCTYPE HTML PUBLIC \\\\\\\"-//IETF//DTD HTML 2.0//EN\\\\\\\">\\\\n<html><head>\\\\n<title>403 Forbidden</title>\\\\n</head><body>\\\\n<h1>Forbidden</h1>\\\\n<p>You don't have permission to access /xss.html\\\\non this server.<br 
/>\\\\n</p>\\\\n</body></html>\\\\n\\\"},\\\"audit_data\\\":{\\\"messages\\\":[\\\"Warning. detected SQLi using libinjection with fingerprint 's&1c' [file \\\\\\\"/usr/local/apache2/coreruleset/rules/REQUEST-942-APPLICATION-ATTACK-SQLI.conf\\\\\\\"] [line \\\\\\\"65\\\\\\\"] [id \\\\\\\"942100\\\\\\\"] [msg \\\\\\\"SQL Injection Attack Detected via libinjection\\\\\\\"] [data \\\\\\\"Matched Data: s&1c found within ARGS:default: 'OR 1=1--\\\\\\\"] [severity \\\\\\\"CRITICAL\\\\\\\"] [ver \\\\\\\"OWASP_CRS/3.3.0\\\\\\\"] [tag \\\\\\\"application-multi\\\\\\\"] [tag \\\\\\\"language-multi\\\\\\\"] [tag \\\\\\\"platform-multi\\\\\\\"] [tag \\\\\\\"attack-sqli\\\\\\\"] [tag \\\\\\\"paranoia-level/1\\\\\\\"] [tag \\\\\\\"OWASP_CRS\\\\\\\"] [tag \\\\\\\"capec/1000/152/248/66\\\\\\\"] [tag \\\\\\\"PCI/6.5.2\\\\\\\"]\\\",\\\"Access denied with code 403 (phase 2). Operator GE matched 5 at TX:anomaly_score. [file \\\\\\\"/usr/local/apache2/coreruleset/rules/REQUEST-949-BLOCKING-EVALUATION.conf\\\\\\\"] [line \\\\\\\"150\\\\\\\"] [id \\\\\\\"949110\\\\\\\"] [msg \\\\\\\"Inbound Anomaly Score Exceeded (Total Score: 5)\\\\\\\"] [severity \\\\\\\"CRITICAL\\\\\\\"] [ver \\\\\\\"OWASP_CRS/3.3.0\\\\\\\"] [tag \\\\\\\"application-multi\\\\\\\"] [tag \\\\\\\"language-multi\\\\\\\"] [tag \\\\\\\"platform-multi\\\\\\\"] [tag \\\\\\\"attack-generic\\\\\\\"]\\\",\\\"Warning. Operator GE matched 5 at TX:inbound_anomaly_score. [file \\\\\\\"/usr/local/apache2/coreruleset/rules/RESPONSE-980-CORRELATION.conf\\\\\\\"] [line \\\\\\\"87\\\\\\\"] [id \\\\\\\"980130\\\\\\\"] [msg \\\\\\\"Inbound Anomaly Score Exceeded (Total Inbound Score: 5 - SQLI=5,XSS=0,RFI=0,LFI=0,RCE=0,PHPI=0,HTTP=0,SESS=0): individual paranoia level scores: 5, 0, 0, 0\\\\\\\"] [ver \\\\\\\"OWASP_CRS/3.3.0\\\\\\\"] [tag \\\\\\\"event-correlation\\\\\\\"]\\\"],\\\"error_messages\\\":[\\\"[file \\\\\\\"apache2_util.c\\\\\\\"] [line 273] [level 3] [client 172.17.0.1] ModSecurity: Warning. 
detected SQLi using libinjection with fingerprint 's&1c' [file \\\\\\\"/usr/local/apache2/coreruleset/rules/REQUEST-942-APPLICATION-ATTACK-SQLI.conf\\\\\\\"] [line \\\\\\\"65\\\\\\\"] [id \\\\\\\"942100\\\\\\\"] [msg \\\\\\\"SQL Injection Attack Detected via libinjection\\\\\\\"] [data \\\\\\\"Matched Data: s&1c found within ARGS:default: 'OR 1=1--\\\\\\\"] [severity \\\\\\\"CRITICAL\\\\\\\"] [ver \\\\\\\"OWASP_CRS/3.3.0\\\\\\\"] [tag \\\\\\\"application-multi\\\\\\\"] [tag \\\\\\\"language-multi\\\\\\\"] [tag \\\\\\\"platform-multi\\\\\\\"] [tag \\\\\\\"attack-sqli\\\\\\\"] [tag \\\\\\\"paranoia-level/1\\\\\\\"] [tag \\\\\\\"OWASP_CRS\\\\\\\"] [tag \\\\\\\"capec/1000/152/248/66\\\\\\\"] [tag \\\\\\\"PCI/6.5.2\\\\\\\"] [hostname \\\\\\\"localhost\\\\\\\"] [uri \\\\\\\"/xss.html\\\\\\\"] [unique_id \\\\\\\"YDLpwdKBz7is4x7ElBXe@gAAAEA\\\\\\\"]\\\",\\\"[file \\\\\\\"apache2_util.c\\\\\\\"] [line 273] [level 3] [client 172.17.0.1] ModSecurity: Access denied with code 403 (phase 2). Operator GE matched 5 at TX:anomaly_score. [file \\\\\\\"/usr/local/apache2/coreruleset/rules/REQUEST-949-BLOCKING-EVALUATION.conf\\\\\\\"] [line \\\\\\\"150\\\\\\\"] [id \\\\\\\"949110\\\\\\\"] [msg \\\\\\\"Inbound Anomaly Score Exceeded (Total Score: 5)\\\\\\\"] [severity \\\\\\\"CRITICAL\\\\\\\"] [ver \\\\\\\"OWASP_CRS/3.3.0\\\\\\\"] [tag \\\\\\\"application-multi\\\\\\\"] [tag \\\\\\\"language-multi\\\\\\\"] [tag \\\\\\\"platform-multi\\\\\\\"] [tag \\\\\\\"attack-generic\\\\\\\"] [hostname \\\\\\\"localhost\\\\\\\"] [uri \\\\\\\"/xss.html\\\\\\\"] [unique_id \\\\\\\"YDLpwdKBz7is4x7ElBXe@gAAAEA\\\\\\\"]\\\",\\\"[file \\\\\\\"apache2_util.c\\\\\\\"] [line 273] [level 3] [client 172.17.0.1] ModSecurity: Warning. Operator GE matched 5 at TX:inbound_anomaly_score. 
[file \\\\\\\"/usr/local/apache2/coreruleset/rules/RESPONSE-980-CORRELATION.conf\\\\\\\"] [line \\\\\\\"87\\\\\\\"] [id \\\\\\\"980130\\\\\\\"] [msg \\\\\\\"Inbound Anomaly Score Exceeded (Total Inbound Score: 5 - SQLI=5,XSS=0,RFI=0,LFI=0,RCE=0,PHPI=0,HTTP=0,SESS=0): individual paranoia level scores: 5, 0, 0, 0\\\\\\\"] [ver \\\\\\\"OWASP_CRS/3.3.0\\\\\\\"] [tag \\\\\\\"event-correlation\\\\\\\"] [hostname \\\\\\\"localhost\\\\\\\"] [uri \\\\\\\"/xss.html\\\\\\\"] [unique_id \\\\\\\"YDLpwdKBz7is4x7ElBXe@gAAAEA\\\\\\\"]\\\"],\\\"action\\\":{\\\"intercepted\\\":true,\\\"phase\\\":2,\\\"message\\\":\\\"Operator GE matched 5 at TX:anomaly_score.\\\"},\\\"stopwatch\\\":{\\\"p1\\\":1283,\\\"p2\\\":1132,\\\"p3\\\":0,\\\"p4\\\":0,\\\"p5\\\":256,\\\"sr\\\":172,\\\"sw\\\":1,\\\"l\\\":0,\\\"gc\\\":0},\\\"response_body_dechunked\\\":true,\\\"producer\\\":[\\\"ModSecurity for Apache/2.9.3 (http://www.modsecurity.org/)\\\",\\\"OWASP_CRS/3.3.0\\\"],\\\"server\\\":\\\"Apache\\\",\\\"engine_mode\\\":\\\"ENABLED\\\"}}\";\n\n //println!(\"{}\",log);\n\n let log = SiemLog::new(log.to_string(), 0, SiemIp::V4(0));\n\n let siem_log = parse_log_json(log);\n\n match siem_log {\n\n Ok(log) => {\n\n assert_eq!(log.service(), \"ModSecurity\");\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 50, "score": 9.532476998317838 }, { "content": " fields.insert(\n\n field_dictionary::HTTP_RESPONSE_MIME_TYPE,\n\n FieldType::Text(\"HTTP response mime type\"),\n\n );\n\n fields.insert(\n\n field_dictionary::URL_PATH,\n\n FieldType::Text(\"URL path: /api/v1\"),\n\n );\n\n fields.insert(\n\n field_dictionary::URL_QUERY,\n\n FieldType::Text(\"URL query: ?a=b&c=d\"),\n\n );\n\n fields.insert(\"url.extension\", FieldType::Text(\"URL extension: exe, html\"));\n\n fields.insert(\n\n field_dictionary::NETWORK_DURATION,\n\n FieldType::Decimal(\"Duration of the communication\"),\n\n );\n\n fields.insert(field_dictionary::USER_NAME, FieldType::Text(\"User name\"));\n\n 
fields.insert(\"user_agent.original\", FieldType::Text(\"Full user agent\"));\n\n fields.insert(\n", "file_path": "src/parsers/mod.rs", "rank": 51, "score": 9.075953949841672 }, { "content": " \"Apache2Parser\"\n\n }\n\n /// Description of the parser\n\n fn description(&self) -> &str {\n\n \"Apache2 parser. Supports combined and combinedio logs\"\n\n }\n\n /// Get parser schema\n\n fn schema(&self) -> &'static FieldSchema {\n\n &SCHEMA\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Apache2ModSecurityParser {}\n\nimpl LogParser for Apache2ModSecurityParser {\n\n /// Parse the log. If it fails it must give a reason why. This allow optimization of the parsing process.\n\n fn parse_log(&self, log: SiemLog) -> Result<SiemLog, LogParsingError> {\n\n modsecurity::parse_log_json(log)\n\n }\n\n /// Check if the parser can parse the log. Must be fast.\n", "file_path": "src/parsers/mod.rs", "rank": 52, "score": 8.702309188948055 }, { "content": "use reqwest;\n\nuse std::env;\n\nuse usiem::events::{SiemLog};\n\nuse usiem::events::field::{SiemIp,SiemField};\n\nuse usiem::events::field_dictionary;\n\nuse usiem_apache2::parsers::apache2;\n\nuse usiem_apache2::parsers::modsecurity;\n\nuse usiem::events::intrusion::{IntrusionCategory};\n\n#[test]\n", "file_path": "tests/integration.rs", "rank": 53, "score": 8.448273078501026 }, { "content": " #[serde(borrow)]\n\n pub protocol: Cow<'input, str>,\n\n pub status : u32,\n\n #[serde(borrow)]\n\n pub headers: BTreeMap<Cow<'input, str>,Cow<'input, str>>\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ModSecurityAction<'input> {\n\n pub intercepted : bool,\n\n #[serde(borrow)]\n\n pub message: Cow<'input, str>,\n\n pub phase : u32\n\n}\n\n#[derive(Serialize, Deserialize)]\n\npub struct ModSecurityAudit<'input> {\n\n #[serde(borrow)]\n\n pub action : ModSecurityAction<'input>,\n\n #[serde(borrow)]\n\n pub messages: Vec<Cow<'input, str>>\n\n}", "file_path": "src/parsers/modsecurity/modsec.rs", "rank": 54, "score": 
7.805968651085946 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::collections::BTreeMap;\n\nuse std::borrow::Cow;\n\n\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ModSecurityLog <'input>{\n\n #[serde(borrow)]\n\n pub transaction: ModSecurityTransaction<'input>,\n\n #[serde(borrow)]\n\n pub request: ModSecurityRequest<'input>,\n\n #[serde(borrow)]\n\n pub response : ModSecurityResponse<'input>,\n\n #[serde(borrow)]\n\n pub audit_data : ModSecurityAudit<'input>\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ModSecurityTransaction<'input> {\n\n #[serde(borrow)]\n", "file_path": "src/parsers/modsecurity/modsec.rs", "rank": 55, "score": 7.147487309277529 }, { "content": " pub time: Cow<'input, str>,\n\n #[serde(borrow)]\n\n pub transaction_id: Cow<'input, str>,\n\n #[serde(borrow)]\n\n pub remote_address: Cow<'input, str>,\n\n #[serde(borrow)]\n\n pub local_address: Cow<'input, str>,\n\n pub remote_port: u16,\n\n pub local_port: u16\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct ModSecurityRequest<'input> {\n\n #[serde(borrow)]\n\n pub request_line: Cow<'input, str>,\n\n #[serde(borrow)]\n\n pub headers: BTreeMap<Cow<'input, str>,Cow<'input, str>>\n\n}\n\n#[derive(Serialize, Deserialize)]\n\npub struct ModSecurityResponse<'input> {\n", "file_path": "src/parsers/modsecurity/modsec.rs", "rank": 56, "score": 6.374626130604517 }, { "content": " let res = client.get(\"http://127.0.0.1:8080/access_log\").send().unwrap();\n\n if !res.status().is_success() {\n\n panic!(\"Apache2 must be active\");\n\n }\n\n let access_text = res.text().unwrap();\n\n let split = access_text.split(\"\\n\");\n\n let access_text: Vec<&str> = split.collect();\n\n\n\n let allowed_text = access_text.get(0).unwrap();\n\n let allowed_text2 = access_text.get(1).unwrap();\n\n let denied_text1 = access_text.get(2).unwrap();\n\n\n\n test_success_apache(allowed_text);\n\n test_success_apache(allowed_text2);\n\n test_denied_apache(denied_text1);\n\n\n\n 
//Now test Modsecurity blocking a page\n\n let res = client.get(\"http://127.0.0.1:8080/modsec_log\").send().unwrap();\n\n let modsec_text = res.text().unwrap();\n\n let split = modsec_text.split(\"\\n\");\n\n let modsec_text: Vec<&str> = split.collect();\n\n let text_sqli = modsec_text.get(2).unwrap();\n\n test_denied_sqli(text_sqli);\n\n}\n\n\n\n\n", "file_path": "tests/integration.rs", "rank": 57, "score": 6.14188615766439 }, { "content": " web_protocol.insert(\"WSS\", \"Secured WebSocket protocol\");\n\n fields.insert(\n\n field_dictionary::NETWORK_PROTOCOL,\n\n FieldType::TextOptions(web_protocol, \"Network protocol: http, ftp, snmp...\"),\n\n );\n\n let mut web_cat = BTreeMap::new();\n\n web_cat.insert(\"Abortion\", \"Abortion\");\n\n web_cat.insert(\"MatureContent\", \"MatureContent\");\n\n web_cat.insert(\"Alcohol\", \"Alcohol\");\n\n web_cat.insert(\"AlternativeSpirituality\", \"AlternativeSpirituality\");\n\n web_cat.insert(\"ArtCulture\", \"ArtCulture\");\n\n web_cat.insert(\"Auctions\", \"Auctions\");\n\n web_cat.insert(\"AudioVideoClips\", \"AudioVideoClips\");\n\n web_cat.insert(\"Trading\", \"Trading\");\n\n web_cat.insert(\"Economy\", \"Economy\");\n\n web_cat.insert(\"Charitable\", \"Charitable\");\n\n web_cat.insert(\"OnlineChat\", \"OnlineChat\");\n\n web_cat.insert(\"ChildPornography\", \"ChildPornography\");\n\n web_cat.insert(\"CloudInfrastructure\", \"CloudInfrastructure\");\n\n web_cat.insert(\"CompromisedSites\", \"CompromisedSites\");\n", "file_path": "src/parsers/mod.rs", "rank": 58, "score": 5.926437093204207 }, { "content": " .unwrap_or(0)\n\n } else {\n\n mod_log\n\n .audit_data\n\n .messages\n\n .get(0)\n\n .map(|v| extract_id(v).unwrap_or(0))\n\n .unwrap_or(0)\n\n }\n\n };\n\n let category = mod_log.audit_data.messages;\n\n let category = {\n\n let mut cat = IntrusionCategory::UNKNOWN;\n\n for msg in category {\n\n let tg = extract_tags(&msg);\n\n cat = map_attack(tg, cat);\n\n }\n\n cat\n\n };\n\n let event = 
SiemEvent::Intrusion(IntrusionEvent {\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 59, "score": 5.868806104092921 }, { "content": " field_dictionary::DESTINATION_PORT,\n\n FieldType::Numeric(\"Port of the destination\"),\n\n );\n\n fields.insert(\n\n field_dictionary::SOURCE_BYTES,\n\n FieldType::Numeric(\"Bytes sent from the source to the destination\"),\n\n );\n\n fields.insert(\n\n field_dictionary::DESTINATION_BYTES,\n\n FieldType::Numeric(\"Bytes sent from the destination to the source\"),\n\n );\n\n fields.insert(\n\n field_dictionary::HTTP_RESPONSE_STATUS_CODE,\n\n FieldType::Numeric(\"HTTP Status code: 404, 200...\"),\n\n );\n\n fields.insert(field_dictionary::URL_FULL, FieldType::Text(\"Full url\"));\n\n fields.insert(\n\n field_dictionary::URL_DOMAIN,\n\n FieldType::Text(\"Domain of the url\"),\n\n );\n", "file_path": "src/parsers/mod.rs", "rank": 60, "score": 5.226199688148643 }, { "content": " );\n\n fields.insert(\n\n field_dictionary::DESTINATION_PORT,\n\n FieldType::Numeric(\"Port of the destination\"),\n\n );\n\n fields.insert(\n\n field_dictionary::DESTINATION_BYTES,\n\n FieldType::Numeric(\"Bytes sent from the destination to the source\"),\n\n );\n\n fields.insert(\n\n field_dictionary::HTTP_RESPONSE_STATUS_CODE,\n\n FieldType::Numeric(\"HTTP Status code: 404, 200...\"),\n\n );\n\n fields.insert(field_dictionary::URL_FULL, FieldType::Text(\"Full url\"));\n\n fields.insert(\n\n field_dictionary::URL_DOMAIN,\n\n FieldType::Text(\"Domain of the url\"),\n\n );\n\n fields.insert(\n\n field_dictionary::HTTP_RESPONSE_MIME_TYPE,\n", "file_path": "src/parsers/mod.rs", "rank": 61, "score": 5.174088822430592 }, { "content": " .messages\n\n .get(1)\n\n .map(|v| extract_rule_content(v))\n\n .unwrap_or(String::from(\"Unknown Rule\"))\n\n } else {\n\n mod_log\n\n .audit_data\n\n .messages\n\n .get(0)\n\n .map(|v| extract_rule_content(v))\n\n .unwrap_or(String::from(\"Unknown Rule\"))\n\n }\n\n };\n\n let rule_id = {\n\n if 
mod_log.audit_data.messages.len() > 3 {\n\n mod_log\n\n .audit_data\n\n .messages\n\n .get(1)\n\n .map(|v| extract_id(v).unwrap_or(0))\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 62, "score": 5.106477903349905 }, { "content": "[![Rust](https://github.com/u-siem/usiem-apache-httpd/actions/workflows/rust.yml/badge.svg)](https://github.com/u-siem/usiem-apache-httpd/actions/workflows/rust.yml)\n\n# uSIEM Apache2 HTTP server\n\nuSIEM parser for Apache HTTP server and WAF modsecurity supported\n\n\n\n### Modsecurity\n\n\n\nCurrently only modsecurity V2 is supported for Apache2.\n", "file_path": "README.md", "rank": 63, "score": 4.581738304185472 }, { "content": "pub mod parsers;", "file_path": "src/lib.rs", "rank": 64, "score": 2.913675949296275 }, { "content": " cat = IntrusionCategory::ANOMALY;\n\n }\n\n if tags.contains(\"attack-protocol\") {\n\n cat = IntrusionCategory::PROTOCOL_ATTACK;\n\n }\n\n if tags.contains(\"anomaly-generic\") {\n\n cat = IntrusionCategory::WEB_ATTACK;\n\n }\n\n return cat;\n\n}\n\n\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 65, "score": 2.8220126605003046 }, { "content": " web_cat.insert(\"Informational\", \"Informational\");\n\n web_cat.insert(\"InternetConnectedDevices\", \"InternetConnectedDevices\");\n\n web_cat.insert(\"InternetTelephony\", \"InternetTelephony\");\n\n web_cat.insert(\"IntimateApparel\", \"IntimateApparel\");\n\n web_cat.insert(\"JobSearch\", \"JobSearch\");\n\n web_cat.insert(\n\n \"MaliciousOutboundDataBotnets\",\n\n \"MaliciousOutboundDataBotnets\",\n\n );\n\n web_cat.insert(\"MaliciousSources\", \"MaliciousSources\");\n\n web_cat.insert(\"Marijuana\", \"Marijuana\");\n\n web_cat.insert(\"MediaSharing\", \"MediaSharing\");\n\n web_cat.insert(\"Military\", \"Military\");\n\n web_cat.insert(\"PotentiallyAdult\", \"PotentiallyAdult\");\n\n web_cat.insert(\"News\", \"News\");\n\n web_cat.insert(\"Forums\", \"Forums\");\n\n web_cat.insert(\"Nudity\", \"Nudity\");\n\n 
web_cat.insert(\"BusinessApplications\", \"BusinessApplications\");\n\n web_cat.insert(\"OnlineMeetings\", \"OnlineMeetings\");\n\n web_cat.insert(\"P2P\", \"P2P\");\n", "file_path": "src/parsers/mod.rs", "rank": 66, "score": 1.872765078687511 }, { "content": " web_cat.insert(\"PersonalSites\", \"PersonalSites\");\n\n web_cat.insert(\"PersonalsDating\", \"PersonalsDating\");\n\n web_cat.insert(\"Phishing\", \"Phishing\");\n\n web_cat.insert(\"CopyrightConcerns\", \"CopyrightConcerns\");\n\n web_cat.insert(\"Placeholders\", \"Placeholders\");\n\n web_cat.insert(\"PoliticalAdvocacy\", \"PoliticalAdvocacy\");\n\n web_cat.insert(\"Pornography\", \"Pornography\");\n\n web_cat.insert(\"PotentiallyUnwantedSoftware\", \"PotentiallyUnwantedSoftware\");\n\n web_cat.insert(\"ProxyAvoidance\", \"ProxyAvoidance\");\n\n web_cat.insert(\"RadioAudioStreams\", \"RadioAudioStreams\");\n\n web_cat.insert(\"RealEstate\", \"RealEstate\");\n\n web_cat.insert(\"Reference\", \"Reference\");\n\n web_cat.insert(\"Religion\", \"Religion\");\n\n web_cat.insert(\"RemoteAccess\", \"RemoteAccess\");\n\n web_cat.insert(\"Restaurants\", \"Restaurants\");\n\n web_cat.insert(\"QuestionableLegality\", \"QuestionableLegality\");\n\n web_cat.insert(\"SearchEngines\", \"SearchEngines\");\n\n web_cat.insert(\"SexEducation\", \"SexEducation\");\n\n web_cat.insert(\"SexualExpression\", \"SexualExpression\");\n\n web_cat.insert(\"Shopping\", \"Shopping\");\n", "file_path": "src/parsers/mod.rs", "rank": 67, "score": 1.7974681935360328 }, { "content": " web_cat.insert(\"SocialNetworking\", \"SocialNetworking\");\n\n web_cat.insert(\"DailyLiving\", \"DailyLiving\");\n\n web_cat.insert(\"SoftwareDownloads\", \"SoftwareDownloads\");\n\n web_cat.insert(\"Spam\", \"Spam\");\n\n web_cat.insert(\"Sports\", \"Sports\");\n\n web_cat.insert(\"Suspicious\", \"Suspicious\");\n\n web_cat.insert(\"Technology\", \"Technology\");\n\n web_cat.insert(\"Tobacco\", \"Tobacco\");\n\n web_cat.insert(\"Translation\", 
\"Translation\");\n\n web_cat.insert(\"Travel\", \"Travel\");\n\n web_cat.insert(\"VideoStreams\", \"VideoStreams\");\n\n web_cat.insert(\"Uncategorized\", \"Uncategorized\");\n\n web_cat.insert(\"URLShorteners\", \"URLShorteners\");\n\n web_cat.insert(\"Vehicles\", \"Vehicles\");\n\n web_cat.insert(\"Violence\", \"Violence\");\n\n web_cat.insert(\"Weapons\", \"Weapons\");\n\n web_cat.insert(\"WebAds\", \"WebAds\");\n\n web_cat.insert(\"WebHosting\", \"WebHosting\");\n\n web_cat.insert(\"WebInfrastructure\", \"WebInfrastructure\");\n\n fields.insert(\n", "file_path": "src/parsers/mod.rs", "rank": 68, "score": 1.7775618042949894 }, { "content": " if tags.contains(\"attack-rce\") {\n\n cat = IntrusionCategory::REMOTE_EXPLOIT;\n\n }\n\n if last_cat == IntrusionCategory::SESSION_FIXATION || tags.contains(\"attack-fixation\") {\n\n cat = IntrusionCategory::SESSION_FIXATION;\n\n }\n\n if last_cat == IntrusionCategory::REPUTATION || tags.contains(\"attack-reputation-ip\") {\n\n cat = IntrusionCategory::REPUTATION;\n\n }\n\n if last_cat == IntrusionCategory::INFORMATION_LEAKAGE || tags.contains(\"attack-disclosure\") {\n\n cat = IntrusionCategory::INFORMATION_LEAKAGE;\n\n }\n\n if last_cat == IntrusionCategory::SURVEILLANCE\n\n || tags.contains(\"attack-reputation-scanner\")\n\n || tags.contains(\"attack-reputation-crawler\")\n\n || tags.contains(\"attack-reputation-scripting\")\n\n {\n\n cat = IntrusionCategory::SURVEILLANCE;\n\n }\n\n if tags.contains(\"anomaly-evaluation\") {\n", "file_path": "src/parsers/modsecurity/mod.rs", "rank": 69, "score": 1.3540693696408344 } ]
Rust
src/http/ctr.rs
zaksabeast/libctr-rs
556cf4857866db512ae415fd33f17131178ab465
use super::{ get_httpc_service_raw_handle, httpc_add_post_data_ascii, httpc_add_request_header_field, httpc_begin_request, httpc_create_context, httpc_initialize_connection_session, httpc_receive_data_with_timeout, httpc_set_proxy_default, httpc_set_socket_buffer_size, DefaultRootCert, HttpContextHandle, RequestMethod, RequestStatus, }; use crate::{ ipc::ThreadCommandBuilder, res::CtrResult, srv::get_service_handle_direct, utils::base64_encode, Handle, }; pub struct HttpContext { session_handle: Handle, context_handle: HttpContextHandle, } #[cfg_attr(not(target_os = "horizon"), mocktopus::macros::mockable)] impl HttpContext { pub fn new(url: &str, method: RequestMethod) -> CtrResult<Self> { let context_handle = httpc_create_context(method, url)?; let session_handle = get_service_handle_direct("http:C")?; httpc_initialize_connection_session(&session_handle, &context_handle)?; httpc_set_proxy_default(&session_handle, &context_handle)?; Ok(Self { session_handle, context_handle, }) } pub fn add_default_cert(&self, cert: DefaultRootCert) -> CtrResult<()> { let mut command = ThreadCommandBuilder::new(0x25u16); unsafe { command.push(self.context_handle.get_raw()) }; command.push(cert); let mut parser = command.build().send_sync_request(&self.session_handle)?; parser.pop_result()?; Ok(()) } pub fn set_client_cert_default(&self) -> CtrResult<()> { let mut command = ThreadCommandBuilder::new(0x28u16); unsafe { command.push(self.context_handle.get_raw()) }; command.push(0x40u32); let mut parser = command.build().send_sync_request(&self.session_handle)?; parser.pop_result()?; Ok(()) } pub fn add_header(&self, header_name: &str, value: &str) -> CtrResult<()> { httpc_add_request_header_field( &self.session_handle, &self.context_handle, header_name, value, ) } pub fn add_post_ascii_field(&self, post_field_name: &str, value: &str) -> CtrResult<()> { httpc_add_post_data_ascii( &self.session_handle, &self.context_handle, post_field_name, value, ) } pub fn add_post_base64_field<T: 
AsRef<[u8]>>( &self, post_field_name: &str, value: T, ) -> CtrResult<()> { self.add_post_ascii_field(post_field_name, &base64_encode(value)) } pub fn set_socket_buffer_size(&self, socket_buffer_size: u32) -> CtrResult<()> { httpc_set_socket_buffer_size(&self.session_handle, socket_buffer_size) } pub fn get_download_size_state(&self) -> CtrResult<(u32, u32)> { let mut command = ThreadCommandBuilder::new(0x6u16); unsafe { command.push(self.context_handle.get_raw()) }; let mut parser = command .build() .send_sync_request_with_raw_handle(get_httpc_service_raw_handle())?; parser.pop_result()?; Ok((parser.pop(), parser.pop())) } pub fn cancel_connection(&self) -> CtrResult<()> { let mut command = ThreadCommandBuilder::new(0x4u16); unsafe { command.push(self.context_handle.get_raw()) }; let mut parser = command .build() .send_sync_request_with_raw_handle(get_httpc_service_raw_handle())?; parser.pop_result()?; Ok(()) } pub fn download_data_into_buffer_with_timeout( &self, out_buffer: &mut [u8], nanosecond_timeout: u64, ) -> CtrResult<()> { httpc_begin_request(&self.session_handle, &self.context_handle)?; httpc_receive_data_with_timeout( &self.session_handle, &self.context_handle, out_buffer, nanosecond_timeout, )?; let (downloaded_size, _content_size) = self.get_download_size_state()?; if out_buffer.len() < (downloaded_size as usize) { self.cancel_connection()?; } Ok(()) } pub fn download_data_into_buffer(&self, out_buffer: &mut [u8]) -> CtrResult<()> { self.download_data_into_buffer_with_timeout(out_buffer, 60000000000) } pub fn get_response_status_code(&self) -> CtrResult<u32> { let mut command = ThreadCommandBuilder::new(0x22u16); unsafe { command.push(self.context_handle.get_raw()) }; let mut parser = command.build().send_sync_request(&self.session_handle)?; parser.pop_result()?; Ok(parser.pop()) } pub fn get_request_status(&self) -> CtrResult<RequestStatus> { let mut command = ThreadCommandBuilder::new(0x4u16); unsafe { command.push(self.context_handle.get_raw()) }; 
let mut parser = command .build() .send_sync_request_with_raw_handle(get_httpc_service_raw_handle())?; parser.pop_result()?; Ok(parser.pop().into()) } }
use super::{ get_httpc_service_raw_handle, httpc_add_post_data_ascii, httpc_add_request_header_field, httpc_begin_request, httpc_create_context, httpc_initialize_connection_session, httpc_receive_data_with_timeout, httpc_set_proxy_default, httpc_set_socket_buffer_size, DefaultRootCert, HttpContextHandle, RequestMethod, RequestStatus, }; use crate::{ ipc::ThreadCommandBuilder, res::CtrResult, srv::get_service_handle_direct, utils::base64_encode, Handle, }; pub struct HttpContext { session_handle: Handle, context_handle: HttpContextHandle, } #[cfg_attr(not(target_os = "horizon"), mocktopus::macros::mockable)] impl HttpContext { pub fn new(url: &str, method: RequestMethod) -> CtrResult<Self> { let context_handle = httpc_create_context(method, url)?; let session_handle = get_service_handle_direct("http:C")?; httpc_initialize_connection_session(&session_handle, &context_handle)?; httpc_set_proxy_default(&session_handle, &context_handle)?; Ok(Self { session_handle, context_handle, }) }
pub fn set_client_cert_default(&self) -> CtrResult<()> { let mut command = ThreadCommandBuilder::new(0x28u16); unsafe { command.push(self.context_handle.get_raw()) }; command.push(0x40u32); let mut parser = command.build().send_sync_request(&self.session_handle)?; parser.pop_result()?; Ok(()) } pub fn add_header(&self, header_name: &str, value: &str) -> CtrResult<()> { httpc_add_request_header_field( &self.session_handle, &self.context_handle, header_name, value, ) } pub fn add_post_ascii_field(&self, post_field_name: &str, value: &str) -> CtrResult<()> { httpc_add_post_data_ascii( &self.session_handle, &self.context_handle, post_field_name, value, ) } pub fn add_post_base64_field<T: AsRef<[u8]>>( &self, post_field_name: &str, value: T, ) -> CtrResult<()> { self.add_post_ascii_field(post_field_name, &base64_encode(value)) } pub fn set_socket_buffer_size(&self, socket_buffer_size: u32) -> CtrResult<()> { httpc_set_socket_buffer_size(&self.session_handle, socket_buffer_size) } pub fn get_download_size_state(&self) -> CtrResult<(u32, u32)> { let mut command = ThreadCommandBuilder::new(0x6u16); unsafe { command.push(self.context_handle.get_raw()) }; let mut parser = command .build() .send_sync_request_with_raw_handle(get_httpc_service_raw_handle())?; parser.pop_result()?; Ok((parser.pop(), parser.pop())) } pub fn cancel_connection(&self) -> CtrResult<()> { let mut command = ThreadCommandBuilder::new(0x4u16); unsafe { command.push(self.context_handle.get_raw()) }; let mut parser = command .build() .send_sync_request_with_raw_handle(get_httpc_service_raw_handle())?; parser.pop_result()?; Ok(()) } pub fn download_data_into_buffer_with_timeout( &self, out_buffer: &mut [u8], nanosecond_timeout: u64, ) -> CtrResult<()> { httpc_begin_request(&self.session_handle, &self.context_handle)?; httpc_receive_data_with_timeout( &self.session_handle, &self.context_handle, out_buffer, nanosecond_timeout, )?; let (downloaded_size, _content_size) = self.get_download_size_state()?; if 
out_buffer.len() < (downloaded_size as usize) { self.cancel_connection()?; } Ok(()) } pub fn download_data_into_buffer(&self, out_buffer: &mut [u8]) -> CtrResult<()> { self.download_data_into_buffer_with_timeout(out_buffer, 60000000000) } pub fn get_response_status_code(&self) -> CtrResult<u32> { let mut command = ThreadCommandBuilder::new(0x22u16); unsafe { command.push(self.context_handle.get_raw()) }; let mut parser = command.build().send_sync_request(&self.session_handle)?; parser.pop_result()?; Ok(parser.pop()) } pub fn get_request_status(&self) -> CtrResult<RequestStatus> { let mut command = ThreadCommandBuilder::new(0x4u16); unsafe { command.push(self.context_handle.get_raw()) }; let mut parser = command .build() .send_sync_request_with_raw_handle(get_httpc_service_raw_handle())?; parser.pop_result()?; Ok(parser.pop().into()) } }
pub fn add_default_cert(&self, cert: DefaultRootCert) -> CtrResult<()> { let mut command = ThreadCommandBuilder::new(0x25u16); unsafe { command.push(self.context_handle.get_raw()) }; command.push(cert); let mut parser = command.build().send_sync_request(&self.session_handle)?; parser.pop_result()?; Ok(()) }
function_block-full_function
[ { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn get_service_handle_direct(_name: &str) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n", "file_path": "src/srv/mock.rs", "rank": 0, "score": 195878.7015791129 }, { "content": "pub fn get_service_handle_direct(name: &str) -> CtrResult<Handle> {\n\n let mut raw_handle = 0;\n\n let c_name = cstring::parse_result(CString::new(name))?;\n\n let result = unsafe { srvGetServiceHandleDirect(&mut raw_handle, c_name.as_ptr()) };\n\n parse_result(result)?;\n\n Ok(raw_handle.into())\n\n}\n", "file_path": "src/srv/ctr.rs", "rank": 1, "score": 195874.80693474069 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn register_service(_name: &str, _max_sessions: i32) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "src/srv/mock.rs", "rank": 2, "score": 178036.0362804654 }, { "content": "pub fn register_service(name: &str, max_sessions: i32) -> CtrResult<Handle> {\n\n let c_name = cstring::parse_result(CString::new(name))?;\n\n let mut raw_handle = 0;\n\n let result = unsafe { srvRegisterService(&mut raw_handle, c_name.as_ptr(), max_sessions) };\n\n\n\n parse_result(result)?;\n\n\n\n Ok(raw_handle.into())\n\n}\n\n\n", "file_path": "src/srv/ctr.rs", "rank": 3, "score": 178032.14163609315 }, { "content": "/// Sends a sync request.\n\n/// This is often used with atomic handles, which are u32s instead of Handles.\n\n/// As a result, this takes a u32 to be more generic, and to avoid converting a u32 to a Handle, then immediately back into a u32.\n\npub fn control_service(service_op: u32, service_name: &str) -> CtrResult<Handle> {\n\n let mut result_code;\n\n let mut handle: u32 = 0;\n\n\n\n unsafe {\n\n asm!(\"svc 0xB0\", in(\"r0\") service_op, in(\"r1\") &mut handle, in(\"r2\") service_name.as_ptr(), lateout(\"r0\") result_code);\n\n }\n\n\n\n parse_result(result_code)?;\n\n Ok(handle.into())\n\n}\n\n\n\n#[inline(never)]\n", 
"file_path": "src/svc/ctr.rs", "rank": 4, "score": 175551.17640806874 }, { "content": "pub fn parse_null_terminated_str(bytes: &[u8]) -> &str {\n\n str::from_utf8(bytes)\n\n .unwrap_or(\"\")\n\n .split(char::from(0))\n\n .next()\n\n .unwrap_or(\"\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n mod parse_null_terminated_str {\n\n use super::*;\n\n\n\n #[test]\n\n fn should_return_a_str() {\n\n let bytes = [0x74, 0x65, 0x73, 0x74];\n\n let result = parse_null_terminated_str(&bytes);\n\n assert_eq!(result, \"test\")\n", "file_path": "src/utils/cstring.rs", "rank": 5, "score": 172775.65431018983 }, { "content": "pub fn str_from_utf8(bytes: &[u8]) -> CtrResult<&str> {\n\n str::from_utf8(bytes).map_err(|_| GenericResultCode::InvalidString.into())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n mod parse_num {\n\n use super::*;\n\n\n\n #[test]\n\n fn should_parse_a_number_from_a_string() {\n\n let result: u64 = parse_num(\"12345\").unwrap();\n\n assert_eq!(result, 12345u64);\n\n }\n\n\n\n #[test]\n\n fn should_return_an_error_if_the_string_is_an_invalid_number() {\n\n let result = parse_num::<u64>(\"abcd\").unwrap_err();\n", "file_path": "src/utils/parse.rs", "rank": 6, "score": 169689.28072613792 }, { "content": "pub fn parse_num<T: FromStr>(num: &str) -> CtrResult<T> {\n\n num.parse()\n\n .map_err(|_| GenericResultCode::InvalidValue.into())\n\n}\n\n\n", "file_path": "src/utils/parse.rs", "rank": 7, "score": 160313.72424330993 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn unregister_service(_name: &str) -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/srv/mock.rs", "rank": 8, "score": 157805.77480712032 }, { "content": "pub fn unregister_service(name: &str) -> CtrResult<ResultCode> {\n\n let c_name = cstring::parse_result(CString::new(name))?;\n\n let result = unsafe { srvUnregisterService(c_name.as_ptr()) };\n\n parse_result(result)\n\n}\n\n\n", "file_path": 
"src/srv/ctr.rs", "rank": 9, "score": 157801.88016274807 }, { "content": "pub fn base64_decode(base64: &str) -> CtrResult<Vec<u8>> {\n\n decode(base64.replace(\"*\", \"=\")).map_err(|_| GenericResultCode::InvalidValue.into())\n\n}\n\n\n", "file_path": "src/utils/shared.rs", "rank": 10, "score": 154974.15020338178 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn copy_handle(\n\n _out_process: &Handle,\n\n _input: &Handle,\n\n _in_process: &Handle,\n\n) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 11, "score": 152525.73800496606 }, { "content": "pub fn close_async(event_handle: &Handle) -> CtrResult<()> {\n\n let mut command = ThreadCommandBuilder::new(0x8u16);\n\n command.push_curent_process_id();\n\n\n\n // This is safe since we're not duplicating the handle outside of an svc\n\n unsafe { command.push_raw_handle(event_handle.get_raw()) };\n\n\n\n let mut parser = command\n\n .build()\n\n .send_sync_request_with_raw_handle(get_raw_handle())?;\n\n parser.pop_result()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ac/shared.rs", "rank": 12, "score": 143279.4819200971 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn accept_session(_port: &Handle) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 13, "score": 141459.16547865726 }, { "content": "/// Accepts a session to a service.\n\npub fn accept_session(port: &Handle) -> CtrResult<Handle> {\n\n let mut raw_handle = 0;\n\n let result = unsafe { svcAcceptSession(&mut raw_handle, port.get_raw()) };\n\n\n\n parse_result(result)?;\n\n\n\n Ok(raw_handle.into())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 14, "score": 141455.27083428504 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn enable_notifications() -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": 
"src/srv/mock.rs", "rank": 15, "score": 140193.25700249174 }, { "content": "pub fn enable_notifications() -> CtrResult<Handle> {\n\n let mut raw_handle = 0;\n\n let result = unsafe { srvEnableNotification(&mut raw_handle) };\n\n\n\n parse_result(result)?;\n\n\n\n Ok(raw_handle.into())\n\n}\n\n\n", "file_path": "src/srv/ctr.rs", "rank": 16, "score": 140189.3623581195 }, { "content": "/// Closes a handle.\n\n/// This is pretty much only for implementing Drop on Handle.\n\n/// If you're thinking about using this, consider using a Handle and let it manage closing the underlying handle.\n\npub fn close_handle(handle: u32) -> CtrResult<ResultCode> {\n\n let mut result_code;\n\n\n\n unsafe {\n\n asm!(\"svc 0x23\", in(\"r0\") handle, lateout(\"r0\") result_code);\n\n }\n\n\n\n parse_result(result_code)\n\n}\n\n\n\n#[inline(never)]\n", "file_path": "src/svc/ctr.rs", "rank": 17, "score": 139463.54882245304 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn close_handle(_handle: u32) -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 18, "score": 139454.6055906223 }, { "content": "#[inline(never)]\n\npub fn copy_handle(out_process: &Handle, input: &Handle, in_process: &Handle) -> CtrResult<Handle> {\n\n let mut result;\n\n let mut out_handle = 0u32;\n\n unsafe {\n\n asm!(\n\n \"\n\n str r0, [sp, #-4]!\n\n svc 0xB1\n\n ldr r2, [sp], #4\n\n str r1, [r2]\n\n \",\n\n in(\"r0\") &mut out_handle,\n\n in(\"r1\") out_process.get_raw(),\n\n in(\"r2\") input.get_raw(),\n\n in(\"r3\") in_process.get_raw(),\n\n lateout(\"r0\") result\n\n )\n\n }\n\n\n\n parse_result(result)?;\n\n Ok(out_handle.into())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 19, "score": 138073.45280904305 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn eat_events(_debug_process: &Handle) -> CtrResult<()> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 20, 
"score": 133664.25753276842 }, { "content": "// Thanks to Luma3ds\n\npub fn eat_events(debug_process: &Handle) -> CtrResult<()> {\n\n loop {\n\n if get_process_debug_event(debug_process) == -0x27bfdff7 {\n\n break;\n\n }\n\n continue_debug_event(\n\n debug_process,\n\n DebugFlag::InhibitUserCpuExceptionHandlersAndSignalFaultExceptionEvents,\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 21, "score": 133660.3628883962 }, { "content": "#[inline]\n\npub fn make_shared_handles_header(number: u32) -> u32 {\n\n (number - 1) << 26\n\n}\n\n\n\n#[cfg(target_os = \"horizon\")]\n\n#[inline]\n\nunsafe fn get_thread_local_storage() -> *mut u32 {\n\n let ret: *mut u32;\n\n\n\n asm!(\"mrc p15, 0, {}, c13, c0, 3\", out(reg) ret);\n\n ret\n\n}\n\n\n\n#[cfg(target_os = \"horizon\")]\n\n#[inline]\n\npub(in crate) fn get_thread_command_buffer() -> &'static mut [u32] {\n\n // This is safe because the command buffer is valid for 64 u32 reads/writes\n\n unsafe {\n\n slice::from_raw_parts_mut(get_thread_local_storage().offset(0x20), COMMAND_BUFFER_SIZE)\n\n }\n\n}\n\n\n", "file_path": "src/ipc/shared.rs", "rank": 22, "score": 133660.36288839616 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn wait_synchronization(_handle: &Handle, _wait_nanoseconds: i64) -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 23, "score": 130696.28097377994 }, { "content": "pub fn wait_synchronization(handle: &Handle, wait_nanoseconds: i64) -> CtrResult<ResultCode> {\n\n let result = unsafe { svcWaitSynchronization(handle.get_raw(), wait_nanoseconds) };\n\n parse_result(result)\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 24, "score": 130692.38632940769 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn signal_event(event: &Handle) -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 25, "score": 
129730.93938588686 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn open_process(_process_id: u32) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 26, "score": 129730.93938588686 }, { "content": "pub fn open_process(process_id: u32) -> CtrResult<Handle> {\n\n let mut raw_handle = 0;\n\n let result = unsafe { svcOpenProcess(&mut raw_handle, process_id) };\n\n\n\n parse_result(result)?;\n\n Ok(raw_handle.into())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 27, "score": 129727.0447415146 }, { "content": "pub fn signal_event(event: &Handle) -> CtrResult<ResultCode> {\n\n let result = unsafe { svcSignalEvent(event.get_raw()) };\n\n parse_result(result)\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 28, "score": 129727.0447415146 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn get_process_debug_event(_debug_process: &Handle) -> ResultCode {\n\n 0\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 29, "score": 129540.63107392432 }, { "content": "// TODO: Implement for reals. 
This is hacked together for now.\n\npub fn get_process_debug_event(debug_process: &Handle) -> ResultCode {\n\n let mut info: [u8; 0x28] = [0; 0x28];\n\n unsafe {\n\n svcGetProcessDebugEvent(\n\n core::mem::transmute::<*mut u8, *mut DebugEventInfo>(info.as_mut_ptr()),\n\n debug_process.get_raw(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 30, "score": 129536.73642955208 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn debug_active_process(_process_id: u32) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 31, "score": 127716.41998811226 }, { "content": "pub fn debug_active_process(process_id: u32) -> CtrResult<Handle> {\n\n let mut raw_handle = 0u32;\n\n let result = unsafe { svcDebugActiveProcess(&mut raw_handle, process_id) };\n\n\n\n parse_result(result)?;\n\n Ok(raw_handle.into())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 32, "score": 127712.52534374001 }, { "content": "pub fn unmap_memory_block(memory_block_handle: &Handle, slice: &[u8]) -> CtrResult<ResultCode> {\n\n let result =\n\n unsafe { svcUnmapMemoryBlock(memory_block_handle.get_raw(), slice.as_ptr() as u32) };\n\n parse_result(result)\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 33, "score": 127334.50941748414 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn create_event(_reset_type: EventResetType) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 34, "score": 125790.26481015471 }, { "content": "pub fn create_event(reset_type: EventResetType) -> CtrResult<Handle> {\n\n let mut raw_handle = 0;\n\n let result = unsafe { svcCreateEvent(&mut raw_handle, reset_type as u32) };\n\n\n\n parse_result(result)?;\n\n\n\n Ok(raw_handle.into())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 35, "score": 125786.37016578246 }, { "content": "/// Sends a sync request.\n\n/// This is often used 
with atomic handles, which are u32s instead of Handles.\n\n/// As a result, this takes a u32 to be more generic, and to avoid converting a u32 to a Handle, then immediately back into a u32.\n\npub fn send_raw_sync_request(raw_handle: u32) -> CtrResult<ResultCode> {\n\n let mut result_code;\n\n\n\n unsafe {\n\n asm!(\"svc 0x32\", in(\"r0\") raw_handle, lateout(\"r0\") result_code);\n\n }\n\n\n\n parse_result(result_code)\n\n}\n\n\n\n#[inline(never)]\n", "file_path": "src/svc/ctr.rs", "rank": 36, "score": 123950.86094887723 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn send_raw_sync_request(_raw_handle: u32) -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 37, "score": 123946.7846576497 }, { "content": "#[cfg(target_os = \"horizon\")]\n\nfn log(file_name: &str, log_type: LogType, text: &str) {\n\n let cmd_buf = get_thread_command_buffer();\n\n let mut command_cache: [u32; 25] = [0; 25];\n\n\n\n // This is safe because the command buffer and the cache are aligned,\n\n // valid for 25 u32 reads/writes, and don't overlap\n\n unsafe { copy_nonoverlapping(cmd_buf.as_ptr(), command_cache.as_mut_ptr(), 25) };\n\n\n\n let archive_path: FsPath = \"\".try_into().unwrap();\n\n let file_path: FsPath = file_name.try_into().unwrap();\n\n if let Ok(mut file) = File::new(\n\n &archive_path,\n\n &file_path,\n\n ArchiveId::Sdmc,\n\n OpenFlags::ReadWriteCreate,\n\n ) {\n\n let new_line_text = format!(\"[{}] {}: {}\\n\", log_type, get_time(), text);\n\n file.write_str(&new_line_text).unwrap();\n\n }\n\n\n", "file_path": "src/log.rs", "rank": 38, "score": 123262.5477953355 }, { "content": "#[cfg(not(target_os = \"horizon\"))]\n\nfn log(_file_name: &str, _log_type: LogType, _text: &str) {}\n\n\n\n/// Logs text to the SD card in a file at the root of the sd card.\n", "file_path": "src/log.rs", "rank": 39, "score": 123262.5477953355 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), 
mocktopus::macros::mockable)]\n\npub fn continue_debug_event(_debug_process: &Handle, _flag: DebugFlag) -> CtrResult<()> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 40, "score": 122275.44572246264 }, { "content": "pub fn continue_debug_event(debug_process: &Handle, flag: DebugFlag) -> CtrResult<()> {\n\n let result = unsafe { svcContinueDebugEvent(debug_process.get_raw(), flag.into()) };\n\n\n\n parse_result(result)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 41, "score": 122271.55107809039 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn exit() {}\n\n\n", "file_path": "src/hid/mock.rs", "rank": 42, "score": 118468.15572661662 }, { "content": "pub fn exit() {\n\n unsafe { hidExit() };\n\n}\n\n\n", "file_path": "src/hid/ctr.rs", "rank": 43, "score": 118464.2610822444 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn write_process_memory(_debug_process: &Handle, _buffer: &[u8], _addr: u32) -> CtrResult<()> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 44, "score": 117554.44576958132 }, { "content": "pub fn write_process_memory(debug_process: &Handle, buffer: &[u8], addr: u32) -> CtrResult<()> {\n\n let result = unsafe {\n\n svcWriteProcessMemory(\n\n debug_process.get_raw(),\n\n buffer.as_ptr() as *const c_void,\n\n addr,\n\n buffer.len() as u32,\n\n )\n\n };\n\n\n\n parse_result(result)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 45, "score": 117550.55112520908 }, { "content": "/// Replies to a request and receives a new request.\n\npub fn reply_and_receive(raw_handles: &[u32], reply_target: Option<usize>) -> (usize, ResultCode) {\n\n let raw_reply_target_handle = match reply_target {\n\n Some(target_index) => raw_handles[target_index],\n\n None => 0,\n\n };\n\n\n\n let mut index = -1;\n\n\n\n let result = unsafe {\n\n svcReplyAndReceive(\n\n &mut index,\n\n raw_handles.as_ptr(),\n\n // 
If the handle count is wrong, there's not much we can do to recover\n\n raw_handles.len().try_into().unwrap(),\n\n raw_reply_target_handle,\n\n )\n\n };\n\n\n\n (index as usize, result)\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 46, "score": 117550.55112520908 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn get_process_info(_process: &Handle, _info_type: ProcessInfoType) -> CtrResult<i64> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 47, "score": 117279.18473101241 }, { "content": "pub fn get_process_info(process: &Handle, info_type: ProcessInfoType) -> CtrResult<i64> {\n\n let mut out = 0;\n\n let result = unsafe { svcGetProcessInfo(&mut out, process.get_raw(), info_type as u32) };\n\n\n\n parse_result(result)?;\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 48, "score": 117275.29008664019 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn scan_input() {}\n\n\n", "file_path": "src/hid/mock.rs", "rank": 49, "score": 116603.07632396283 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn reply_and_receive(\n\n _raw_handles: &[u32],\n\n _reply_target: Option<usize>,\n\n) -> (usize, ResultCode) {\n\n (0, 0)\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 50, "score": 116603.07632396283 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn exit_process() {}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 51, "score": 116603.07632396283 }, { "content": "pub fn scan_input() {\n\n unsafe { hidScanInput() };\n\n}\n\n\n", "file_path": "src/hid/ctr.rs", "rank": 52, "score": 116599.18167959058 }, { "content": "pub fn exit_process() {\n\n unsafe { svcExitProcess() }\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 53, "score": 116599.18167959058 }, { "content": "pub fn sysm_exit() {}\n\n\n", "file_path": "src/ptm/mock.rs", "rank": 54, 
"score": 116599.18167959058 }, { "content": "/// Exits the ptm:sysm service.\n\npub fn sysm_exit() {\n\n unsafe { ptmSysmExit() }\n\n}\n\n\n", "file_path": "src/ptm/ctr.rs", "rank": 55, "score": 116599.18167959058 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn create_memory_block(\n\n _slice: &mut [u8],\n\n _my_permission: MemoryPermission,\n\n _other_process_permission: MemoryPermission,\n\n) -> CtrResult<Handle> {\n\n Ok(0.into())\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 56, "score": 114834.64363320373 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn unmap_memory_block(\n\n _memory_block_handle: &Handle,\n\n _slice: &mut [u8],\n\n) -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 57, "score": 114834.64363320373 }, { "content": "pub fn create_memory_block(\n\n slice: &mut [u8],\n\n my_permission: MemoryPermission,\n\n other_process_permission: MemoryPermission,\n\n) -> CtrResult<Handle> {\n\n // Check alignment\n\n // svc::create_memory_block can only take alignments of 0x1000\n\n if (slice.as_ptr() as u32 & (0x1000 - 1)) != 0 {\n\n return Err(GenericResultCode::AlignmentError.into());\n\n }\n\n\n\n let mut handle: u32 = 0;\n\n let result = unsafe {\n\n svcCreateMemoryBlock(\n\n &mut handle,\n\n slice.as_mut_ptr() as u32,\n\n slice.len() as u32,\n\n my_permission as u32,\n\n other_process_permission as u32,\n\n )\n\n };\n\n\n\n parse_result(result)?;\n\n\n\n Ok(handle.into())\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 58, "score": 114830.74898883147 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn keys_down() -> u32 {\n\n 0\n\n}\n\n\n", "file_path": "src/hid/mock.rs", "rank": 59, "score": 114169.49792894916 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn keys_up() -> u32 {\n\n 0\n\n}\n", "file_path": 
"src/hid/mock.rs", "rank": 60, "score": 114169.49792894916 }, { "content": "pub fn keys_down() -> u32 {\n\n unsafe { hidKeysDown() }\n\n}\n\n\n", "file_path": "src/hid/ctr.rs", "rank": 61, "score": 114165.60328457691 }, { "content": "pub fn keys_up() -> u32 {\n\n unsafe { hidKeysUp() }\n\n}\n", "file_path": "src/hid/ctr.rs", "rank": 62, "score": 114165.60328457691 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn query_debug_process_memory(\n\n _debug_process: &Handle,\n\n _addr: u32,\n\n) -> CtrResult<MemQueryResponse> {\n\n Ok(MemQueryResponse {\n\n mem_info: MemInfo {\n\n base_addr: 0,\n\n size: 0,\n\n perm: 0,\n\n state: 0,\n\n },\n\n page_info: PageInfo { flags: 0 },\n\n })\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 63, "score": 113155.53516025536 }, { "content": "pub fn query_debug_process_memory(\n\n debug_process: &Handle,\n\n addr: u32,\n\n) -> CtrResult<MemQueryResponse> {\n\n let mut mem_info = ctru_sys::MemInfo {\n\n base_addr: 0,\n\n perm: 0,\n\n size: 0,\n\n state: 0,\n\n };\n\n let mut page_info = ctru_sys::PageInfo { flags: 0 };\n\n let result = unsafe {\n\n svcQueryDebugProcessMemory(&mut mem_info, &mut page_info, debug_process.get_raw(), addr)\n\n };\n\n\n\n parse_result(result)?;\n\n Ok(MemQueryResponse {\n\n mem_info: unsafe { transmute::<ctru_sys::MemInfo, super::MemInfo>(mem_info) },\n\n page_info: unsafe { transmute::<ctru_sys::PageInfo, super::PageInfo>(page_info) },\n\n })\n\n}\n\n\n\n#[inline(never)]\n", "file_path": "src/svc/ctr.rs", "rank": 64, "score": 113151.64051588313 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn keys_held() -> u32 {\n\n 0\n\n}\n\n\n", "file_path": "src/hid/mock.rs", "rank": 65, "score": 112401.06523819006 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn get_time() -> u64 {\n\n 0\n\n}\n", "file_path": "src/os/mock.rs", "rank": 66, "score": 112401.06523819006 }, 
{ "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn init() -> CtrResult<()> {\n\n Ok(())\n\n}\n\n\n", "file_path": "src/hid/mock.rs", "rank": 67, "score": 112401.06523819006 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn keys_down_repeat() -> u32 {\n\n 0\n\n}\n\n\n", "file_path": "src/hid/mock.rs", "rank": 68, "score": 112401.06523819006 }, { "content": "/// Returns the OS time in milliseconds.\n\npub fn get_time() -> u64 {\n\n unsafe { osGetTime() }\n\n}\n", "file_path": "src/os/ctr.rs", "rank": 69, "score": 112397.1705938178 }, { "content": "pub fn keys_held() -> u32 {\n\n unsafe { hidKeysHeld() }\n\n}\n\n\n", "file_path": "src/hid/ctr.rs", "rank": 70, "score": 112397.1705938178 }, { "content": "pub fn init() -> CtrResult<()> {\n\n let result = unsafe { hidInit() };\n\n parse_result(result)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/hid/ctr.rs", "rank": 71, "score": 112397.1705938178 }, { "content": "pub fn keys_down_repeat() -> u32 {\n\n unsafe { hidKeysDownRepeat() }\n\n}\n\n\n", "file_path": "src/hid/ctr.rs", "rank": 72, "score": 112397.1705938178 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn read_process_memory(_debug_process: &Handle, _addr: u32, size: u32) -> CtrResult<Vec<u8>> {\n\n Ok(vec![0; size as usize])\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 73, "score": 111705.2515728771 }, { "content": "pub fn read_process_memory(debug_process: &Handle, addr: u32, size: u32) -> CtrResult<Vec<u8>> {\n\n let mut buffer = vec![0; size as usize];\n\n let result = unsafe {\n\n svcReadProcessMemory(\n\n buffer.as_mut_ptr() as *mut c_void,\n\n debug_process.get_raw(),\n\n addr,\n\n size,\n\n )\n\n };\n\n\n\n parse_result(result)?;\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 74, "score": 111701.35692850486 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), 
mocktopus::macros::mockable)]\n\npub fn sleep_thread(_nanoseconds: i64) {}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 75, "score": 108521.6379450397 }, { "content": "pub fn sleep_thread(nanoseconds: i64) {\n\n unsafe { svcSleepThread(nanoseconds) }\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 76, "score": 108517.74330066747 }, { "content": "#[inline]\n\npub fn get_cur_process_id_header() -> u32 {\n\n 0x20\n\n}\n\n\n", "file_path": "src/ipc/shared.rs", "rank": 77, "score": 107602.04863594272 }, { "content": "#[cfg(not(target_os = \"horizon\"))]\n\n#[inline]\n\npub fn set_static_buffers(_data: &[u8]) {}\n\n\n\n/// An abstraction of the thread command buffer loosely based on Citra's [RequestBuilder](https://github.com/citra-emu/citra/blob/1c8461fdea1106c68729643525c365afc93f5621/src/core/hle/ipc_helpers.h#L46).\n\n///\n\n/// This provides a few benefits:\n\n/// - A safe interface for the thread command buffer, which is always non-null and aligned\n\n/// - A more idiomatic way to write commands\n\n/// - An interface that can be mocked for unit testing on systems other than the 3ds\n\n///\n\n/// This interface aims to be inlined when possible and write to the thread command buffer directly to reduce overhead.\n\n///\n\n/// ## Warning\n\n///\n\n/// Because this provides an interface to talk with the single thread command buffer and writes to the thread command buffer as commands are being built,\n\n/// two instances of ThreadCommandBuilder should not be used at the same time. This interface is memory safe, but can be used logically wrong.\n\n///\n\n/// For example, the following is invalid:\n\n/// ```\n\n/// use libctr_rs::ipc::ThreadCommandBuilder;\n\n/// let mut command_1 = ThreadCommandBuilder::new(1u16);\n", "file_path": "src/ipc/shared.rs", "rank": 78, "score": 106925.38497047033 }, { "content": "/// Initializes the AC service. 
Required to use AC features.\n\npub fn init() -> CtrResult<ResultCode> {\n\n let handle =\n\n get_service_handle_direct(\"ac:i\").or_else(|_| get_service_handle_direct(\"ac:u\"))?;\n\n\n\n let dropped_handle = ManuallyDrop::new(handle);\n\n let raw_handle = unsafe { dropped_handle.get_raw() };\n\n AC_HANDLE.store(raw_handle, Ordering::Relaxed);\n\n\n\n Ok(0)\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Default)]\n\n#[repr(C)]\n\npub struct SsidInfo {\n\n pub length: u32,\n\n pub name: [u8; 32],\n\n}\n\n\n\n// This is safe because all fields in the struct can function with any value.\n\nunsafe impl TriviallyTransmutable for SsidInfo {}\n", "file_path": "src/ac/shared.rs", "rank": 79, "score": 106925.27904880958 }, { "content": "/// Initializes the NDM service. Required to use NDM features.\n\npub fn init() -> CtrResult<ResultCode> {\n\n let handle = get_service_handle_direct(\"ndm:u\")?;\n\n\n\n let dropped_handle = ManuallyDrop::new(handle);\n\n let raw_handle = unsafe { dropped_handle.get_raw() };\n\n NDM_HANDLE.store(raw_handle, Ordering::Relaxed);\n\n\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/ndm/shared.rs", "rank": 80, "score": 106925.27904880958 }, { "content": "/// Initializes the CFG service. Required to use CFG features.\n\npub fn init() -> CtrResult<ResultCode> {\n\n let handle = get_service_handle_direct(\"cfg:i\")\n\n .or_else(|_| get_service_handle_direct(\"cfg:s\"))\n\n .or_else(|_| get_service_handle_direct(\"cfg:u\"))?;\n\n\n\n let dropped_handle = ManuallyDrop::new(handle);\n\n let raw_handle = unsafe { dropped_handle.get_raw() };\n\n CFG_HANDLE.store(raw_handle, Ordering::Relaxed);\n\n\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/cfg/shared.rs", "rank": 81, "score": 106925.27904880958 }, { "content": "/// Initializes the SRV service. 
Required to use srv features.\n\npub fn init() -> CtrResult<ResultCode> {\n\n let result = unsafe { srvInit() };\n\n parse_result(result)\n\n}\n\n\n", "file_path": "src/srv/ctr.rs", "rank": 82, "score": 106925.27904880958 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn receive_notification() -> CtrResult<u32> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/srv/mock.rs", "rank": 83, "score": 106925.25269814287 }, { "content": "pub fn exit() -> CtrResult<ResultCode> {\n\n let result = svc::close_handle(get_handle());\n\n\n\n if result.is_ok() {\n\n NDM_HANDLE.store(0, Ordering::Relaxed)\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/ndm/shared.rs", "rank": 84, "score": 106921.35805377064 }, { "content": "pub fn init() -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/srv/mock.rs", "rank": 85, "score": 106921.35805377064 }, { "content": "pub fn receive_notification() -> CtrResult<u32> {\n\n let mut notification_id = 0u32;\n\n let result = unsafe { srvReceiveNotification(&mut notification_id) };\n\n\n\n parse_result(result)?;\n\n\n\n Ok(notification_id)\n\n}\n\n\n", "file_path": "src/srv/ctr.rs", "rank": 86, "score": 106921.35805377064 }, { "content": "pub fn exit() -> CtrResult<ResultCode> {\n\n let result = svc::close_handle(get_handle());\n\n\n\n if result.is_ok() {\n\n CFG_HANDLE.store(0, Ordering::Relaxed);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/cfg/shared.rs", "rank": 87, "score": 106921.35805377064 }, { "content": "/// Sets static buffers 0, 1, and 2 with the provided data\n\npub fn set_static_buffers(data: &[u8]) {\n\n let static_buf = get_thread_static_buffers();\n\n\n\n static_buf[0] = make_static_buffer_header(data.len(), 0);\n\n static_buf[1] = data.as_ptr() as u32;\n\n static_buf[2] = make_static_buffer_header(data.len(), 1);\n\n static_buf[3] = data.as_ptr() as u32;\n\n static_buf[4] = make_static_buffer_header(data.len(), 2);\n\n static_buf[5] = data.as_ptr() as 
u32;\n\n}\n\n\n", "file_path": "src/ipc/shared.rs", "rank": 88, "score": 106921.35805377064 }, { "content": "/// Initializes the FS service. Required to use FS features.\n\npub fn init() -> CtrResult<ResultCode> {\n\n let fs_handle = get_service_handle_direct(\"fs:USER\")?;\n\n\n\n user::initialize_with_sdk_version(&fs_handle, 0x90c00c8)?;\n\n\n\n let dropped_fs_handle = ManuallyDrop::new(fs_handle);\n\n let raw_fs_handle = unsafe { dropped_fs_handle.get_raw() };\n\n FS_HANDLE.store(raw_fs_handle, Ordering::Relaxed);\n\n\n\n user::set_priority(0)?;\n\n\n\n Ok(0)\n\n}\n\n\n\npub mod user {\n\n use super::*;\n\n\n\n pub fn set_priority(priority: u32) -> CtrResult<ResultCode> {\n\n let mut command = ThreadCommandBuilder::new(0x862u16);\n\n command.push(priority);\n", "file_path": "src/fs/ipc_wrappers.rs", "rank": 89, "score": 105405.65081077967 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn get_console_username() -> CtrResult<String> {\n\n let mut username_shorts: [u16; 15] = [0; 15];\n\n let username_buffer = transmute_to_bytes_mut(&mut username_shorts[0..14]);\n\n init()?;\n\n get_config_info_blk2(username_buffer, 0xa0000)?;\n\n exit()?;\n\n\n\n let null_terminator_index = username_shorts\n\n .iter()\n\n // We can unwrap here, because\n\n // we guaranteed a null terminator above\n\n .position(|short| *short == 0u16)\n\n .unwrap();\n\n\n\n String::from_utf16(&username_shorts[0..null_terminator_index])\n\n .map_err(|_| GenericResultCode::InvalidString.into())\n\n}\n", "file_path": "src/cfg/shared.rs", "rank": 90, "score": 105405.62446011296 }, { "content": "/// Initializes the ptm:sysm service. 
Required to use ptm:sysm features.\n\npub fn sysm_init() -> CtrResult<ResultCode> {\n\n let result = unsafe { ptmSysmInit() };\n\n parse_result(result)\n\n}\n\n\n", "file_path": "src/ptm/ctr.rs", "rank": 91, "score": 105405.56587862747 }, { "content": "pub fn sysm_init() -> CtrResult<ResultCode> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/ptm/mock.rs", "rank": 92, "score": 105401.72981574072 }, { "content": "/// Breaks execution.\n\npub fn break_execution(reason: UserBreakType) -> ! {\n\n unsafe {\n\n asm!(\"svc 0x3C\", in(\"r0\") reason as u32);\n\n }\n\n\n\n // Allow the empty loop to get the 'never' return type\n\n // We'll never reach this far because the above will break anyways\n\n #[allow(clippy::empty_loop)]\n\n loop {}\n\n}\n\n\n", "file_path": "src/svc/ctr.rs", "rank": 93, "score": 105401.72981574072 }, { "content": "pub fn break_execution(_reason: UserBreakType) -> ! {\n\n panic!()\n\n}\n\n\n", "file_path": "src/svc/mock.rs", "rank": 94, "score": 105401.72981574072 }, { "content": "#[inline]\n\npub fn check_if_fail(result: ResultCode) -> bool {\n\n result < 0\n\n}\n\n\n", "file_path": "src/result.rs", "rank": 95, "score": 104922.2955215802 }, { "content": "#[inline]\n\npub fn check_if_success(result: ResultCode) -> bool {\n\n result >= 0\n\n}\n\n\n\n#[inline]\n", "file_path": "src/result.rs", "rank": 96, "score": 104922.2955215802 }, { "content": "#[cfg_attr(not(target_os = \"horizon\"), mocktopus::macros::mockable)]\n\npub fn acu_get_wifi_status() -> CtrResult<u32> {\n\n let command = ThreadCommandBuilder::new(0xDu16);\n\n let mut parser = command\n\n .build()\n\n .send_sync_request_with_raw_handle(get_raw_handle())?;\n\n parser.pop_result()?;\n\n\n\n Ok(parser.pop())\n\n}\n\n\n", "file_path": "src/ac/shared.rs", "rank": 97, "score": 103957.34727696539 }, { "content": "pub fn transmute_many_pedantic_mut<T: TriviallyTransmutable>(\n\n bytes: &mut [u8],\n\n) -> CtrResult<&mut [T]> {\n\n safe_transmute::transmute_many_pedantic_mut(bytes).map_err(|err| match 
err {\n\n Error::Guard(_) => GenericResultCode::InvalidSize.into(),\n\n Error::Unaligned(_) => GenericResultCode::AlignmentError.into(),\n\n Error::InvalidValue => GenericResultCode::InvalidValue.into(),\n\n })\n\n}\n\n\n", "file_path": "src/safe_transmute.rs", "rank": 98, "score": 103953.45263259317 }, { "content": "pub fn u8_slice_to_u32(bytes: &[u8]) -> u32 {\n\n let mut arr: [u8; 4] = [0; 4];\n\n\n\n for (index, byte) in bytes.iter().enumerate().take(4) {\n\n arr[index] = *byte;\n\n }\n\n\n\n u32::from_ne_bytes(arr)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n mod bytes_to_utf16le_string {\n\n use super::*;\n\n\n\n #[test]\n\n fn should_convert_bytes_to_utf16le_string() {\n\n let bytes = [0x54, 0x00, 0x65, 0x00, 0x73, 0x00, 0x74, 0x00];\n", "file_path": "src/utils/convert.rs", "rank": 99, "score": 103402.66728355028 } ]
Rust
vendor/aho-corasick/src/packed/pattern.rs
47565647456/evtx
fbb2a713d335f5208bb6675f4f158babd6f2f389
use std::cmp; use std::fmt; use std::mem; use std::u16; use std::usize; use crate::packed::api::MatchKind; pub type PatternID = u16; #[derive(Clone, Debug)] pub struct Patterns { kind: MatchKind, by_id: Vec<Vec<u8>>, order: Vec<PatternID>, minimum_len: usize, max_pattern_id: PatternID, total_pattern_bytes: usize, } impl Patterns { pub fn new() -> Patterns { Patterns { kind: MatchKind::default(), by_id: vec![], order: vec![], minimum_len: usize::MAX, max_pattern_id: 0, total_pattern_bytes: 0, } } pub fn add(&mut self, bytes: &[u8]) { assert!(!bytes.is_empty()); assert!(self.by_id.len() <= u16::MAX as usize); let id = self.by_id.len() as u16; self.max_pattern_id = id; self.order.push(id); self.by_id.push(bytes.to_vec()); self.minimum_len = cmp::min(self.minimum_len, bytes.len()); self.total_pattern_bytes += bytes.len(); } pub fn set_match_kind(&mut self, kind: MatchKind) { match kind { MatchKind::LeftmostFirst => { self.order.sort(); } MatchKind::LeftmostLongest => { let (order, by_id) = (&mut self.order, &mut self.by_id); order.sort_by(|&id1, &id2| { by_id[id1 as usize] .len() .cmp(&by_id[id2 as usize].len()) .reverse() }); } MatchKind::__Nonexhaustive => unreachable!(), } } pub fn len(&self) -> usize { self.by_id.len() } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn heap_bytes(&self) -> usize { self.order.len() * mem::size_of::<PatternID>() + self.by_id.len() * mem::size_of::<Vec<u8>>() + self.total_pattern_bytes } pub fn reset(&mut self) { self.kind = MatchKind::default(); self.by_id.clear(); self.order.clear(); self.minimum_len = usize::MAX; self.max_pattern_id = 0; } pub fn max_pattern_id(&self) -> PatternID { assert_eq!((self.max_pattern_id + 1) as usize, self.len()); self.max_pattern_id } pub fn minimum_len(&self) -> usize { self.minimum_len } pub fn match_kind(&self) -> &MatchKind { &self.kind } pub fn get(&self, id: PatternID) -> Pattern<'_> { Pattern(&self.by_id[id as usize]) } #[cfg(target_arch = "x86_64")] pub unsafe fn get_unchecked(&self, 
id: PatternID) -> Pattern<'_> { Pattern(self.by_id.get_unchecked(id as usize)) } pub fn iter(&self) -> PatternIter<'_> { PatternIter { patterns: self, i: 0 } } } #[derive(Debug)] pub struct PatternIter<'p> { patterns: &'p Patterns, i: usize, } impl<'p> Iterator for PatternIter<'p> { type Item = (PatternID, Pattern<'p>); fn next(&mut self) -> Option<(PatternID, Pattern<'p>)> { if self.i >= self.patterns.len() { return None; } let id = self.patterns.order[self.i]; let p = self.patterns.get(id); self.i += 1; Some((id, p)) } } #[derive(Clone)] pub struct Pattern<'a>(&'a [u8]); impl<'a> fmt::Debug for Pattern<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Pattern") .field("lit", &String::from_utf8_lossy(&self.0)) .finish() } } impl<'p> Pattern<'p> { pub fn len(&self) -> usize { self.0.len() } pub fn bytes(&self) -> &[u8] { &self.0 } #[cfg(target_arch = "x86_64")] pub fn low_nybbles(&self, len: usize) -> Vec<u8> { let mut nybs = vec![]; for &b in self.bytes().iter().take(len) { nybs.push(b & 0xF); } nybs } #[inline(always)] pub fn is_prefix(&self, bytes: &[u8]) -> bool { self.len() <= bytes.len() && self.equals(&bytes[..self.len()]) } #[inline(always)] pub fn equals(&self, bytes: &[u8]) -> bool { if self.len() != bytes.len() { return false; } if self.len() < 8 { for (&b1, &b2) in self.bytes().iter().zip(bytes) { if b1 != b2 { return false; } } return true; } let mut p1 = self.bytes().as_ptr(); let mut p2 = bytes.as_ptr(); let p1end = self.bytes()[self.len() - 8..].as_ptr(); let p2end = bytes[bytes.len() - 8..].as_ptr(); unsafe { while p1 < p1end { let v1 = (p1 as *const u64).read_unaligned(); let v2 = (p2 as *const u64).read_unaligned(); if v1 != v2 { return false; } p1 = p1.add(8); p2 = p2.add(8); } let v1 = (p1end as *const u64).read_unaligned(); let v2 = (p2end as *const u64).read_unaligned(); v1 == v2 } } }
use std::cmp; use std::fmt; use std::mem; use std::u16; use std::usize; use crate::packed::api::MatchKind; pub type PatternID = u16; #[derive(Clone, Debug)] pub struct Patterns { kind: MatchKind, by_id: Vec<Vec<u8>>, order: Vec<PatternID>, minimum_len: usize, max_pattern_id: PatternID, total_pattern_bytes: usize, } impl Patterns { pub fn new() -> Patterns { Patterns { kind: MatchKind::default(), by_id: vec![], order: vec![], minimum_len: usize::MAX, max_pattern_id: 0, total_pattern_bytes: 0, } } pub fn add(&mut self, bytes: &[u8]) { assert!(!bytes.is_empty()); assert!(self.by_id.len() <= u16::MAX as usize); let id = self.by_id.len() as u16; self.max_pattern_id = id; self.order.push(id); self.by_id.push(bytes.to_vec()); self.minimum_len = cmp::min(self.minimum_len, bytes.len()); self.total_pattern_bytes += bytes.len(); } pub fn set_match_kind(&mut self, kind: MatchKind) { match kind { MatchKind::LeftmostFirst => { self.order.sort(); } MatchKind::LeftmostLong
unreachable!(), } } pub fn len(&self) -> usize { self.by_id.len() } pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn heap_bytes(&self) -> usize { self.order.len() * mem::size_of::<PatternID>() + self.by_id.len() * mem::size_of::<Vec<u8>>() + self.total_pattern_bytes } pub fn reset(&mut self) { self.kind = MatchKind::default(); self.by_id.clear(); self.order.clear(); self.minimum_len = usize::MAX; self.max_pattern_id = 0; } pub fn max_pattern_id(&self) -> PatternID { assert_eq!((self.max_pattern_id + 1) as usize, self.len()); self.max_pattern_id } pub fn minimum_len(&self) -> usize { self.minimum_len } pub fn match_kind(&self) -> &MatchKind { &self.kind } pub fn get(&self, id: PatternID) -> Pattern<'_> { Pattern(&self.by_id[id as usize]) } #[cfg(target_arch = "x86_64")] pub unsafe fn get_unchecked(&self, id: PatternID) -> Pattern<'_> { Pattern(self.by_id.get_unchecked(id as usize)) } pub fn iter(&self) -> PatternIter<'_> { PatternIter { patterns: self, i: 0 } } } #[derive(Debug)] pub struct PatternIter<'p> { patterns: &'p Patterns, i: usize, } impl<'p> Iterator for PatternIter<'p> { type Item = (PatternID, Pattern<'p>); fn next(&mut self) -> Option<(PatternID, Pattern<'p>)> { if self.i >= self.patterns.len() { return None; } let id = self.patterns.order[self.i]; let p = self.patterns.get(id); self.i += 1; Some((id, p)) } } #[derive(Clone)] pub struct Pattern<'a>(&'a [u8]); impl<'a> fmt::Debug for Pattern<'a> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Pattern") .field("lit", &String::from_utf8_lossy(&self.0)) .finish() } } impl<'p> Pattern<'p> { pub fn len(&self) -> usize { self.0.len() } pub fn bytes(&self) -> &[u8] { &self.0 } #[cfg(target_arch = "x86_64")] pub fn low_nybbles(&self, len: usize) -> Vec<u8> { let mut nybs = vec![]; for &b in self.bytes().iter().take(len) { nybs.push(b & 0xF); } nybs } #[inline(always)] pub fn is_prefix(&self, bytes: &[u8]) -> bool { self.len() <= bytes.len() && 
self.equals(&bytes[..self.len()]) } #[inline(always)] pub fn equals(&self, bytes: &[u8]) -> bool { if self.len() != bytes.len() { return false; } if self.len() < 8 { for (&b1, &b2) in self.bytes().iter().zip(bytes) { if b1 != b2 { return false; } } return true; } let mut p1 = self.bytes().as_ptr(); let mut p2 = bytes.as_ptr(); let p1end = self.bytes()[self.len() - 8..].as_ptr(); let p2end = bytes[bytes.len() - 8..].as_ptr(); unsafe { while p1 < p1end { let v1 = (p1 as *const u64).read_unaligned(); let v2 = (p2 as *const u64).read_unaligned(); if v1 != v2 { return false; } p1 = p1.add(8); p2 = p2.add(8); } let v1 = (p1end as *const u64).read_unaligned(); let v2 = (p2end as *const u64).read_unaligned(); v1 == v2 } } }
est => { let (order, by_id) = (&mut self.order, &mut self.by_id); order.sort_by(|&id1, &id2| { by_id[id1 as usize] .len() .cmp(&by_id[id2 as usize].len()) .reverse() }); } MatchKind::__Nonexhaustive =>
function_block-random_span
[ { "content": "#[inline]\n\npub fn checksum_ieee(data: &[u8]) -> u32 {\n\n let mut hasher = Hasher::new();\n\n hasher.update(data);\n\n hasher.finalize()\n\n}\n\n\n\n// Rust runs the tests concurrently, so unless we synchronize logging access\n\n// it will crash when attempting to run `cargo test` with some logging facilities.\n", "file_path": "src/lib.rs", "rank": 0, "score": 124408.5862873444 }, { "content": "pub fn sample_with_dependency_id_edge_case() -> PathBuf {\n\n samples_dir().join(\"Archive-ForwardedEvents-test.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 1, "score": 106121.93464816819 }, { "content": "pub fn read_attribute(cursor: &mut Cursor<&[u8]>) -> Result<BinXMLAttribute> {\n\n trace!(\"Offset `0x{:08x}` - Attribute\", cursor.position());\n\n let name = BinXmlNameRef::from_stream(cursor)?;\n\n\n\n Ok(BinXMLAttribute { name })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 2, "score": 100063.73575327854 }, { "content": "pub fn read_processing_instruction_data(cursor: &mut Cursor<&[u8]>) -> Result<String> {\n\n trace!(\n\n \"Offset `0x{:08x}` - ProcessingInstructionTarget\",\n\n cursor.position(),\n\n );\n\n\n\n let data =\n\n try_read!(cursor, len_prefixed_utf_16_str, \"pi_data\")?.unwrap_or_else(|| \"\".to_string());\n\n trace!(\"PIData - {}\", data,);\n\n Ok(data)\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 3, "score": 100063.73575327854 }, { "content": "fn to_delimited_list<N: ToString>(ns: impl AsRef<Vec<N>>) -> String {\n\n ns.as_ref()\n\n .iter()\n\n .map(ToString::to_string)\n\n .collect::<Vec<String>>()\n\n .join(\",\")\n\n}\n\n\n\nimpl<'c> From<BinXmlValue<'c>> for serde_json::Value {\n\n fn from(value: BinXmlValue<'c>) -> Self {\n\n match value {\n\n BinXmlValue::NullType => Value::Null,\n\n BinXmlValue::StringType(s) => json!(s),\n\n BinXmlValue::AnsiStringType(s) => json!(s.into_owned()),\n\n BinXmlValue::Int8Type(num) => json!(num),\n\n BinXmlValue::UInt8Type(num) => json!(num),\n\n 
BinXmlValue::Int16Type(num) => json!(num),\n\n BinXmlValue::UInt16Type(num) => json!(num),\n\n BinXmlValue::Int32Type(num) => json!(num),\n\n BinXmlValue::UInt32Type(num) => json!(num),\n", "file_path": "src/binxml/value_variant.rs", "rank": 4, "score": 99961.19196645211 }, { "content": "pub fn read_fragment_header(cursor: &mut Cursor<&[u8]>) -> Result<BinXMLFragmentHeader> {\n\n trace!(\"Offset `0x{:08x}` - FragmentHeader\", cursor.position());\n\n let major_version = try_read!(cursor, u8, \"fragment_header_major_version\")?;\n\n let minor_version = try_read!(cursor, u8, \"fragment_header_minor_version\")?;\n\n let flags = try_read!(cursor, u8, \"fragment_header_flags\")?;\n\n Ok(BinXMLFragmentHeader {\n\n major_version,\n\n minor_version,\n\n flags,\n\n })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 5, "score": 96209.06477862669 }, { "content": "pub fn read_entity_ref(cursor: &mut Cursor<&[u8]>) -> Result<BinXmlEntityReference> {\n\n trace!(\"Offset `0x{:08x}` - EntityReference\", cursor.position());\n\n let name = BinXmlNameRef::from_stream(cursor)?;\n\n trace!(\"\\t name: {:?}\", name);\n\n\n\n Ok(BinXmlEntityReference { name })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 6, "score": 96209.06477862669 }, { "content": "/// Dumps bytes at data to the screen as hex.\n\n/// Display may be one of:\n\n/// b One-byte octal display.\n\n/// Display the input offset in hexadecimal, followed by sixteen space-separated, three column, zero-filled, bytes of input data, in octal, per line.\n\n///\n\n/// c One-byte character display. 
One-byte character display.\n\n/// Display the input offset in hexadecimal, followed by sixteen space-separated, three column, space-filled, characters of input data per line.\n\n///\n\n/// C Canonical hex display.\n\n/// Display the input offset in hexadecimal, followed by sixteen space-separated, two column, hexadecimal bytes, followed by the same sixteen bytes in %_p format enclosed in ``|'' characters.\n\n///\n\n/// d Two-byte decimal display.\n\n/// o Two-byte octal display.\n\n/// x Two-byte hexadecimal display.\n\n/// Display the input offset in hexadecimal, followed by eight, space separated, four column, zero-filled, two-byte quantities of input data, in hexadecimal, per line.\n\npub fn hexdump(\n\n data: &[u8],\n\n offset: usize,\n\n display: char,\n\n) -> Result<String, Box<dyn std::error::Error>> {\n\n let mut s = String::new();\n\n let mut address = 0;\n\n\n\n let number_of_bytes = match display {\n\n 'b' => 1,\n\n 'c' => 1,\n\n 'C' => 1,\n\n 'd' => 2,\n\n 'o' => 2,\n\n _ => 2,\n\n };\n\n\n\n while address <= data.len() {\n\n // Read next 16 bytes of until end of data\n\n let end = cmp::min(address + 16, data.len());\n", "file_path": "src/utils/hexdump.rs", "rank": 7, "score": 96093.65398622211 }, { "content": "pub fn read_substitution_descriptor(\n\n cursor: &mut Cursor<&[u8]>,\n\n optional: bool,\n\n) -> Result<TemplateSubstitutionDescriptor> {\n\n trace!(\n\n \"Offset `0x{:08x}` - SubstitutionDescriptor<optional={}>\",\n\n cursor.position(),\n\n optional\n\n );\n\n let substitution_index = try_read!(cursor, u16)?;\n\n let value_type_token = try_read!(cursor, u8)?;\n\n\n\n let value_type = BinXmlValueType::from_u8(value_type_token).ok_or(\n\n DeserializationError::InvalidValueVariant {\n\n value: value_type_token,\n\n offset: cursor.position(),\n\n },\n\n )?;\n\n\n\n let ignore = optional && (value_type == BinXmlValueType::NullType);\n\n\n\n Ok(TemplateSubstitutionDescriptor {\n\n substitution_index,\n\n value_type,\n\n ignore,\n\n })\n\n}\n\n\n", 
"file_path": "src/binxml/tokens.rs", "rank": 8, "score": 91113.98937997688 }, { "content": "#[cfg(test)]\n\npub fn ensure_env_logger_initialized() {\n\n use std::io::Write;\n\n\n\n LOGGER_INIT.call_once(|| {\n\n let mut builder = env_logger::Builder::from_default_env();\n\n builder\n\n .format(|buf, record| writeln!(buf, \"[{}] - {}\", record.level(), record.args()))\n\n .init();\n\n });\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 9, "score": 91113.98937997688 }, { "content": "#[cfg(test)]\n\npub fn ensure_env_logger_initialized() {\n\n use std::io::Write;\n\n\n\n LOGGER_INIT.call_once(|| {\n\n let mut builder = env_logger::Builder::from_default_env();\n\n builder\n\n .format(|buf, record| writeln!(buf, \"[{}] - {}\", record.level(), record.args()))\n\n .init();\n\n });\n\n}\n\n\n\n// Cannot use `cfg(test)` here since `rustdoc` won't look at it.\n\n#[cfg(debug_assertions)]\n\nmod test_readme {\n\n macro_rules! calculated_doc {\n\n ($doc:expr, $id:ident) => {\n\n #[doc = $doc]\n\n enum $id {}\n\n }\n\n }\n\n\n\n calculated_doc!(include_str!(\"../README.md\"), _DoctestReadme);\n\n}\n", "file_path": "src/lib.rs", "rank": 10, "score": 91113.98937997688 }, { "content": "/// Tests an .evtx file, asserting the number of parsed records matches `count`.\n\nfn test_full_sample(path: impl AsRef<Path>, ok_count: usize, err_count: usize) {\n\n ensure_env_logger_initialized();\n\n let mut parser = EvtxParser::from_path(path).unwrap();\n\n\n\n let mut actual_ok_count = 0;\n\n let mut actual_err_count = 0;\n\n\n\n for r in parser.records() {\n\n if r.is_ok() {\n\n actual_ok_count += 1;\n\n if log::log_enabled!(Level::Debug) {\n\n println!(\"{}\", r.unwrap().data);\n\n }\n\n } else {\n\n actual_err_count += 1;\n\n }\n\n }\n\n assert_eq!(\n\n actual_ok_count, ok_count,\n\n \"XML: Failed to parse all expected records\"\n", "file_path": "tests/test_full_samples.rs", "rank": 11, "score": 90280.53039524205 }, { "content": "pub fn expand_templates<'a>(\n\n token_tree: 
Vec<BinXMLDeserializedTokens<'a>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n) -> Result<Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>> {\n\n // We can assume the new tree will be at least as big as the old one.\n\n let mut stack = Vec::with_capacity(token_tree.len());\n\n\n\n for token in token_tree {\n\n _expand_templates(Cow::Owned(token), chunk, &mut stack)?\n\n }\n\n\n\n Ok(stack)\n\n}\n", "file_path": "src/binxml/assemble.rs", "rank": 12, "score": 89889.89633503818 }, { "content": "pub fn read_template<'a>(\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n ansi_codec: EncodingRef,\n\n) -> Result<BinXmlTemplateRef<'a>> {\n\n trace!(\"TemplateInstance at {}\", cursor.position());\n\n\n\n let _ = try_read!(cursor, u8)?;\n\n let _template_id = try_read!(cursor, u32)?;\n\n let template_definition_data_offset = try_read!(cursor, u32)?;\n\n\n\n // Need to skip over the template data.\n\n if (cursor.position() as u32) == template_definition_data_offset {\n\n let template_header = read_template_definition_header(cursor)?;\n\n try_seek!(\n\n cursor,\n\n cursor.position() + u64::from(template_header.data_size),\n\n \"Skip cached template\"\n\n )?;\n\n }\n", "file_path": "src/binxml/tokens.rs", "rank": 13, "score": 89889.89633503818 }, { "content": "pub fn read_open_start_element(\n\n cursor: &mut Cursor<&[u8]>,\n\n chunk: Option<&EvtxChunk>,\n\n has_attributes: bool,\n\n is_substitution: bool,\n\n) -> Result<BinXMLOpenStartElement> {\n\n trace!(\n\n \"Offset `0x{:08x}` - OpenStartElement<has_attributes={}, is_substitution={}>\",\n\n cursor.position(),\n\n has_attributes,\n\n is_substitution\n\n );\n\n\n\n // According to https://github.com/libyal/libevtx/blob/master/documentation/Windows%20XML%20Event%20Log%20(EVTX).asciidoc\n\n // The dependency identifier is not present when the element start is used in a substitution token.\n\n if !is_substitution {\n\n let _dependency_identifier =\n\n try_read!(cursor, u16, 
\"open_start_element_dependency_identifier\")?;\n\n\n\n trace!(\n", "file_path": "src/binxml/tokens.rs", "rank": 14, "score": 88929.46237417508 }, { "content": "pub fn read_processing_instruction_target(\n\n cursor: &mut Cursor<&[u8]>,\n\n) -> Result<BinXMLProcessingInstructionTarget> {\n\n trace!(\n\n \"Offset `0x{:08x}` - ProcessingInstructionTarget\",\n\n cursor.position(),\n\n );\n\n\n\n let name = BinXmlNameRef::from_stream(cursor)?;\n\n trace!(\"\\tPITarget Name - {:?}\", name);\n\n Ok(BinXMLProcessingInstructionTarget { name })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 15, "score": 88929.46237417508 }, { "content": "pub fn read_template_definition_header(\n\n cursor: &mut Cursor<&[u8]>,\n\n) -> Result<BinXmlTemplateDefinitionHeader> {\n\n // If any of these fail we cannot reliably report the template information in error.\n\n let next_template_offset = try_read!(cursor, u32, \"next_template_offset\")?;\n\n let template_guid = try_read!(cursor, guid, \"template_guid\")?;\n\n // Data size includes the fragment header, element and end of file token;\n\n // except for the first 33 bytes of the template definition (above)\n\n let data_size = try_read!(cursor, u32, \"template_data_size\")?;\n\n\n\n Ok(BinXmlTemplateDefinitionHeader {\n\n next_template_offset,\n\n guid: template_guid,\n\n data_size,\n\n })\n\n}\n\n\n", "file_path": "src/binxml/tokens.rs", "rank": 16, "score": 88929.46237417508 }, { "content": "pub fn sample_issue_65() -> PathBuf {\n\n samples_dir().join(\n\n \"E_ShadowCopy6_windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\",\n\n )\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 17, "score": 87512.26646240526 }, { "content": "pub fn create_record_model<'a>(\n\n tokens: Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n) -> Result<Vec<XmlModel<'a>>> {\n\n let mut current_element: Option<XmlElementBuilder> = None;\n\n let mut current_pi: Option<XmlPIBuilder> = None;\n\n let mut 
model: Vec<XmlModel> = Vec::with_capacity(tokens.len());\n\n\n\n for token in tokens {\n\n // Handle all places where we don't care if it's an Owned or a Borrowed value.\n\n match token {\n\n Cow::Owned(BinXMLDeserializedTokens::FragmentHeader(_))\n\n | Cow::Borrowed(BinXMLDeserializedTokens::FragmentHeader(_)) => {}\n\n Cow::Owned(BinXMLDeserializedTokens::TemplateInstance(_))\n\n | Cow::Borrowed(BinXMLDeserializedTokens::TemplateInstance(_)) => {\n\n return Err(EvtxError::FailedToCreateRecordModel(\n\n \"Call `expand_templates` before calling this function\",\n\n ));\n\n }\n\n Cow::Owned(BinXMLDeserializedTokens::AttributeList)\n", "file_path": "src/binxml/assemble.rs", "rank": 18, "score": 87512.26646240526 }, { "content": "pub fn read_template_definition<'a>(\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n ansi_codec: EncodingRef,\n\n) -> Result<BinXMLTemplateDefinition<'a>> {\n\n let header = read_template_definition_header(cursor)?;\n\n\n\n trace!(\n\n \"Offset `0x{:08x}` - TemplateDefinition {}\",\n\n cursor.position(),\n\n header\n\n );\n\n\n\n let template = match BinXmlDeserializer::read_binxml_fragment(\n\n cursor,\n\n chunk,\n\n Some(header.data_size),\n\n false,\n\n ansi_codec,\n\n ) {\n", "file_path": "src/binxml/tokens.rs", "rank": 19, "score": 87512.26646240526 }, { "content": "pub fn sample_with_no_crc32() -> PathBuf {\n\n samples_dir().join(\"Application_no_crc32.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 20, "score": 87512.26646240526 }, { "content": "pub fn regular_sample() -> PathBuf {\n\n samples_dir().join(\"security.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 21, "score": 87512.26646240526 }, { "content": "pub fn samples_dir() -> PathBuf {\n\n PathBuf::from(file!())\n\n .parent()\n\n .unwrap()\n\n .parent()\n\n .unwrap()\n\n .join(\"samples\")\n\n .canonicalize()\n\n .unwrap()\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 22, "score": 87512.26646240526 }, { 
"content": "pub fn sample_with_a_bad_checksum_2() -> PathBuf {\n\n samples_dir().join(\n\n \"2-vss_0-Microsoft-Windows-TerminalServices-RemoteConnectionManager%4Operational.evtx\",\n\n )\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 23, "score": 85327.73945660348 }, { "content": "pub fn sample_with_a_bad_checksum() -> PathBuf {\n\n samples_dir()\n\n .join(\"2-vss_0-Microsoft-Windows-RemoteDesktopServices-RdpCoreTS%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 24, "score": 85327.73945660348 }, { "content": "pub fn sample_with_irregular_values() -> PathBuf {\n\n samples_dir().join(\"sample-with-irregular-bool-values.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 25, "score": 85327.73945660348 }, { "content": "// first chunk has 90 records\n\nfn process_90_records(buffer: &'static [u8]) {\n\n let mut parser = EvtxParser::from_buffer(buffer.to_vec()).unwrap();\n\n\n\n for (i, record) in parser.records().take(90).enumerate() {\n\n match record {\n\n Ok(r) => {\n\n assert_eq!(r.event_record_id, i as u64 + 1);\n\n }\n\n Err(e) => println!(\"Error while reading record {}, {:?}\", i, e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/benches/benchmark.rs", "rank": 26, "score": 84721.35531524694 }, { "content": "pub fn sample_with_a_bad_chunk_magic() -> PathBuf {\n\n samples_dir().join(\"sample_with_a_bad_chunk_magic.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 27, "score": 83313.70861713319 }, { "content": "pub fn sample_with_multiple_xml_fragments() -> PathBuf {\n\n samples_dir()\n\n .join(\"E_Windows_system32_winevt_logs_Microsoft-Windows-Shell-Core%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 28, "score": 83313.70861713319 }, { "content": "pub fn sample_with_a_chunk_past_zeroes() -> PathBuf {\n\n samples_dir().join(\"2-vss_7-System.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 29, "score": 83313.70861713319 }, { "content": "pub fn 
sample_binxml_with_incomplete_template() -> PathBuf {\n\n samples_dir().join(\"Microsoft-Windows-LanguagePackSetup%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 30, "score": 83313.70861713319 }, { "content": "pub fn sample_with_invalid_flags_in_header() -> PathBuf {\n\n samples_dir().join(\"post-Security.evtx\")\n\n}\n", "file_path": "tests/fixtures.rs", "rank": 31, "score": 83313.70861713319 }, { "content": "pub fn sample_binxml_with_incomplete_sid() -> PathBuf {\n\n samples_dir().join(\"Microsoft-Windows-HelloForBusiness%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 32, "score": 83313.70861713319 }, { "content": "fn process_90_records_json(buffer: &'static [u8]) {\n\n let mut parser = EvtxParser::from_buffer(buffer.to_vec()).unwrap();\n\n\n\n for (i, record) in parser.records_json().take(90).enumerate() {\n\n match record {\n\n Ok(r) => {\n\n assert_eq!(r.event_record_id, i as u64 + 1);\n\n }\n\n Err(e) => println!(\"Error while reading record {}, {:?}\", i, e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/benches/benchmark.rs", "rank": 33, "score": 82688.92345945162 }, { "content": "pub fn sample_with_binxml_as_substitution_tokens_and_pi_target() -> PathBuf {\n\n samples_dir().join(\"E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\")\n\n}\n\n\n", "file_path": "tests/fixtures.rs", "rank": 34, "score": 79723.07516531117 }, { "content": "pub fn parse_tokens<'a, T: BinXmlOutput>(\n\n tokens: Vec<BinXMLDeserializedTokens<'a>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n visitor: &mut T,\n\n) -> Result<()> {\n\n let expanded_tokens = expand_templates(tokens, chunk)?;\n\n let record_model = create_record_model(expanded_tokens, chunk)?;\n\n\n\n visitor.visit_start_of_stream()?;\n\n\n\n let mut stack = vec![];\n\n\n\n for owned_token in record_model {\n\n match owned_token {\n\n XmlModel::OpenElement(open_element) => {\n\n stack.push(open_element);\n\n 
visitor.visit_open_start_element(stack.last().ok_or({\n\n EvtxError::FailedToCreateRecordModel(\n\n \"Invalid parser state - expected stack to be non-empty\",\n\n )\n", "file_path": "src/binxml/assemble.rs", "rank": 35, "score": 75995.72999343241 }, { "content": "/// Reads an ansi encoded string from the given stream using `ansi_codec`.\n\npub fn read_ansi_encoded_string<T: ReadSeek>(\n\n stream: &mut T,\n\n size: u64,\n\n ansi_codec: EncodingRef,\n\n) -> DeserializationResult<Option<String>> {\n\n match size {\n\n 0 => Ok(None),\n\n _ => {\n\n let mut bytes = vec![0; size as usize];\n\n stream.read_exact(&mut bytes)?;\n\n\n\n // There may be multiple NULs in the string, prune them.\n\n bytes.retain(|&b| b != 0);\n\n\n\n let s = match decode(&bytes, DecoderTrap::Strict, ansi_codec).0 {\n\n Ok(s) => s,\n\n Err(message) => {\n\n let as_boxed_err = Box::<dyn StdErr + Send + Sync>::from(message.to_string());\n\n let wrapped_io_err = WrappedIoError::capture_hexdump(as_boxed_err, stream);\n\n return Err(DeserializationError::FailedToReadToken {\n", "file_path": "src/utils/binxml_utils.rs", "rank": 36, "score": 75151.93857784008 }, { "content": "pub fn read_len_prefixed_utf16_string<T: ReadSeek>(\n\n stream: &mut T,\n\n is_null_terminated: bool,\n\n) -> Result<Option<String>, FailedToReadString> {\n\n let expected_number_of_characters = stream.read_u16::<LittleEndian>()?;\n\n let needed_bytes = u64::from(expected_number_of_characters * 2);\n\n\n\n trace!(\n\n \"Offset `0x{offset:08x} ({offset})` reading a{nul}string of len {len}\",\n\n offset = stream.tell().unwrap_or(0),\n\n nul = if is_null_terminated {\n\n \" null terminated \"\n\n } else {\n\n \" \"\n\n },\n\n len = expected_number_of_characters\n\n );\n\n\n\n let s = read_utf16_by_size(stream, needed_bytes)?;\n\n\n", "file_path": "src/utils/binxml_utils.rs", "rank": 37, "score": 73649.5499229921 }, { "content": "pub fn read_systemtime<R: ReadSeek>(r: &mut R) -> DeserializationResult<DateTime<Utc>> {\n\n let year = 
try_read!(r, u16)?;\n\n let month = try_read!(r, u16)?;\n\n let _day_of_week = try_read!(r, u16)?;\n\n let day = try_read!(r, u16)?;\n\n let hour = try_read!(r, u16)?;\n\n let minute = try_read!(r, u16)?;\n\n let second = try_read!(r, u16)?;\n\n let milliseconds = try_read!(r, u16)?;\n\n\n\n Ok(DateTime::from_utc(\n\n NaiveDate::from_ymd(i32::from(year), u32::from(month), u32::from(day)).and_hms_nano(\n\n u32::from(hour),\n\n u32::from(minute),\n\n u32::from(second),\n\n u32::from(milliseconds),\n\n ),\n\n Utc,\n\n ))\n\n}\n", "file_path": "src/utils/time.rs", "rank": 38, "score": 59077.940002811636 }, { "content": "pub fn read_null_terminated_utf16_string<T: ReadSeek>(stream: &mut T) -> io::Result<String> {\n\n read_utf16_string(stream, None)\n\n}\n\n\n", "file_path": "src/utils/binxml_utils.rs", "rank": 39, "score": 55797.539486465015 }, { "content": "pub fn dump_stream<T: ReadSeek>(cursor: &mut T, lookbehind: i32) -> Result<String, Box<dyn Error>> {\n\n let mut s = String::new();\n\n\n\n cursor.seek(SeekFrom::Current(lookbehind.into()))?;\n\n\n\n let mut data = vec![0; 100_usize];\n\n let _ = cursor.read(&mut data)?;\n\n\n\n writeln!(\n\n s,\n\n \"\\n\\n---------------------------------------------------------------------------\"\n\n )?;\n\n writeln!(s, \"Current Value {:02x}\", data[0])?;\n\n writeln!(s, \" --\")?;\n\n write!(s, \"{}\", hexdump(&data, 0, 'C')?)?;\n\n writeln!(\n\n s,\n\n \"\\n----------------------------------------------------------------------------\"\n\n )?;\n\n\n\n Ok(s)\n\n}\n\n\n", "file_path": "src/utils/hexdump.rs", "rank": 40, "score": 52029.50628775032 }, { "content": "/// Reads a utf16 string from the given stream.\n\n/// size is the actual byte representation of the string (not the number of characters).\n\npub fn read_utf16_by_size<T: ReadSeek>(stream: &mut T, size: u64) -> io::Result<Option<String>> {\n\n match size {\n\n 0 => Ok(None),\n\n _ => read_utf16_string(stream, Some(size as usize / 2)).map(|mut s| {\n\n // Strip nul 
terminator if needed\n\n if let Some('\\0') = s.chars().last() {\n\n s.pop();\n\n }\n\n Some(s)\n\n }),\n\n }\n\n}\n\n\n", "file_path": "src/utils/binxml_utils.rs", "rank": 41, "score": 51862.26667498579 }, { "content": "pub trait BinXmlOutput {\n\n /// Called once when EOF is reached.\n\n fn visit_end_of_stream(&mut self) -> SerializationResult<()>;\n\n\n\n /// Called on <Tag attr=\"value\" another_attr=\"value\">.\n\n fn visit_open_start_element(\n\n &mut self,\n\n open_start_element: &XmlElement,\n\n ) -> SerializationResult<()>;\n\n\n\n /// Called on </Tag>, implementor may want to keep a stack to properly close tags.\n\n fn visit_close_element(&mut self, element: &XmlElement) -> SerializationResult<()>;\n\n\n\n ///\n\n /// Called with value on xml text node, (ex. <Computer>DESKTOP-0QT8017</Computer>)\n\n /// ~~~~~~~~~~~~~~~\n\n fn visit_characters(&mut self, value: &BinXmlValue) -> SerializationResult<()>;\n\n\n\n /// Unimplemented\n\n fn visit_cdata_section(&mut self) -> SerializationResult<()>;\n", "file_path": "src/xml_output.rs", "rank": 42, "score": 48627.504522409254 }, { "content": "fn print_line(\n\n line: &[u8],\n\n address: usize,\n\n display: char,\n\n bytes: usize,\n\n) -> Result<String, Box<dyn std::error::Error>> {\n\n let mut s = String::new();\n\n // print address (ex - 000000d0)\n\n write!(s, \"\\n{:08x}:\", address)?;\n\n\n\n let words = if (line.len() % bytes) == 0 {\n\n line.len() / bytes\n\n } else {\n\n (line.len() / bytes) + 1\n\n };\n\n\n\n for b in 0..words {\n\n let word = match bytes {\n\n 1 => u16::from(line[b]),\n\n _ => {\n", "file_path": "src/utils/hexdump.rs", "rank": 43, "score": 46847.31814915155 }, { "content": "#[test]\n\nfn it_respects_directory_output() {\n\n let d = tempdir().unwrap();\n\n let f = d.as_ref().join(\"test.out\");\n\n\n\n let sample = regular_sample();\n\n\n\n let mut cmd = Command::cargo_bin(\"evtx_dump\").expect(\"failed to find binary\");\n\n cmd.args(&[\"-f\", &f.to_string_lossy(), 
sample.to_str().unwrap()]);\n\n\n\n assert!(\n\n cmd.output().unwrap().stdout.is_empty(),\n\n \"Expected output to be printed to file, but was printed to stdout\"\n\n );\n\n\n\n let mut expected = vec![];\n\n\n\n File::open(&f).unwrap().read_to_end(&mut expected).unwrap();\n\n assert!(\n\n !expected.is_empty(),\n\n \"Expected output to be printed to file\"\n\n )\n\n}\n\n\n", "file_path": "tests/test_cli.rs", "rank": 44, "score": 45564.34433680696 }, { "content": "#[test]\n\nfn test_issue_65() {\n\n test_full_sample(sample_issue_65(), 459, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 45, "score": 45564.34433680696 }, { "content": "fn expand_template<'a>(\n\n mut template: BinXmlTemplateRef<'a>,\n\n chunk: &'a EvtxChunk<'a>,\n\n stack: &mut Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n) -> Result<()> {\n\n if let Some(template_def) = chunk\n\n .template_table\n\n .get_template(template.template_def_offset)\n\n {\n\n // We expect to find all the templates in the template cache.\n\n for token in template_def.tokens.iter() {\n\n if let BinXMLDeserializedTokens::Substitution(ref substitution_descriptor) = token {\n\n expand_token_substitution(&mut template, substitution_descriptor, chunk, stack)?;\n\n } else {\n\n _expand_templates(Cow::Borrowed(token), chunk, stack)?;\n\n }\n\n }\n\n } else {\n\n // If the file was not closed correctly, there can be a template which was not found in the header.\n\n // In that case, we will try to read it directly from the chunk.\n", "file_path": "src/binxml/assemble.rs", "rank": 46, "score": 44943.82336256429 }, { "content": "fn _expand_templates<'a>(\n\n token: Cow<'a, BinXMLDeserializedTokens<'a>>,\n\n chunk: &'a EvtxChunk<'a>,\n\n stack: &mut Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n) -> Result<()> {\n\n match token {\n\n // Owned values can be consumed when flatting, and passed on as owned.\n\n Cow::Owned(BinXMLDeserializedTokens::Value(BinXmlValue::BinXmlType(tokens))) => {\n\n for token in 
tokens.into_iter() {\n\n _expand_templates(Cow::Owned(token), chunk, stack)?;\n\n }\n\n }\n\n\n\n Cow::Borrowed(BinXMLDeserializedTokens::Value(BinXmlValue::BinXmlType(tokens))) => {\n\n for token in tokens.iter() {\n\n _expand_templates(Cow::Borrowed(token), chunk, stack)?;\n\n }\n\n }\n\n // Actual template handling.\n\n Cow::Owned(BinXMLDeserializedTokens::TemplateInstance(template)) => {\n", "file_path": "src/binxml/assemble.rs", "rank": 47, "score": 44943.82336256429 }, { "content": "#[test]\n\nfn test_sample_with_no_crc32() {\n\n test_full_sample(\n\n sample_with_no_crc32(),\n\n 17,\n\n 0,\n\n )\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 48, "score": 44389.9796441549 }, { "content": "#[test]\n\nfn test_it_refuses_to_overwrite_directory() {\n\n let d = tempdir().unwrap();\n\n\n\n let sample = regular_sample();\n\n let mut cmd = Command::cargo_bin(\"evtx_dump\").expect(\"failed to find binary\");\n\n cmd.args(&[\"-f\", &d.path().to_string_lossy(), sample.to_str().unwrap()]);\n\n\n\n cmd.assert().failure().code(1);\n\n}\n\n\n", "file_path": "tests/test_cli.rs", "rank": 49, "score": 44389.9796441549 }, { "content": "// Stable shim until https://github.com/rust-lang/rust/issues/59359 is merged.\n\n// Taken from proposed std code.\n\npub trait ReadSeek: Read + Seek {\n\n fn tell(&mut self) -> io::Result<u64> {\n\n self.seek(SeekFrom::Current(0))\n\n }\n\n fn stream_len(&mut self) -> io::Result<u64> {\n\n let old_pos = self.tell()?;\n\n let len = self.seek(SeekFrom::End(0))?;\n\n\n\n // Avoid seeking a third time when we were already at the end of the\n\n // stream. 
The branch is usually way cheaper than a seek operation.\n\n if old_pos != len {\n\n self.seek(SeekFrom::Start(old_pos))?;\n\n }\n\n\n\n Ok(len)\n\n }\n\n}\n\n\n\nimpl<T: Read + Seek> ReadSeek for T {}\n\n\n", "file_path": "src/evtx_parser.rs", "rank": 50, "score": 43754.29277232604 }, { "content": "/// Reads a utf16 string from the given stream.\n\n/// If `len` is given, exactly `len` u16 values are read from the stream.\n\n/// If `len` is None, the string is assumed to be null terminated and the stream will be read to the first null (0).\n\nfn read_utf16_string<T: ReadSeek>(stream: &mut T, len: Option<usize>) -> io::Result<String> {\n\n let mut buffer = match len {\n\n Some(len) => Vec::with_capacity(len),\n\n None => Vec::new(),\n\n };\n\n\n\n match len {\n\n Some(len) => {\n\n for _ in 0..len {\n\n let next_char = stream.read_u16::<byteorder::LittleEndian>()?;\n\n buffer.push(next_char);\n\n }\n\n }\n\n None => loop {\n\n let next_char = stream.read_u16::<byteorder::LittleEndian>()?;\n\n\n\n if next_char == 0 {\n\n break;\n\n }\n\n\n\n buffer.push(next_char);\n\n },\n\n }\n\n\n\n // We need to stop if we see a NUL byte, even if asked for more bytes.\n\n decode_utf16(buffer.into_iter().take_while(|&byte| byte != 0x00))\n\n .map(|r| r.map_err(|_e| Error::from(ErrorKind::InvalidData)))\n\n .collect()\n\n}\n", "file_path": "src/utils/binxml_utils.rs", "rank": 51, "score": 43747.253659519854 }, { "content": "fn expand_token_substitution<'a>(\n\n template: &mut BinXmlTemplateRef<'a>,\n\n substitution_descriptor: &TemplateSubstitutionDescriptor,\n\n chunk: &'a EvtxChunk<'a>,\n\n stack: &mut Vec<Cow<'a, BinXMLDeserializedTokens<'a>>>,\n\n) -> Result<()> {\n\n if substitution_descriptor.ignore {\n\n return Ok(());\n\n }\n\n\n\n let value = template\n\n .substitution_array\n\n .get_mut(substitution_descriptor.substitution_index as usize);\n\n\n\n if let Some(value) = value {\n\n let value = mem::replace(\n\n value,\n\n 
BinXMLDeserializedTokens::Value(BinXmlValue::NullType),\n\n );\n\n _expand_templates(Cow::Owned(value), chunk, stack)?;\n", "file_path": "src/binxml/assemble.rs", "rank": 52, "score": 43660.8495502197 }, { "content": "fn expand_string_ref<'a>(\n\n string_ref: &BinXmlNameRef,\n\n chunk: &'a EvtxChunk<'a>,\n\n) -> Result<Cow<'a, BinXmlName>> {\n\n match chunk.string_cache.get_cached_string(string_ref.offset) {\n\n Some(s) => Ok(Cow::Borrowed(s)),\n\n None => {\n\n let mut cursor = Cursor::new(chunk.data);\n\n let cursor_ref = cursor.borrow_mut();\n\n try_seek!(cursor_ref, string_ref.offset, \"Cache missed string\")?;\n\n\n\n let string = BinXmlName::from_stream(cursor_ref)?;\n\n Ok(Cow::Owned(string))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/binxml/assemble.rs", "rank": 53, "score": 43660.8495502197 }, { "content": "#[test]\n\nfn test_event_xml_sample() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/security.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/security_event_1.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 54, "score": 43310.99278615514 }, { "content": "#[test]\n\nfn test_dirty_sample_parallel() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(8));\n\n\n\n let mut count = 0;\n\n\n\n for r in parser.records() {\n\n r.unwrap();\n\n count += 1;\n\n }\n\n\n\n 
assert_eq!(count, 14621, \"Parallel iteration failed\");\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 55, "score": 43310.99278615514 }, { "content": "#[test]\n\nfn test_event_json_sample() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/security.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records_json()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/security_event_1.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 56, "score": 43310.99278615514 }, { "content": "// https://github.com/omerbenamram/evtx/issues/10\n\nfn test_dirty_sample_single_threaded() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec()).unwrap();\n\n\n\n let mut count = 0;\n\n for r in parser.records() {\n\n r.unwrap();\n\n count += 1;\n\n }\n\n assert_eq!(count, 14621, \"Single threaded iteration failed\");\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 57, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_bad_checksum_2() {\n\n // TODO: investigate 2 failing records\n\n test_full_sample(sample_with_a_bad_checksum_2(), 1774, 2)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 58, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_sample_with_multiple_xml_fragments() {\n\n test_full_sample(sample_with_multiple_xml_fragments(), 1146, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 59, "score": 42316.217806663844 }, { 
"content": "#[test]\n\nfn test_sample_with_invalid_flags_in_header() {\n\n test_full_sample(sample_with_invalid_flags_in_header(), 126, 0)\n\n}\n", "file_path": "tests/test_full_samples.rs", "rank": 60, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_bad_checksum() {\n\n test_full_sample(sample_with_a_bad_checksum(), 1910, 4)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 61, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_it_overwrites_file_anyways_if_passed_flag() {\n\n let d = tempdir().unwrap();\n\n let f = d.as_ref().join(\"test.out\");\n\n\n\n let mut file = File::create(&f).unwrap();\n\n file.write_all(b\"I'm a file!\").unwrap();\n\n\n\n let sample = regular_sample();\n\n let mut cmd = Command::cargo_bin(\"evtx_dump\").expect(\"failed to find binary\");\n\n cmd.args(&[\n\n \"-f\",\n\n &f.to_string_lossy(),\n\n \"--no-confirm-overwrite\",\n\n sample.to_str().unwrap(),\n\n ]);\n\n\n\n cmd.assert().success();\n\n\n\n let mut expected = vec![];\n\n\n\n File::open(&f).unwrap().read_to_end(&mut expected).unwrap();\n\n assert!(\n\n !expected.is_empty(),\n\n \"Expected output to be printed to file\"\n\n )\n\n}\n", "file_path": "tests/test_cli.rs", "rank": 62, "score": 42316.217806663844 }, { "content": "#[test]\n\nfn test_dirty_sample_binxml_with_incomplete_token() {\n\n // Contains an unparsable record\n\n test_full_sample(sample_binxml_with_incomplete_sid(), 6, 1)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 63, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_user_data() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records()\n\n 
.next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", first_record.data);\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_template_as_substitution.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 64, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_sample_with_dependency_identifier_edge_case() {\n\n test_full_sample(sample_with_dependency_id_edge_case(), 653, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 65, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_entity_ref() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let record = parser\n\n .records()\n\n .filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 28)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_entity_ref.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 66, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_parses_sample_with_irregular_boolean_values() {\n\n test_full_sample(sample_with_irregular_values(), 3028, 0);\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 67, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_event_data() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = 
include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_eventdata.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 68, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_entity_ref_2() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let record = parser\n\n .records()\n\n .filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 25)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_entity_ref_2.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 69, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_json_with_multiple_nodes_same_name() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let record = parser\n\n .records_json()\n\n 
.filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 28)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_multiple_nodes_same_name.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 70, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_json_sample_with_event_data() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/2-system-Security-dirty.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records_json()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_eventdata.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 71, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_dirty_sample_binxml_with_incomplete_template() {\n\n // Contains an unparsable record\n\n test_full_sample(sample_binxml_with_incomplete_template(), 17, 1)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 72, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_chunk_past_zeros() {\n\n test_full_sample(sample_with_a_chunk_past_zeroes(), 1160, 0)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 73, "score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_dirty_sample_with_a_bad_chunk_magic() {\n\n test_full_sample(sample_with_a_bad_chunk_magic(), 270, 5)\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 74, 
"score": 41396.16623090985 }, { "content": "#[test]\n\nfn test_event_json_sample_with_separate_json_attributes() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/Application.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(\n\n ParserSettings::new()\n\n .num_threads(1)\n\n .separate_json_attributes(true),\n\n );\n\n\n\n let first_record = parser\n\n .records_json()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/application_event_1_separate_attributes.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n", "file_path": "tests/test_record_samples.rs", "rank": 75, "score": 40542.72345474198 }, { "content": "#[test]\n\nfn test_sample_with_binxml_as_substitution_tokens_and_pi_target() {\n\n test_full_sample(\n\n sample_with_binxml_as_substitution_tokens_and_pi_target(),\n\n 340,\n\n 0,\n\n )\n\n}\n\n\n", "file_path": "tests/test_full_samples.rs", "rank": 76, "score": 40542.72345474198 }, { "content": "#[test]\n\nfn test_event_json_with_multiple_nodes_same_name_separate() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\n\n \"../samples/E_Windows_system32_winevt_logs_Microsoft-Windows-CAPI2%4Operational.evtx\"\n\n );\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(\n\n ParserSettings::new()\n\n .num_threads(1)\n\n .separate_json_attributes(true),\n\n );\n\n\n\n let record = parser\n\n .records_json()\n\n .filter_map(|record| record.ok())\n\n .find(|record| record.event_record_id == 28)\n\n .expect(\"record to parse correctly\");\n\n\n\n println!(\"{}\", record.data);\n\n\n\n assert_eq!(\n\n record.data.lines().map(str::trim).collect::<String>(),\n\n 
include_str!(\"../samples/event_with_multiple_nodes_same_name_separate_attr.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n", "file_path": "tests/test_record_separate_json.rs", "rank": 77, "score": 39748.90877164121 }, { "content": "#[test]\n\nfn test_event_xml_sample_with_event_data_with_attributes_and_text() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/system.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_text_and_attributes.xml\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 78, "score": 39748.90877164121 }, { "content": "#[test]\n\nfn test_event_json_sample_with_event_data_with_attributes_and_text() {\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/system.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec())\n\n .unwrap()\n\n .with_configuration(ParserSettings::new().num_threads(1));\n\n\n\n let first_record = parser\n\n .records_json()\n\n .next()\n\n .expect(\"to have records\")\n\n .expect(\"record to parse correctly\");\n\n\n\n assert_eq!(\n\n first_record.data.lines().map(str::trim).collect::<String>(),\n\n include_str!(\"../samples/event_with_text_and_attributes.json\")\n\n .lines()\n\n .map(str::trim)\n\n .collect::<String>()\n\n );\n\n}\n\n\n", "file_path": "tests/test_record_samples.rs", "rank": 79, "score": 39748.90877164121 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let evtx_file = include_bytes!(\"../../samples/security.evtx\");\n\n // ~11ms before 
strings cache\n\n // ~9ms after strings cache\n\n // ~8ms with cached templates as well\n\n c.bench_function(\"read 90 records\", move |b| {\n\n b.iter(|| process_90_records(evtx_file))\n\n });\n\n\n\n c.bench_function(\"read 90 records json\", move |b| {\n\n b.iter(|| process_90_records_json(evtx_file))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "src/benches/benchmark.rs", "rank": 80, "score": 39684.73909839049 }, { "content": "use crate::err::{DeserializationError, DeserializationResult, WrappedIoError};\n\n\n\nuse byteorder::ReadBytesExt;\n\nuse std::io::{Read, Seek, SeekFrom};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct EvtxFileHeader {\n\n pub first_chunk_number: u64,\n\n pub last_chunk_number: u64,\n\n pub next_record_id: u64,\n\n pub header_size: u32,\n\n pub minor_version: u16,\n\n pub major_version: u16,\n\n pub header_block_size: u16,\n\n pub chunk_count: u16,\n\n pub flags: HeaderFlags,\n\n // Checksum is of first 120 bytes of header\n\n pub checksum: u32,\n\n}\n\n\n", "file_path": "src/evtx_file_header.rs", "rank": 81, "score": 18.935661557375997 }, { "content": " pub fn from_path(path: impl AsRef<Path>) -> Result<Self> {\n\n let path = path\n\n .as_ref()\n\n .canonicalize()\n\n .map_err(|e| InputError::failed_to_open_file(e, &path))?;\n\n\n\n let f = File::open(&path).map_err(|e| InputError::failed_to_open_file(e, &path))?;\n\n\n\n let cursor = f;\n\n Self::from_read_seek(cursor)\n\n }\n\n}\n\n\n\nimpl EvtxParser<Cursor<Vec<u8>>> {\n\n /// Attempts to load an evtx file from a given path, will fail the evtx header is invalid.\n\n pub fn from_buffer(buffer: Vec<u8>) -> Result<Self> {\n\n let cursor = Cursor::new(buffer);\n\n Self::from_read_seek(cursor)\n\n }\n\n}\n", "file_path": "src/evtx_parser.rs", "rank": 82, "score": 18.358635384190688 }, { "content": "use crate::binxml::tokens::read_template_definition;\n\nuse crate::err::DeserializationResult;\n\n\n\nuse 
crate::model::deserialized::BinXMLTemplateDefinition;\n\nuse crate::ChunkOffset;\n\npub use byteorder::{LittleEndian, ReadBytesExt};\n\n\n\nuse encoding::EncodingRef;\n\nuse log::trace;\n\nuse std::borrow::BorrowMut;\n\nuse std::collections::HashMap;\n\nuse std::io::{Cursor, Seek, SeekFrom};\n\n\n\npub type CachedTemplate<'chunk> = BinXMLTemplateDefinition<'chunk>;\n\n\n\n#[derive(Debug, Default)]\n\npub struct TemplateCache<'chunk>(HashMap<ChunkOffset, CachedTemplate<'chunk>>);\n\n\n\nimpl<'chunk> TemplateCache<'chunk> {\n\n pub fn new() -> Self {\n", "file_path": "src/template_cache.rs", "rank": 83, "score": 17.59298681691345 }, { "content": "\n\n#[derive(Debug, PartialOrd, PartialEq, Clone)]\n\npub struct TemplateSubstitutionDescriptor {\n\n // Zero-based (0 is first replacement)\n\n pub substitution_index: u16,\n\n pub value_type: BinXmlValueType,\n\n pub ignore: bool,\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, PartialOrd, PartialEq, Clone)]\n\npub struct BinXMLFragmentHeader {\n\n pub major_version: u8,\n\n pub minor_version: u8,\n\n pub flags: u8,\n\n}\n\n\n\n#[derive(Debug, PartialOrd, PartialEq, Clone)]\n\npub struct BinXMLAttribute {\n\n pub name: BinXmlNameRef,\n\n}\n", "file_path": "src/model/deserialized.rs", "rank": 84, "score": 16.89435674734554 }, { "content": "use crate::binxml::name::{BinXmlName, BinXmlNameLink};\n\nuse crate::err::DeserializationResult;\n\nuse crate::ChunkOffset;\n\n\n\nuse log::trace;\n\nuse std::borrow::BorrowMut;\n\nuse std::collections::HashMap;\n\nuse std::io::{Cursor, Seek, SeekFrom};\n\n\n\n#[derive(Debug)]\n\npub struct StringCache(HashMap<ChunkOffset, BinXmlName>);\n\n\n\nimpl StringCache {\n\n pub fn populate(data: &[u8], offsets: &[ChunkOffset]) -> DeserializationResult<Self> {\n\n let mut cache = HashMap::new();\n\n let mut cursor = Cursor::new(data);\n\n let cursor_ref = cursor.borrow_mut();\n\n\n\n for &offset in offsets.iter().filter(|&&offset| offset > 0) {\n\n try_seek!(cursor_ref, offset, \"first xml 
string\")?;\n", "file_path": "src/string_cache.rs", "rank": 85, "score": 16.86955641394572 }, { "content": "use winstructs::security::Sid;\n\n\n\nuse crate::evtx_chunk::EvtxChunk;\n\nuse std::fmt::Write;\n\n\n\n#[derive(Debug, PartialOrd, PartialEq, Clone)]\n\npub enum BinXmlValue<'a> {\n\n NullType,\n\n // String may originate in substitution.\n\n StringType(String),\n\n AnsiStringType(Cow<'a, str>),\n\n Int8Type(i8),\n\n UInt8Type(u8),\n\n Int16Type(i16),\n\n UInt16Type(u16),\n\n Int32Type(i32),\n\n UInt32Type(u32),\n\n Int64Type(i64),\n\n UInt64Type(u64),\n\n Real32Type(f32),\n", "file_path": "src/binxml/value_variant.rs", "rank": 86, "score": 16.791859381224153 }, { "content": "#[derive(Debug, PartialOrd, PartialEq, Clone, Hash)]\n\npub struct BinXmlNameRef {\n\n pub offset: ChunkOffset,\n\n}\n\n\n\nimpl fmt::Display for BinXmlName {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.str)\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Clone)]\n\npub(crate) struct BinXmlNameLink {\n\n pub next_string: Option<ChunkOffset>,\n\n pub hash: u16,\n\n}\n\n\n\nimpl BinXmlNameLink {\n\n pub fn from_stream(stream: &mut Cursor<&[u8]>) -> Result<Self> {\n\n let next_string = try_read!(stream, u32)?;\n", "file_path": "src/binxml/name.rs", "rank": 87, "score": 16.660408186809526 }, { "content": " BinaryArrayType,\n\n GuidArrayType,\n\n SizeTArrayType,\n\n FileTimeArrayType,\n\n SysTimeArrayType,\n\n SidArrayType,\n\n HexInt32ArrayType,\n\n HexInt64ArrayType,\n\n EvtHandleArray,\n\n BinXmlArrayType,\n\n EvtXmlArrayType,\n\n}\n\n\n\nimpl BinXmlValueType {\n\n pub fn from_u8(byte: u8) -> Option<BinXmlValueType> {\n\n match byte {\n\n 0x00 => Some(BinXmlValueType::NullType),\n\n 0x01 => Some(BinXmlValueType::StringType),\n\n 0x02 => Some(BinXmlValueType::AnsiStringType),\n\n 0x03 => Some(BinXmlValueType::Int8Type),\n", "file_path": "src/binxml/value_variant.rs", "rank": 88, "score": 16.320023804151205 }, { "content": " pub timestamp: 
DateTime<Utc>,\n\n pub tokens: Vec<BinXMLDeserializedTokens<'a>>,\n\n pub settings: Arc<ParserSettings>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct EvtxRecordHeader {\n\n pub data_size: u32,\n\n pub event_record_id: RecordId,\n\n pub timestamp: DateTime<Utc>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct SerializedEvtxRecord<T> {\n\n pub event_record_id: RecordId,\n\n pub timestamp: DateTime<Utc>,\n\n pub data: T,\n\n}\n\n\n\nimpl EvtxRecordHeader {\n", "file_path": "src/evtx_record.rs", "rank": 89, "score": 15.906522485016405 }, { "content": " Ok(BinXmlName { str: name })\n\n }\n\n\n\n pub fn as_str(&self) -> &str {\n\n &self.str\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a BinXmlName> for quick_xml::events::BytesStart<'a> {\n\n fn from(name: &'a BinXmlName) -> Self {\n\n BytesStart::borrowed_name(name.as_str().as_bytes())\n\n }\n\n}\n\n\n\nimpl<'a> From<BinXmlName> for quick_xml::events::BytesEnd<'a> {\n\n fn from(name: BinXmlName) -> Self {\n\n let inner = name.as_str().as_bytes();\n\n BytesEnd::owned(inner.to_vec())\n\n }\n\n}\n", "file_path": "src/binxml/name.rs", "rank": 90, "score": 15.852105099824724 }, { "content": " pub header: BinXmlTemplateDefinitionHeader,\n\n pub tokens: Vec<BinXMLDeserializedTokens<'a>>,\n\n}\n\n\n\n#[derive(Debug, PartialOrd, PartialEq, Clone)]\n\npub struct BinXmlEntityReference {\n\n pub name: BinXmlNameRef,\n\n}\n\n\n\n#[derive(Debug, PartialOrd, PartialEq, Clone)]\n\npub struct BinXmlTemplateRef<'a> {\n\n pub template_def_offset: ChunkOffset,\n\n pub substitution_array: Vec<BinXMLDeserializedTokens<'a>>,\n\n}\n\n\n\n#[derive(Debug, PartialOrd, PartialEq, Clone)]\n\npub struct TemplateValueDescriptor {\n\n pub size: u16,\n\n pub value_type: BinXmlValueType,\n\n}\n", "file_path": "src/model/deserialized.rs", "rank": 91, "score": 15.796297303805762 }, { "content": " Real64Type(f64),\n\n BoolType(bool),\n\n BinaryType(&'a [u8]),\n\n GuidType(Guid),\n\n SizeTType(usize),\n\n FileTimeType(DateTime<Utc>),\n\n 
SysTimeType(DateTime<Utc>),\n\n SidType(Sid),\n\n HexInt32Type(Cow<'a, str>),\n\n HexInt64Type(Cow<'a, str>),\n\n EvtHandle,\n\n // Because of the recursive type, we instantiate this enum via a method of the Deserializer\n\n BinXmlType(Vec<BinXMLDeserializedTokens<'a>>),\n\n EvtXml,\n\n StringArrayType(Vec<String>),\n\n AnsiStringArrayType,\n\n Int8ArrayType(Vec<i8>),\n\n UInt8ArrayType(Vec<u8>),\n\n Int16ArrayType(Vec<i16>),\n\n UInt16ArrayType(Vec<u16>),\n", "file_path": "src/binxml/value_variant.rs", "rank": 92, "score": 15.658237567853144 }, { "content": " if line.len() == bytes * b + 1 {\n\n u16::from_be(u16::from(line[bytes * b]) << 8)\n\n } else {\n\n u16::from_be((u16::from(line[bytes * b]) << 8) + u16::from(line[bytes * b + 1]))\n\n }\n\n }\n\n };\n\n match display {\n\n 'b' => write!(s, \" {:03o}\", word)?,\n\n 'c' => {\n\n if ((word as u8) as char).is_control() {\n\n write!(s, \" \")?\n\n } else {\n\n write!(s, \" {:03}\", (word as u8) as char)?\n\n }\n\n }\n\n 'C' => write!(s, \" {:02x}\", word)?,\n\n 'x' => write!(s, \" {:04x}\", word)?,\n\n 'o' => write!(s, \" {:06o} \", word)?,\n\n 'd' => write!(s, \" {:05} \", word)?,\n", "file_path": "src/utils/hexdump.rs", "rank": 93, "score": 15.633835062546801 }, { "content": " }\n\n}\n\n\n\nimpl ParserSettings {\n\n pub fn new() -> Self {\n\n ParserSettings::default()\n\n }\n\n\n\n /// Sets the number of worker threads.\n\n /// `0` will let rayon decide.\n\n ///\n\n #[cfg(feature = \"multithreading\")]\n\n pub fn num_threads(mut self, num_threads: usize) -> Self {\n\n self.num_threads = if num_threads == 0 {\n\n rayon::current_num_threads()\n\n } else {\n\n num_threads\n\n };\n\n self\n\n }\n", "file_path": "src/evtx_parser.rs", "rank": 94, "score": 15.613866697897867 }, { "content": "use crate::binxml::assemble::parse_tokens;\n\nuse crate::err::{\n\n DeserializationError, DeserializationResult, EvtxError, Result, SerializationError,\n\n};\n\nuse crate::json_output::JsonOutput;\n\nuse 
crate::model::deserialized::BinXMLDeserializedTokens;\n\nuse crate::xml_output::{BinXmlOutput, XmlOutput};\n\nuse crate::{EvtxChunk, ParserSettings};\n\n\n\nuse byteorder::ReadBytesExt;\n\nuse chrono::prelude::*;\n\nuse std::io::{Cursor, Read};\n\nuse std::sync::Arc;\n\n\n\npub type RecordId = u64;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct EvtxRecord<'a> {\n\n pub chunk: &'a EvtxChunk<'a>,\n\n pub event_record_id: RecordId,\n", "file_path": "src/evtx_record.rs", "rank": 95, "score": 15.415091017384777 }, { "content": " #[test]\n\n #[cfg(feature = \"multithreading\")]\n\n fn test_multithreading() {\n\n use std::collections::HashSet;\n\n\n\n ensure_env_logger_initialized();\n\n let evtx_file = include_bytes!(\"../samples/security.evtx\");\n\n let mut parser = EvtxParser::from_buffer(evtx_file.to_vec()).unwrap();\n\n\n\n let mut record_ids = HashSet::new();\n\n for record in parser.records().take(1000) {\n\n match record {\n\n Ok(r) => {\n\n record_ids.insert(r.event_record_id);\n\n }\n\n Err(e) => panic!(\"Error while reading record {:?}\", e),\n\n }\n\n }\n\n\n\n assert_eq!(record_ids.len(), 1000);\n", "file_path": "src/evtx_parser.rs", "rank": 96, "score": 15.35822542967153 }, { "content": "\n\n let number_of_substitutions = try_read!(cursor, u32)?;\n\n\n\n let mut value_descriptors = Vec::with_capacity(number_of_substitutions as usize);\n\n\n\n for _ in 0..number_of_substitutions {\n\n let size = try_read!(cursor, u16)?;\n\n let value_type_token = try_read!(cursor, u8)?;\n\n\n\n let value_type = BinXmlValueType::from_u8(value_type_token).ok_or(\n\n DeserializationError::InvalidValueVariant {\n\n value: value_type_token,\n\n offset: cursor.position(),\n\n },\n\n )?;\n\n\n\n // Empty\n\n let _ = try_read!(cursor, u8)?;\n\n\n\n value_descriptors.push(TemplateValueDescriptor { size, value_type })\n", "file_path": "src/binxml/tokens.rs", "rank": 97, "score": 15.020059883587624 }, { "content": "}\n\n\n\nimpl EvtxChunkData {\n\n /// Construct a new chunk from the 
given data.\n\n /// Note that even when validate_checksum is set to false, the header magic is still checked.\n\n pub fn new(data: Vec<u8>, validate_checksum: bool) -> EvtxChunkResult<Self> {\n\n let mut cursor = Cursor::new(data.as_slice());\n\n let header = EvtxChunkHeader::from_reader(&mut cursor)?;\n\n\n\n let chunk = EvtxChunkData { header, data };\n\n if validate_checksum && !chunk.validate_checksum() {\n\n // TODO: return checksum here.\n\n return Err(ChunkError::InvalidChunkChecksum {\n\n expected: 0,\n\n found: 0,\n\n });\n\n }\n\n\n\n Ok(chunk)\n\n }\n", "file_path": "src/evtx_chunk.rs", "rank": 98, "score": 14.320107338670534 }, { "content": " 0x94 => Some(BinXmlValueType::HexInt32ArrayType),\n\n 0x95 => Some(BinXmlValueType::HexInt64ArrayType),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> BinXmlValue<'a> {\n\n pub fn from_binxml_stream(\n\n cursor: &mut Cursor<&'a [u8]>,\n\n chunk: Option<&'a EvtxChunk<'a>>,\n\n size: Option<u16>,\n\n ansi_codec: EncodingRef,\n\n ) -> Result<BinXmlValue<'a>> {\n\n let value_type_token = try_read!(cursor, u8)?;\n\n\n\n let value_type = BinXmlValueType::from_u8(value_type_token).ok_or(\n\n DeserializationError::InvalidValueVariant {\n\n value: value_type_token,\n\n offset: cursor.position(),\n", "file_path": "src/binxml/value_variant.rs", "rank": 99, "score": 14.28984018126079 } ]
Rust
examples/compute/main.rs
hasenbanck/asche
a205c4364b9e425d3bd6b1b73e12c949a51e1e18
use erupt::vk; use asche::{CommandBufferSemaphore, QueueConfiguration, Queues}; fn main() -> Result<(), asche::AscheError> { let event_loop = winit::event_loop::EventLoop::new(); let window = winit::window::WindowBuilder::new() .with_title("asche - compute example") .with_inner_size(winit::dpi::PhysicalSize::new(1920, 1080)) .build(&event_loop) .unwrap(); #[cfg(feature = "tracing")] { let filter = tracing_subscriber::EnvFilter::from_default_env(); tracing_subscriber::fmt().with_env_filter(filter).init(); } let instance = asche::Instance::new( &window, asche::InstanceConfiguration { app_name: "compute example", app_version: asche::Version { major: 1, minor: 0, patch: 0, }, engine_name: "engine example", engine_version: asche::Version { major: 1, minor: 0, patch: 0, }, extensions: vec![], }, )?; let (device, _, queues) = unsafe { instance.request_device(asche::DeviceConfiguration { queue_configuration: QueueConfiguration { compute_queues: vec![1.0], graphics_queues: vec![], transfer_queues: vec![], }, ..Default::default() }) }?; let Queues { mut compute_queues, graphics_queues: _graphics_queues, transfer_queues: _transfer_queues, } = queues; let mut app = Application::new(device, compute_queues.pop().unwrap())?; app.compute()?; Ok(()) } #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)] pub enum Lifetime { Buffer, } impl asche::Lifetime for Lifetime {} struct Application { device: asche::Device<Lifetime>, compute_queue: asche::ComputeQueue, compute_command_pool: asche::ComputeCommandPool, pipeline: asche::ComputePipeline, pipeline_layout: asche::PipelineLayout, timeline: asche::TimelineSemaphore, timeline_value: u64, descriptor_set_layout: asche::DescriptorSetLayout, descriptor_pool: asche::DescriptorPool, } impl Drop for Application { fn drop(&mut self) { unsafe { self.device .wait_idle() .expect("couldn't wait for device to become idle while dropping") } } } impl Application { fn new( device: asche::Device<Lifetime>, mut compute_queue: asche::ComputeQueue, ) -> 
Result<Self, asche::AscheError> { let comp_module = unsafe { device.create_shader_module( "Compute Shader Module", include_bytes!("shader/compute.comp.spv"), ) }?; let mainfunctionname = std::ffi::CString::new("main").unwrap(); let compute_shader_stage = vk::PipelineShaderStageCreateInfoBuilder::new() .stage(vk::ShaderStageFlagBits::COMPUTE) .module(comp_module.raw()) .name(&mainfunctionname); let bindings = [vk::DescriptorSetLayoutBindingBuilder::new() .binding(0) .descriptor_count(1) .descriptor_type(vk::DescriptorType::STORAGE_BUFFER) .stage_flags(vk::ShaderStageFlags::COMPUTE)]; let layout_info = vk::DescriptorSetLayoutCreateInfoBuilder::new().bindings(&bindings); let descriptor_set_layout = unsafe { device.create_descriptor_set_layout("Compute Descriptor Set Layout", layout_info) }?; let pool_sizes = [vk::DescriptorPoolSizeBuilder::new() .descriptor_count(1) ._type(vk::DescriptorType::STORAGE_BUFFER)]; let descriptor_pool = unsafe { device.create_descriptor_pool(&asche::DescriptorPoolDescriptor { name: "Compute Descriptor Pool", max_sets: 16, pool_sizes: &pool_sizes, flags: None, }) }?; let layouts = [descriptor_set_layout.raw()]; let pipeline_layout = vk::PipelineLayoutCreateInfoBuilder::new().set_layouts(&layouts); let pipeline_layout = unsafe { device.create_pipeline_layout("Compute Pipeline Layout", pipeline_layout) }?; let pipeline_info = vk::ComputePipelineCreateInfoBuilder::new() .layout(pipeline_layout.raw()) .stage(compute_shader_stage.build()); let pipeline = unsafe { device.create_compute_pipeline("Compute Pipeline", pipeline_info) }?; let compute_command_pool = unsafe { compute_queue.create_command_pool() }?; let timeline_value = 0; let timeline = unsafe { device.create_timeline_semaphore("Compute Timeline", timeline_value) }?; Ok(Self { device, compute_queue, compute_command_pool, pipeline, pipeline_layout, timeline, timeline_value, descriptor_set_layout, descriptor_pool, }) } fn compute(&mut self) -> Result<(), asche::AscheError> { const 
ELEMENTS: usize = 64 * 1024; const DATA_SIZE: u64 = (ELEMENTS * std::mem::size_of::<u32>()) as u64; let mut data: Vec<u32> = vec![0; ELEMENTS]; data.iter_mut() .enumerate() .for_each(|(id, x)| *x = id as u32); let mut buffer = unsafe { self.device.create_buffer(&asche::BufferDescriptor::<_> { name: "Input Buffer", usage: vk::BufferUsageFlags::STORAGE_BUFFER, memory_location: vk_alloc::MemoryLocation::CpuToGpu, lifetime: Lifetime::Buffer, sharing_mode: vk::SharingMode::EXCLUSIVE, queues: vk::QueueFlags::COMPUTE, size: DATA_SIZE, flags: None, }) }?; unsafe { let data_slice = buffer .mapped_slice_mut()? .expect("data buffer allocation was not mapped"); data_slice[..].clone_from_slice(bytemuck::cast_slice(&data)); buffer.flush()?; } let compute_buffer = unsafe { self.compute_command_pool.create_command_buffer( &[CommandBufferSemaphore::Timeline { semaphore: self.timeline.handle(), stage: vk::PipelineStageFlags2KHR::NONE_KHR, value: self.timeline_value, }], &[CommandBufferSemaphore::Timeline { semaphore: self.timeline.handle(), stage: vk::PipelineStageFlags2KHR::NONE_KHR, value: self.timeline_value + 1, }], ) }?; let set = unsafe { self.descriptor_pool.create_descriptor_set( "Compute Descriptor Set", &self.descriptor_set_layout, None, ) }?; let buffer_info = [vk::DescriptorBufferInfoBuilder::new() .buffer(buffer.raw()) .offset(0) .range(DATA_SIZE)]; let write = vk::WriteDescriptorSetBuilder::new() .dst_set(set.raw()) .dst_binding(0) .descriptor_type(vk::DescriptorType::STORAGE_BUFFER) .buffer_info(&buffer_info); unsafe { self.device.update_descriptor_sets(&[write], &[]) }; unsafe { let encoder = compute_buffer.record()?; encoder.bind_pipeline(&self.pipeline); encoder.bind_descriptor_sets(self.pipeline_layout.raw(), 0, &[set.raw()], &[]); encoder.dispatch(1024, 1, 1); drop(encoder); self.compute_queue.submit(&compute_buffer, None)?; self.timeline_value += 1; self.timeline.wait_for_value(self.timeline_value)?; } unsafe { let data_slice = buffer .mapped_slice() 
.expect("data buffer allocation was not mapped") .unwrap(); data[..].clone_from_slice(bytemuck::cast_slice(data_slice)); } data.iter() .enumerate() .for_each(|(id, output)| assert_eq!((id * 42) as u32, *output)); Ok(()) } }
use erupt::vk; use asche::{CommandBufferSemaphore, QueueConfiguration, Queues}; fn main() -> Result<(), asche::AscheError> { let event_loop = winit::event_loop::EventLoop::new(); let window = winit::window::WindowBuilder::new() .with_title("asche - compute example") .with_inner_size(winit::dpi::PhysicalSize::new(1920, 1080)) .build(&event_loop) .unwrap(); #[cfg(feature = "tracing")] { let filter = tracing_subscriber::EnvFilter::from_default_env(); tracing_subscriber::fmt().with_env_filter(filter).init(); }
t app = Application::new(device, compute_queues.pop().unwrap())?; app.compute()?; Ok(()) } #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)] pub enum Lifetime { Buffer, } impl asche::Lifetime for Lifetime {} struct Application { device: asche::Device<Lifetime>, compute_queue: asche::ComputeQueue, compute_command_pool: asche::ComputeCommandPool, pipeline: asche::ComputePipeline, pipeline_layout: asche::PipelineLayout, timeline: asche::TimelineSemaphore, timeline_value: u64, descriptor_set_layout: asche::DescriptorSetLayout, descriptor_pool: asche::DescriptorPool, } impl Drop for Application { fn drop(&mut self) { unsafe { self.device .wait_idle() .expect("couldn't wait for device to become idle while dropping") } } } impl Application { fn new( device: asche::Device<Lifetime>, mut compute_queue: asche::ComputeQueue, ) -> Result<Self, asche::AscheError> { let comp_module = unsafe { device.create_shader_module( "Compute Shader Module", include_bytes!("shader/compute.comp.spv"), ) }?; let mainfunctionname = std::ffi::CString::new("main").unwrap(); let compute_shader_stage = vk::PipelineShaderStageCreateInfoBuilder::new() .stage(vk::ShaderStageFlagBits::COMPUTE) .module(comp_module.raw()) .name(&mainfunctionname); let bindings = [vk::DescriptorSetLayoutBindingBuilder::new() .binding(0) .descriptor_count(1) .descriptor_type(vk::DescriptorType::STORAGE_BUFFER) .stage_flags(vk::ShaderStageFlags::COMPUTE)]; let layout_info = vk::DescriptorSetLayoutCreateInfoBuilder::new().bindings(&bindings); let descriptor_set_layout = unsafe { device.create_descriptor_set_layout("Compute Descriptor Set Layout", layout_info) }?; let pool_sizes = [vk::DescriptorPoolSizeBuilder::new() .descriptor_count(1) ._type(vk::DescriptorType::STORAGE_BUFFER)]; let descriptor_pool = unsafe { device.create_descriptor_pool(&asche::DescriptorPoolDescriptor { name: "Compute Descriptor Pool", max_sets: 16, pool_sizes: &pool_sizes, flags: None, }) }?; let layouts = [descriptor_set_layout.raw()]; let 
pipeline_layout = vk::PipelineLayoutCreateInfoBuilder::new().set_layouts(&layouts); let pipeline_layout = unsafe { device.create_pipeline_layout("Compute Pipeline Layout", pipeline_layout) }?; let pipeline_info = vk::ComputePipelineCreateInfoBuilder::new() .layout(pipeline_layout.raw()) .stage(compute_shader_stage.build()); let pipeline = unsafe { device.create_compute_pipeline("Compute Pipeline", pipeline_info) }?; let compute_command_pool = unsafe { compute_queue.create_command_pool() }?; let timeline_value = 0; let timeline = unsafe { device.create_timeline_semaphore("Compute Timeline", timeline_value) }?; Ok(Self { device, compute_queue, compute_command_pool, pipeline, pipeline_layout, timeline, timeline_value, descriptor_set_layout, descriptor_pool, }) } fn compute(&mut self) -> Result<(), asche::AscheError> { const ELEMENTS: usize = 64 * 1024; const DATA_SIZE: u64 = (ELEMENTS * std::mem::size_of::<u32>()) as u64; let mut data: Vec<u32> = vec![0; ELEMENTS]; data.iter_mut() .enumerate() .for_each(|(id, x)| *x = id as u32); let mut buffer = unsafe { self.device.create_buffer(&asche::BufferDescriptor::<_> { name: "Input Buffer", usage: vk::BufferUsageFlags::STORAGE_BUFFER, memory_location: vk_alloc::MemoryLocation::CpuToGpu, lifetime: Lifetime::Buffer, sharing_mode: vk::SharingMode::EXCLUSIVE, queues: vk::QueueFlags::COMPUTE, size: DATA_SIZE, flags: None, }) }?; unsafe { let data_slice = buffer .mapped_slice_mut()? 
.expect("data buffer allocation was not mapped"); data_slice[..].clone_from_slice(bytemuck::cast_slice(&data)); buffer.flush()?; } let compute_buffer = unsafe { self.compute_command_pool.create_command_buffer( &[CommandBufferSemaphore::Timeline { semaphore: self.timeline.handle(), stage: vk::PipelineStageFlags2KHR::NONE_KHR, value: self.timeline_value, }], &[CommandBufferSemaphore::Timeline { semaphore: self.timeline.handle(), stage: vk::PipelineStageFlags2KHR::NONE_KHR, value: self.timeline_value + 1, }], ) }?; let set = unsafe { self.descriptor_pool.create_descriptor_set( "Compute Descriptor Set", &self.descriptor_set_layout, None, ) }?; let buffer_info = [vk::DescriptorBufferInfoBuilder::new() .buffer(buffer.raw()) .offset(0) .range(DATA_SIZE)]; let write = vk::WriteDescriptorSetBuilder::new() .dst_set(set.raw()) .dst_binding(0) .descriptor_type(vk::DescriptorType::STORAGE_BUFFER) .buffer_info(&buffer_info); unsafe { self.device.update_descriptor_sets(&[write], &[]) }; unsafe { let encoder = compute_buffer.record()?; encoder.bind_pipeline(&self.pipeline); encoder.bind_descriptor_sets(self.pipeline_layout.raw(), 0, &[set.raw()], &[]); encoder.dispatch(1024, 1, 1); drop(encoder); self.compute_queue.submit(&compute_buffer, None)?; self.timeline_value += 1; self.timeline.wait_for_value(self.timeline_value)?; } unsafe { let data_slice = buffer .mapped_slice() .expect("data buffer allocation was not mapped") .unwrap(); data[..].clone_from_slice(bytemuck::cast_slice(data_slice)); } data.iter() .enumerate() .for_each(|(id, output)| assert_eq!((id * 42) as u32, *output)); Ok(()) } }
let instance = asche::Instance::new( &window, asche::InstanceConfiguration { app_name: "compute example", app_version: asche::Version { major: 1, minor: 0, patch: 0, }, engine_name: "engine example", engine_version: asche::Version { major: 1, minor: 0, patch: 0, }, extensions: vec![], }, )?; let (device, _, queues) = unsafe { instance.request_device(asche::DeviceConfiguration { queue_configuration: QueueConfiguration { compute_queues: vec![1.0], graphics_queues: vec![], transfer_queues: vec![], }, ..Default::default() }) }?; let Queues { mut compute_queues, graphics_queues: _graphics_queues, transfer_queues: _transfer_queues, } = queues; let mu
function_block-random_span
[ { "content": "fn main() -> Result<()> {\n\n let event_loop = winit::event_loop::EventLoop::new();\n\n let window = winit::window::WindowBuilder::new()\n\n .with_title(\"asche - raytracing example\")\n\n .with_inner_size(winit::dpi::PhysicalSize::new(1920, 1080))\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n // Log level is based on RUST_LOG env var.\n\n #[cfg(feature = \"tracing\")]\n\n {\n\n let filter = tracing_subscriber::EnvFilter::from_default_env();\n\n tracing_subscriber::fmt().with_env_filter(filter).init();\n\n }\n\n\n\n let instance = asche::Instance::new(\n\n &window,\n\n asche::InstanceConfiguration {\n\n app_name: \"raytracing example\",\n\n app_version: asche::Version {\n", "file_path": "examples/raytracing/main.rs", "rank": 0, "score": 167995.78246936333 }, { "content": "fn main() -> Result<(), asche::AscheError> {\n\n let event_loop = winit::event_loop::EventLoop::new();\n\n let window = winit::window::WindowBuilder::new()\n\n .with_title(\"asche - cube example\")\n\n .with_inner_size(winit::dpi::PhysicalSize::new(1920, 1080))\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n // Log level is based on RUST_LOG env var.\n\n #[cfg(feature = \"tracing\")]\n\n {\n\n let filter = tracing_subscriber::EnvFilter::from_default_env();\n\n tracing_subscriber::fmt().with_env_filter(filter).init();\n\n }\n\n\n\n let instance = asche::Instance::new(\n\n &window,\n\n asche::InstanceConfiguration {\n\n app_name: \"cube example\",\n\n app_version: asche::Version {\n", "file_path": "examples/cube/main.rs", "rank": 2, "score": 143119.3854882716 }, { "content": "fn main() -> Result<(), asche::AscheError> {\n\n let event_loop = winit::event_loop::EventLoop::new();\n\n let window = winit::window::WindowBuilder::new()\n\n .with_title(\"asche - triangle example\")\n\n .with_inner_size(winit::dpi::PhysicalSize::new(1920, 1080))\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n // Log level is based on RUST_LOG env var.\n\n #[cfg(feature = \"tracing\")]\n\n {\n\n let filter = 
tracing_subscriber::EnvFilter::from_default_env();\n\n tracing_subscriber::fmt().with_env_filter(filter).init();\n\n }\n\n\n\n let instance = asche::Instance::new(\n\n &window,\n\n asche::InstanceConfiguration {\n\n app_name: \"triangle example\",\n\n app_version: asche::Version {\n", "file_path": "examples/triangle/main.rs", "rank": 3, "score": 143119.3854882716 }, { "content": "type Result<T> = std::result::Result<T, asche::AscheError>;\n\n\n\nconst SURFACE_FORMAT: vk::Format = vk::Format::B8G8R8A8_SRGB;\n\nconst OFFSCREEN_FORMAT: vk::Format = vk::Format::R16G16B16A16_SFLOAT;\n\n\n\n#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]\n\npub enum Lifetime {\n\n Buffer,\n\n Image,\n\n}\n\n\n\nimpl asche::Lifetime for Lifetime {}\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 4, "score": 91954.92962086119 }, { "content": "#[inline]\n\nfn align_up(offset: usize, alignment: usize) -> usize {\n\n (offset + (alignment - 1)) & !(alignment - 1)\n\n}\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 18, "score": 75725.99331716509 }, { "content": "struct RayTracingApplication {\n\n uniforms: Vec<asche::Buffer<Lifetime>>,\n\n sbt_stride_addresses: Vec<vk::StridedDeviceAddressRegionKHR>,\n\n _sbt: asche::Buffer<Lifetime>,\n\n tlas: Vec<Tlas>,\n\n blas: Vec<Blas>,\n\n models: Vec<Model>,\n\n extent: vk::Extent2D,\n\n render_fence: asche::Fence,\n\n render_semaphore: asche::BinarySemaphore,\n\n presentation_semaphore: asche::BinarySemaphore,\n\n transfer_timeline: asche::TimelineSemaphore,\n\n transfer_timeline_value: u64,\n\n offscreen_attachment: Texture,\n\n renderpass: asche::RenderPass,\n\n postprocess_pipeline_layout: asche::PipelineLayout,\n\n postprocess_pipeline: asche::GraphicsPipeline,\n\n _postprocess_descriptor_pool: asche::DescriptorPool,\n\n _postprocess_descriptor_set_layout: asche::DescriptorSetLayout,\n\n postprocess_descriptor_set: asche::DescriptorSet,\n", "file_path": "examples/raytracing/main.rs", "rank": 19, "score": 
75530.83233093133 }, { "content": "fn create_cube_data() -> (Vec<Vertex>, Vec<u32>) {\n\n let vertex_data = [\n\n Vertex {\n\n position: [-1.0, -1.0, 1.0, 1.0],\n\n tex_coord: [0.0, 0.0],\n\n },\n\n Vertex {\n\n position: [1.0, -1.0, 1.0, 1.0],\n\n tex_coord: [1.0, 0.0],\n\n },\n\n Vertex {\n\n position: [1.0, 1.0, 1.0, 1.0],\n\n tex_coord: [1.0, 1.0],\n\n },\n\n Vertex {\n\n position: [-1.0, 1.0, 1.0, 1.0],\n\n tex_coord: [0.0, 1.0],\n\n },\n\n Vertex {\n\n position: [-1.0, 1.0, -1.0, 1.0],\n", "file_path": "examples/cube/main.rs", "rank": 20, "score": 73631.1123262294 }, { "content": " },\n\n ..Default::default()\n\n })\n\n }?;\n\n\n\n let Queues {\n\n compute_queues: _compute_queues,\n\n mut graphics_queues,\n\n mut transfer_queues,\n\n } = queues;\n\n\n\n let mut app = Application::new(\n\n device,\n\n swapchain,\n\n graphics_queues.pop().unwrap(),\n\n transfer_queues.pop().unwrap(),\n\n &window,\n\n )?;\n\n\n\n event_loop.run(move |event, _, control_flow| {\n", "file_path": "examples/cube/main.rs", "rank": 21, "score": 59169.0574919851 }, { "content": " },\n\n ..Default::default()\n\n })\n\n }?;\n\n\n\n let Queues {\n\n compute_queues: _compute_queues,\n\n mut graphics_queues,\n\n transfer_queues: _transfer_queues,\n\n } = queues;\n\n\n\n let mut app = Application::new(device, swapchain, graphics_queues.pop().unwrap(), &window)?;\n\n\n\n event_loop.run(move |event, _, control_flow| {\n\n *control_flow = winit::event_loop::ControlFlow::Poll;\n\n\n\n match event {\n\n winit::event::Event::WindowEvent {\n\n event:\n\n winit::event::WindowEvent::KeyboardInput {\n", "file_path": "examples/triangle/main.rs", "rank": 22, "score": 59168.051325470755 }, { "content": " compute_queues.pop().unwrap(),\n\n graphics_queues.pop().unwrap(),\n\n transfer_queues.pop().unwrap(),\n\n window.inner_size().width,\n\n window.inner_size().height,\n\n )\n\n }\n\n .unwrap();\n\n\n\n let (materials, meshes) = gltf::load_models(include_bytes!(\"model.glb\"));\n\n\n\n unsafe {\n\n 
app.upload_uniforms()?;\n\n app.upload_model(&materials, &meshes)?;\n\n app.update_descriptor_sets();\n\n }\n\n\n\n event_loop.run(move |event, _, control_flow| {\n\n *control_flow = winit::event_loop::ControlFlow::Poll;\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 23, "score": 59167.224705634966 }, { "content": "impl Drop for RayTracingApplication {\n\n fn drop(&mut self) {\n\n unsafe {\n\n self.device\n\n .wait_idle()\n\n .expect(\"couldn't wait for device to become idle while dropping\");\n\n }\n\n }\n\n}\n\n\n\nimpl RayTracingApplication {\n\n unsafe fn new(\n\n device: asche::Device<Lifetime>,\n\n swapchain: asche::Swapchain,\n\n mut compute_queue: asche::ComputeQueue,\n\n mut graphics_queue: asche::GraphicsQueue,\n\n transfer_queue: asche::TransferQueue,\n\n width: u32,\n\n height: u32,\n\n ) -> Result<Self> {\n", "file_path": "examples/raytracing/main.rs", "rank": 24, "score": 59166.99298254394 }, { "content": " .ray_tracing_pipeline(true),\n\n ),\n\n features_acceleration_structure: Some(\n\n vk::PhysicalDeviceAccelerationStructureFeaturesKHRBuilder::new()\n\n .acceleration_structure(true),\n\n ),\n\n ..Default::default()\n\n })\n\n }?;\n\n\n\n let Queues {\n\n mut compute_queues,\n\n mut graphics_queues,\n\n mut transfer_queues,\n\n } = queues;\n\n\n\n let mut app = unsafe {\n\n RayTracingApplication::new(\n\n device,\n\n swapchain,\n", "file_path": "examples/raytracing/main.rs", "rank": 25, "score": 59164.11569513797 }, { "content": " mut graphics_queue: asche::GraphicsQueue,\n\n transfer_queue: asche::TransferQueue,\n\n window: &winit::window::Window,\n\n ) -> Result<Self, asche::AscheError> {\n\n let extent = vk::Extent2D {\n\n width: window.inner_size().width,\n\n height: window.inner_size().height,\n\n };\n\n\n\n // Shader\n\n let vert_module = unsafe {\n\n device.create_shader_module(\n\n \"Vertex Shader Module\",\n\n include_bytes!(\"shader/cube.vert.spv\"),\n\n )\n\n }?;\n\n let frag_module = unsafe {\n\n 
device.create_shader_module(\n\n \"Fragment Shader Module\",\n\n include_bytes!(\"shader/cube.frag.spv\"),\n", "file_path": "examples/cube/main.rs", "rank": 26, "score": 59163.72613787861 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Application {\n\n fn new(\n\n device: asche::Device<Lifetime>,\n\n swapchain: asche::Swapchain,\n\n mut graphics_queue: asche::GraphicsQueue,\n\n window: &winit::window::Window,\n\n ) -> Result<Self, asche::AscheError> {\n\n let extent = vk::Extent2D {\n\n width: window.inner_size().width,\n\n height: window.inner_size().height,\n\n };\n\n\n\n // Shader\n\n let vert_module = unsafe {\n\n device.create_shader_module(\n\n \"Vertex Shader Module\",\n", "file_path": "examples/triangle/main.rs", "rank": 27, "score": 59163.6947130804 }, { "content": " major: 1,\n\n minor: 0,\n\n patch: 0,\n\n },\n\n engine_name: \"engine example\",\n\n engine_version: asche::Version {\n\n major: 1,\n\n minor: 0,\n\n patch: 0,\n\n },\n\n extensions: vec![],\n\n },\n\n )?;\n\n\n\n let (device, swapchain, queues) = unsafe {\n\n instance.request_device(asche::DeviceConfiguration {\n\n queue_configuration: QueueConfiguration {\n\n compute_queues: vec![],\n\n graphics_queues: vec![1.0],\n\n transfer_queues: vec![1.0],\n", "file_path": "examples/cube/main.rs", "rank": 28, "score": 59163.54785787469 }, { "content": " major: 1,\n\n minor: 0,\n\n patch: 0,\n\n },\n\n engine_name: \"engine example\",\n\n engine_version: asche::Version {\n\n major: 1,\n\n minor: 0,\n\n patch: 0,\n\n },\n\n extensions: vec![],\n\n },\n\n )?;\n\n\n\n let (device, swapchain, queues) = unsafe {\n\n instance.request_device(asche::DeviceConfiguration {\n\n queue_configuration: QueueConfiguration {\n\n compute_queues: vec![],\n\n graphics_queues: vec![1.0],\n\n transfer_queues: vec![],\n", "file_path": "examples/triangle/main.rs", "rank": 29, "score": 59163.54785787469 }, { "content": "use bytemuck::{cast_slice, cast_slice_mut, Pod, Zeroable};\n\nuse erupt::{vk, ExtendableFromConst, 
ExtendableFromMut};\n\nuse glam::{Mat4, Vec3, Vec4};\n\n#[cfg(feature = \"tracing\")]\n\nuse tracing1::info;\n\n\n\nuse asche::{CommandBufferSemaphore, CommonCommands, Queues};\n\n\n\nuse crate::gltf::{Material, Mesh, Vertex};\n\nuse crate::uploader::Uploader;\n\n\n\nmod gltf;\n\nmod uploader;\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 30, "score": 59163.01458535487 }, { "content": "use erupt::vk;\n\n\n\nuse asche::{CommandBufferSemaphore, QueueConfiguration, Queues};\n\n\n", "file_path": "examples/triangle/main.rs", "rank": 31, "score": 59162.156858718125 }, { "content": " raytracing_pipeline_layout: asche::PipelineLayout,\n\n raytracing_pipeline: asche::RayTracingPipeline,\n\n _vertex_descriptor_set_layout: asche::DescriptorSetLayout,\n\n vertex_descriptor_set: asche::DescriptorSet,\n\n _index_descriptor_set_layout: asche::DescriptorSetLayout,\n\n index_descriptor_set: asche::DescriptorSet,\n\n _storage_descriptor_pool: asche::DescriptorPool,\n\n _raytracing_descriptor_pool: asche::DescriptorPool,\n\n _raytracing_descriptor_set_layout: asche::DescriptorSetLayout,\n\n raytracing_descriptor_set: asche::DescriptorSet,\n\n sampler: asche::Sampler,\n\n uploader: Uploader,\n\n graphics_pool: asche::GraphicsCommandPool,\n\n compute_pool: asche::ComputeCommandPool,\n\n graphics_queue: asche::GraphicsQueue,\n\n compute_queue: asche::ComputeQueue,\n\n swapchain: asche::Swapchain,\n\n device: asche::Device<Lifetime>,\n\n}\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 32, "score": 59162.04967813495 }, { "content": " match event {\n\n winit::event::Event::WindowEvent {\n\n event:\n\n winit::event::WindowEvent::KeyboardInput {\n\n input:\n\n winit::event::KeyboardInput {\n\n state: winit::event::ElementState::Pressed,\n\n virtual_keycode: Some(winit::event::VirtualKeyCode::Escape),\n\n ..\n\n },\n\n ..\n\n },\n\n ..\n\n } => *control_flow = winit::event_loop::ControlFlow::Exit,\n\n winit::event::Event::WindowEvent {\n\n event: 
winit::event::WindowEvent::CloseRequested,\n\n window_id,\n\n } if window_id == window.id() => *control_flow = winit::event_loop::ControlFlow::Exit,\n\n winit::event::Event::MainEventsCleared => {\n\n unsafe { app.render().unwrap() };\n\n }\n\n _ => (),\n\n }\n\n });\n\n}\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 33, "score": 59161.955284361175 }, { "content": "use bytemuck::{cast_slice, Pod, Zeroable};\n\nuse erupt::vk;\n\nuse glam::{Mat4, Vec3, Vec4};\n\n\n\nuse asche::{CommandBufferSemaphore, CommonCommands, QueueConfiguration, Queues};\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Copy)]\n", "file_path": "examples/cube/main.rs", "rank": 34, "score": 59161.74449258664 }, { "content": " *control_flow = winit::event_loop::ControlFlow::Poll;\n\n\n\n match event {\n\n winit::event::Event::WindowEvent {\n\n event:\n\n winit::event::WindowEvent::KeyboardInput {\n\n input:\n\n winit::event::KeyboardInput {\n\n state: winit::event::ElementState::Pressed,\n\n virtual_keycode: Some(winit::event::VirtualKeyCode::Escape),\n\n ..\n\n },\n\n ..\n\n },\n\n ..\n\n } => *control_flow = winit::event_loop::ControlFlow::Exit,\n\n winit::event::Event::WindowEvent {\n\n event: winit::event::WindowEvent::CloseRequested,\n\n window_id,\n\n } if window_id == window.id() => *control_flow = winit::event_loop::ControlFlow::Exit,\n\n winit::event::Event::MainEventsCleared => {\n\n unsafe { app.render().unwrap() };\n\n }\n\n _ => (),\n\n }\n\n });\n\n}\n\n\n", "file_path": "examples/cube/main.rs", "rank": 35, "score": 59161.63657327225 }, { "content": " input:\n\n winit::event::KeyboardInput {\n\n state: winit::event::ElementState::Pressed,\n\n virtual_keycode: Some(winit::event::VirtualKeyCode::Escape),\n\n ..\n\n },\n\n ..\n\n },\n\n ..\n\n } => *control_flow = winit::event_loop::ControlFlow::Exit,\n\n winit::event::Event::WindowEvent {\n\n event: winit::event::WindowEvent::CloseRequested,\n\n window_id,\n\n } if window_id == window.id() => *control_flow = 
winit::event_loop::ControlFlow::Exit,\n\n winit::event::Event::MainEventsCleared => {\n\n unsafe { app.render().unwrap() };\n\n }\n\n _ => (),\n\n }\n\n });\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]\n\npub enum Lifetime {\n\n Static,\n\n}\n\n\n\nimpl asche::Lifetime for Lifetime {}\n\n\n", "file_path": "examples/triangle/main.rs", "rank": 36, "score": 59161.53864314564 }, { "content": " encoder.copy_acceleration_structure(&info);\n\n }\n\n }\n\n\n\n self.compute_queue.submit(&command_buffer, None)?;\n\n self.transfer_timeline\n\n .wait_for_value(self.transfer_timeline_value)?;\n\n\n\n self.blas = compacted_blas;\n\n\n\n #[cfg(feature = \"tracing\")]\n\n {\n\n let max_size: u64 = max_sizes.iter().sum();\n\n let compacted_size: u64 = compacted_sizes.iter().sum();\n\n info!(\n\n \"Compacted BLAS from {} KiB to {} KiB\",\n\n max_size / 1024,\n\n compacted_size / 1024\n\n );\n\n }\n", "file_path": "examples/raytracing/main.rs", "rank": 37, "score": 59160.510910198194 }, { "content": " }],\n\n )?;\n\n\n\n {\n\n let encoder = compute_buffer.record()?;\n\n encoder.build_acceleration_structures(&geometry_infos, &ranges);\n\n }\n\n\n\n self.compute_queue.submit(&compute_buffer, None)?;\n\n self.transfer_timeline\n\n .wait_for_value(self.transfer_timeline_value)?;\n\n\n\n self.tlas.push(Tlas {\n\n structure,\n\n _buffer: buffer,\n\n });\n\n\n\n self.compute_pool.reset()?;\n\n\n\n Ok(())\n", "file_path": "examples/raytracing/main.rs", "rank": 38, "score": 59160.47450113425 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n unsafe fn create_new_blas(&self, id: &usize, compacted: u64) -> Result<Blas> {\n\n let buffer = self.device.create_buffer(&asche::BufferDescriptor::<_> {\n\n name: &format!(\"Model {} BLAS Buffer\", id),\n\n usage: vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Buffer,\n\n sharing_mode: 
vk::SharingMode::CONCURRENT,\n\n queues: vk::QueueFlags::GRAPHICS | vk::QueueFlags::COMPUTE,\n\n size: compacted,\n\n flags: None,\n\n })?;\n\n\n\n let creation_info = vk::AccelerationStructureCreateInfoKHRBuilder::new()\n\n .buffer(buffer.raw())\n\n .size(compacted)\n", "file_path": "examples/raytracing/main.rs", "rank": 39, "score": 59160.21749103043 }, { "content": " | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n vk::QueueFlags::GRAPHICS | vk::QueueFlags::COMPUTE,\n\n )?;\n\n let index_buffer = self.uploader.create_buffer_with_data(\n\n &self.device,\n\n &format!(\"Model {} Index Buffer\", id),\n\n cast_slice(mesh.indices.as_slice()),\n\n vk::BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR\n\n | vk::BufferUsageFlags::STORAGE_BUFFER\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n vk::QueueFlags::GRAPHICS | vk::QueueFlags::COMPUTE,\n\n )?;\n\n let transform_buffer = self.uploader.create_buffer_with_data(\n\n &self.device,\n\n &format!(\"Model {} Transform Buffer\", id),\n\n cast_slice(&transform.matrix),\n\n vk::BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR\n\n | vk::BufferUsageFlags::STORAGE_BUFFER\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n vk::QueueFlags::GRAPHICS | vk::QueueFlags::COMPUTE,\n", "file_path": "examples/raytracing/main.rs", "rank": 40, "score": 59160.11759692189 }, { "content": " let extent = vk::Extent2D { width, height };\n\n\n\n // Sampler\n\n let sampler = device.create_sampler(&asche::SamplerDescriptor {\n\n name: \"Offscreen Texture Sampler\",\n\n ..Default::default()\n\n })?;\n\n\n\n // Utility\n\n let mut timeline_value = 0;\n\n let timeline = device.create_timeline_semaphore(\"Transfer Timeline\", timeline_value)?;\n\n\n\n let compute_pool = compute_queue.create_command_pool()?;\n\n let mut graphics_pool = graphics_queue.create_command_pool()?;\n\n\n\n let mut uploader = Uploader::new(&device, transfer_queue)?;\n\n\n\n // Render pass\n\n let (\n\n renderpass,\n", "file_path": 
"examples/raytracing/main.rs", "rank": 41, "score": 59160.114409512076 }, { "content": " index_descriptor_set,\n\n _storage_descriptor_pool: storage_descriptor_pool,\n\n raytracing_pipeline_layout,\n\n _raytracing_descriptor_pool: raytracing_descriptor_pool,\n\n _raytracing_descriptor_set_layout: raytracing_descriptor_set_layout,\n\n raytracing_descriptor_set,\n\n compute_pool,\n\n graphics_pool,\n\n sampler,\n\n uploader,\n\n compute_queue,\n\n graphics_queue,\n\n device,\n\n swapchain,\n\n })\n\n }\n\n\n\n #[allow(clippy::type_complexity)]\n\n unsafe fn create_rt_pipeline(\n\n device: &asche::Device<Lifetime>,\n", "file_path": "examples/raytracing/main.rs", "rank": 42, "score": 59159.805913356315 }, { "content": " device: &asche::Device<Lifetime>,\n\n ) -> Result<(\n\n asche::RenderPass,\n\n asche::DescriptorPool,\n\n asche::DescriptorSetLayout,\n\n asche::DescriptorSet,\n\n asche::PipelineLayout,\n\n asche::GraphicsPipeline,\n\n )> {\n\n // Postprocess shader\n\n let mainfunctionname = std::ffi::CString::new(\"main\").unwrap();\n\n let frag_module = device.create_shader_module(\n\n \"Postprocess Fragment Shader Module\",\n\n include_bytes!(\"shader/postprocess.frag.spv\"),\n\n )?;\n\n let vert_module = device.create_shader_module(\n\n \"Postprocess Vertex Shader Module\",\n\n include_bytes!(\"shader/postprocess.vert.spv\"),\n\n )?;\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 43, "score": 59159.646217399946 }, { "content": " sharing_mode: vk::SharingMode::EXCLUSIVE,\n\n queues: vk::QueueFlags::COMPUTE,\n\n size: size_info.build_scratch_size,\n\n flags: None,\n\n })?;\n\n\n\n let creation_info = vk::AccelerationStructureCreateInfoKHRBuilder::new()\n\n .buffer(buffer.raw())\n\n .size(size_info.acceleration_structure_size)\n\n ._type(vk::AccelerationStructureTypeKHR::TOP_LEVEL_KHR);\n\n\n\n let structure = self\n\n .device\n\n .create_acceleration_structure(\"Model TLAS\", &creation_info)?;\n\n\n\n let geometry_infos = 
[vk::AccelerationStructureBuildGeometryInfoKHRBuilder::new()\n\n .flags(vk::BuildAccelerationStructureFlagsKHR::PREFER_FAST_TRACE_KHR)\n\n .geometries(&geometries)\n\n .mode(vk::BuildAccelerationStructureModeKHR::BUILD_KHR)\n\n ._type(vk::AccelerationStructureTypeKHR::TOP_LEVEL_KHR)\n", "file_path": "examples/raytracing/main.rs", "rank": 44, "score": 59159.123208042125 }, { "content": " ))\n\n }\n\n\n\n fn create_sbt(\n\n device: &asche::Device<Lifetime>,\n\n uploader: &mut Uploader,\n\n raytrace_properties: &vk::PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder,\n\n groups: &[vk::RayTracingShaderGroupCreateInfoKHRBuilder],\n\n raytracing_pipeline: &asche::RayTracingPipeline,\n\n ) -> Result<(\n\n asche::Buffer<Lifetime>,\n\n Vec<vk::StridedDeviceAddressRegionKHR>,\n\n )> {\n\n // A SBT orders the shaders in 4 groups:\n\n // 1. RG\n\n // 2. Miss\n\n // 3. HG\n\n // 4. Callable\n\n //\n\n // Each group must be aligned by \"shader_group_base_alignment\".\n", "file_path": "examples/raytracing/main.rs", "rank": 45, "score": 59158.866673611396 }, { "content": " self.transfer_timeline_value += 1;\n\n\n\n {\n\n let encoder = compute_buffer.record()?;\n\n encoder.build_acceleration_structures(&infos[id..id + 1], &ranges[id..id + 1]);\n\n encoder.reset_query_pool(query_pool.raw(), id as u32, 1);\n\n encoder.write_acceleration_structures_properties(\n\n &[self.blas[id].structure.raw()],\n\n vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,\n\n query_pool.raw(),\n\n id as u32,\n\n )\n\n }\n\n\n\n command_buffers.push(compute_buffer);\n\n }\n\n\n\n // Submit the command buffers and wait for them finishing.\n\n self.compute_queue.submit_all(&command_buffers, None)?;\n\n self.transfer_timeline\n", "file_path": "examples/raytracing/main.rs", "rank": 46, "score": 59158.83474532621 }, { "content": " major: 1,\n\n minor: 0,\n\n patch: 0,\n\n },\n\n engine_name: \"engine example\",\n\n engine_version: asche::Version {\n\n major: 1,\n\n minor: 0,\n\n patch: 0,\n\n },\n\n 
extensions: vec![],\n\n },\n\n )?;\n\n\n\n let (device, swapchain, queues) = unsafe {\n\n instance.request_device(asche::DeviceConfiguration {\n\n swapchain_format: SURFACE_FORMAT,\n\n extensions: vec![\n\n // For RT support\n\n vk::KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME,\n", "file_path": "examples/raytracing/main.rs", "rank": 47, "score": 59158.43103264462 }, { "content": " _image: image,\n\n })\n\n }\n\n\n\n unsafe fn create_buffer(\n\n &mut self,\n\n name: &str,\n\n buffer_data: &[u8],\n\n buffer_type: vk::BufferUsageFlags,\n\n ) -> Result<asche::Buffer<Lifetime>, asche::AscheError> {\n\n let mut stagging_buffer = self.device.create_buffer(&asche::BufferDescriptor::<_> {\n\n name: \"Staging Buffer\",\n\n usage: vk::BufferUsageFlags::TRANSFER_SRC,\n\n memory_location: vk_alloc::MemoryLocation::CpuToGpu,\n\n lifetime: Lifetime::Static,\n\n sharing_mode: vk::SharingMode::CONCURRENT,\n\n queues: vk::QueueFlags::TRANSFER | vk::QueueFlags::GRAPHICS,\n\n size: buffer_data.len() as u64,\n\n flags: None,\n\n })?;\n", "file_path": "examples/cube/main.rs", "rank": 48, "score": 59158.400986189234 }, { "content": " infos.push(geometry_info);\n\n ranges.push(range.build());\n\n self.blas.push(blas);\n\n }\n\n\n\n let compacted_sizes = self.create_as_on_device(&infos, &ranges)?;\n\n self.compact_blas(&mut max_sizes, &compacted_sizes)?;\n\n\n\n self.compute_pool.reset()?;\n\n\n\n Ok(())\n\n }\n\n\n\n #[allow(unused_variables)]\n\n unsafe fn compact_blas(\n\n &mut self,\n\n max_sizes: &mut [u64],\n\n compacted_sizes: &[u64],\n\n ) -> Result<()> {\n\n self.transfer_timeline_value += 1;\n", "file_path": "examples/raytracing/main.rs", "rank": 49, "score": 59158.01060233131 }, { "content": " postprocess_renderpass,\n\n postprocess_descriptor_pool,\n\n postprocess_descriptor_set_layout,\n\n postprocess_descriptor_set,\n\n postprocess_pipeline_layout,\n\n postprocess_pipeline,\n\n ))\n\n }\n\n\n\n unsafe fn create_offscreen_image(\n\n device: &asche::Device<Lifetime>,\n\n extent: 
vk::Extent2D,\n\n pool: &mut asche::GraphicsCommandPool,\n\n queue: &mut asche::GraphicsQueue,\n\n timeline: &asche::TimelineSemaphore,\n\n timeline_value: &mut u64,\n\n ) -> Result<Texture> {\n\n let image = device.create_image(&asche::ImageDescriptor::<_> {\n\n name: \"Offscreen Image\",\n\n usage: vk::ImageUsageFlags::SAMPLED | vk::ImageUsageFlags::STORAGE,\n", "file_path": "examples/raytracing/main.rs", "rank": 50, "score": 59157.9796818131 }, { "content": " );\n\n\n\n let buffer = self.device.create_buffer(&asche::BufferDescriptor::<_> {\n\n name: \"Model TLAS Buffer\",\n\n usage: vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Buffer,\n\n sharing_mode: vk::SharingMode::CONCURRENT,\n\n queues: vk::QueueFlags::GRAPHICS | vk::QueueFlags::COMPUTE,\n\n size: size_info.acceleration_structure_size,\n\n flags: None,\n\n })?;\n\n\n\n let scratchpad = self.device.create_buffer(&asche::BufferDescriptor::<_> {\n\n name: \"Model TLAS Scratchpad\",\n\n usage: vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Buffer,\n", "file_path": "examples/raytracing/main.rs", "rank": 51, "score": 59157.90740994556 }, { "content": " uploader: &mut Uploader,\n\n ) -> Result<(\n\n asche::DescriptorPool,\n\n asche::DescriptorSetLayout,\n\n asche::DescriptorSet,\n\n asche::DescriptorPool,\n\n asche::DescriptorSetLayout,\n\n asche::DescriptorSet,\n\n asche::DescriptorSetLayout,\n\n asche::DescriptorSet,\n\n asche::PipelineLayout,\n\n asche::RayTracingPipeline,\n\n asche::Buffer<Lifetime>,\n\n Vec<vk::StridedDeviceAddressRegionKHR>,\n\n )> {\n\n // Query for RT capabilities\n\n let mut raytrace_properties =\n\n vk::PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder::new();\n\n let properties =\n\n 
vk::PhysicalDeviceProperties2Builder::new().extend_from(&mut raytrace_properties);\n", "file_path": "examples/raytracing/main.rs", "rank": 52, "score": 59157.86411843313 }, { "content": " matrix: [\n\n row_major_matrix.x_axis.into(),\n\n row_major_matrix.y_axis.into(),\n\n row_major_matrix.z_axis.into(),\n\n ],\n\n };\n\n\n\n let material_buffer = self.uploader.create_buffer_with_data(\n\n &self.device,\n\n &format!(\"Model {} Material Buffer\", id),\n\n cast_slice(&[material_data]),\n\n vk::BufferUsageFlags::STORAGE_BUFFER | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n vk::QueueFlags::GRAPHICS | vk::QueueFlags::COMPUTE,\n\n )?;\n\n let vertex_buffer = self.uploader.create_buffer_with_data(\n\n &self.device,\n\n &format!(\"Model {} Vertex Buffer\", id),\n\n cast_slice(mesh.vertices.as_slice()),\n\n vk::BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR\n\n | vk::BufferUsageFlags::STORAGE_BUFFER\n", "file_path": "examples/raytracing/main.rs", "rank": 53, "score": 59157.68719418884 }, { "content": " \"SBT Buffer\",\n\n cast_slice(sbt_data.as_slice()),\n\n vk::BufferUsageFlags::SHADER_BINDING_TABLE_KHR\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n vk::QueueFlags::COMPUTE | vk::QueueFlags::GRAPHICS,\n\n )\n\n }?;\n\n\n\n let sbt_address = unsafe { sbt.device_address() };\n\n let sbt_stride_addresses = vec![\n\n // RG\n\n vk::StridedDeviceAddressRegionKHR {\n\n device_address: sbt_address + rg_group_offfset as u64,\n\n stride: shader_group_handle_size as u64,\n\n size: rg_group_size as u64,\n\n },\n\n // MISS\n\n vk::StridedDeviceAddressRegionKHR {\n\n device_address: sbt_address + miss_group_offfset as u64,\n\n stride: shader_group_handle_size as u64,\n", "file_path": "examples/raytracing/main.rs", "rank": 54, "score": 59157.574927303314 }, { "content": " }\n\n\n\n self.transfer_timeline_value += 1;\n\n self.transfer_queue.submit(&transfer_buffer, None)?;\n\n self.transfer_timeline\n\n .wait_for_value(self.transfer_timeline_value)?;\n\n\n\n 
transfer_pool.reset()?;\n\n self.graphics_command_pool.reset()?;\n\n\n\n Ok(dst_buffer)\n\n }\n\n\n\n unsafe fn render(&mut self) -> Result<(), asche::AscheError> {\n\n let frame = self.swapchain.next_frame(&self.presentation_semaphore)?;\n\n\n\n let graphics_buffer = self.graphics_command_pool.create_command_buffer(\n\n &[],\n\n &[CommandBufferSemaphore::Binary {\n\n semaphore: self.render_semaphore.handle(),\n", "file_path": "examples/cube/main.rs", "rank": 55, "score": 59157.55168809352 }, { "content": " })\n\n .collect();\n\n\n\n let instance_count = instance_data.len() as u32;\n\n\n\n // This buffer is only needed for creating the TLAS. Once it is crated, we can safely drop this.\n\n let instance_buffer = self.uploader.create_buffer_with_data(\n\n &self.device,\n\n \"Model TLAS Instances\",\n\n cast_slice(instance_data.as_slice()),\n\n vk::BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR\n\n | vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n vk::QueueFlags::GRAPHICS | vk::QueueFlags::COMPUTE,\n\n )?;\n\n\n\n let geometry_instance_data =\n\n vk::AccelerationStructureGeometryInstancesDataKHRBuilder::new()\n\n .data(vk::DeviceOrHostAddressConstKHR {\n\n device_address: instance_buffer.device_address(),\n", "file_path": "examples/raytracing/main.rs", "rank": 56, "score": 59157.54765180933 }, { "content": " aspect_mask: vk::ImageAspectFlags::COLOR,\n\n base_mip_level: 0,\n\n level_count: 1,\n\n base_array_layer: 0,\n\n layer_count: 1,\n\n });\n\n encoder.pipeline_barrier2(\n\n &vk::DependencyInfoKHRBuilder::new().image_memory_barriers(&[image_barrier]),\n\n );\n\n }\n\n\n\n queue.submit(&command_buffer, None)?;\n\n timeline.wait_for_value(*timeline_value)?;\n\n\n\n Ok(Texture { view, image })\n\n }\n\n\n\n pub unsafe fn upload_uniforms(&mut self) -> Result<()> {\n\n let projection_matrix = perspective_infinite_reverse_rh_yup(\n\n (90.0f32).to_radians(),\n", "file_path": 
"examples/raytracing/main.rs", "rank": 57, "score": 59157.357141990244 }, { "content": " }\n\n\n\n // Create a scratch pad for the device to create the AS. We re-use it for each BLAS of a model.\n\n let scratch_pad = self.device.create_buffer(&asche::BufferDescriptor::<_> {\n\n name: \"AS Scratchpad\",\n\n usage: vk::BufferUsageFlags::STORAGE_BUFFER\n\n | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,\n\n memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Buffer,\n\n sharing_mode: Default::default(),\n\n queues: Default::default(),\n\n size: scratchpad_size,\n\n flags: None,\n\n })?;\n\n let scratch_pad_device_address = scratch_pad.device_address();\n\n\n\n // Create for each model a BLAS. We do this in one command buffer each, since a BLAS creation could take a long time\n\n // and this reduces the chance of timeouts and enabled to device to suspend the queue if needed (a device can only create\n\n // one AS at a time anyway).\n\n for (id, (model, size)) in self.models.iter().zip(&max_sizes).enumerate() {\n", "file_path": "examples/raytracing/main.rs", "rank": 58, "score": 59157.31481655266 }, { "content": " }\n\n\n\n self.queue\n\n .submit(&graphics_buffer, Some(&self.render_fence))?;\n\n self.swapchain.queue_frame(\n\n &self.queue,\n\n frame,\n\n &[&self.presentation_semaphore, &self.render_semaphore],\n\n )?;\n\n self.render_fence.wait()?;\n\n self.render_fence.reset()?;\n\n self.command_pool.reset()?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "examples/triangle/main.rs", "rank": 59, "score": 59157.288068176254 }, { "content": "\n\n self.graphics_queue\n\n .submit(&command_buffer, Some(&self.render_fence))?;\n\n self.swapchain.queue_frame(\n\n &self.graphics_queue,\n\n frame,\n\n &[&self.presentation_semaphore, &self.render_semaphore],\n\n )?;\n\n\n\n self.render_fence.wait()?;\n\n self.render_fence.reset()?;\n\n\n\n self.graphics_pool.reset()?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 60, 
"score": 59157.24045549184 }, { "content": " self.swapchain.queue_frame(\n\n &self.graphics_queue,\n\n frame,\n\n &[&self.presentation_semaphore, &self.render_semaphore],\n\n )?;\n\n\n\n self.frame_counter += 1;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "examples/cube/main.rs", "rank": 61, "score": 59157.23676789869 }, { "content": " graphics_queue: asche::GraphicsQueue,\n\n transfer_queue: asche::TransferQueue,\n\n swapchain: asche::Swapchain,\n\n device: asche::Device<Lifetime>,\n\n}\n\n\n\nimpl Drop for Application {\n\n fn drop(&mut self) {\n\n unsafe {\n\n self.device\n\n .wait_idle()\n\n .expect(\"couldn't wait for device to become idle while dropping\");\n\n }\n\n }\n\n}\n\n\n\nimpl Application {\n\n fn new(\n\n device: asche::Device<Lifetime>,\n\n swapchain: asche::Swapchain,\n", "file_path": "examples/cube/main.rs", "rank": 62, "score": 59157.20719858189 }, { "content": " include_bytes!(\"shader/triangle.vert.spv\"),\n\n )\n\n }?;\n\n let frag_module = unsafe {\n\n device.create_shader_module(\n\n \"Fragment Shader Module\",\n\n include_bytes!(\"shader/triangle.frag.spv\"),\n\n )\n\n }?;\n\n\n\n let mainfunctionname = std::ffi::CString::new(\"main\").unwrap();\n\n let vertexshader_stage = vk::PipelineShaderStageCreateInfoBuilder::new()\n\n .stage(vk::ShaderStageFlagBits::VERTEX)\n\n .module(vert_module.raw())\n\n .name(&mainfunctionname);\n\n let fragmentshader_stage = vk::PipelineShaderStageCreateInfoBuilder::new()\n\n .stage(vk::ShaderStageFlagBits::FRAGMENT)\n\n .module(frag_module.raw())\n\n .name(&mainfunctionname);\n\n\n", "file_path": "examples/triangle/main.rs", "rank": 63, "score": 59157.133252055 }, { "content": " app.vertex_buffer.push(vertex_buffer);\n\n app.index_buffer.push(index_buffer);\n\n\n\n // Upload the model texture\n\n let texture_data = include_bytes!(\"fractal.dds\");\n\n let texture = unsafe { app.create_texture(\"Cube Texture\", texture_data) }?;\n\n app.textures.push(texture);\n\n\n\n Ok(app)\n\n }\n\n\n\n unsafe fn 
create_texture(\n\n &mut self,\n\n name: &str,\n\n image_data: &[u8],\n\n ) -> Result<Texture, asche::AscheError> {\n\n let dds = ddsfile::Dds::read(&mut std::io::Cursor::new(&image_data)).unwrap();\n\n\n\n let mut stagging_buffer = self.device.create_buffer(&asche::BufferDescriptor::<_> {\n\n name: \"Staging Buffer\",\n", "file_path": "examples/cube/main.rs", "rank": 64, "score": 59156.99383345476 }, { "content": " )\n\n }?;\n\n\n\n let mainfunctionname = std::ffi::CString::new(\"main\").unwrap();\n\n let vertexshader_stage = vk::PipelineShaderStageCreateInfoBuilder::new()\n\n .stage(vk::ShaderStageFlagBits::VERTEX)\n\n .module(vert_module.raw())\n\n .name(&mainfunctionname);\n\n let fragmentshader_stage = vk::PipelineShaderStageCreateInfoBuilder::new()\n\n .stage(vk::ShaderStageFlagBits::FRAGMENT)\n\n .module(frag_module.raw())\n\n .name(&mainfunctionname);\n\n\n\n // Depth image\n\n let depth_image = unsafe {\n\n device.create_image(&asche::ImageDescriptor::<_> {\n\n name: \"Depth Texture\",\n\n usage: vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT,\n\n memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Static,\n", "file_path": "examples/cube/main.rs", "rank": 65, "score": 59156.83409603164 }, { "content": " device.physical_device_properties(properties);\n\n\n\n // RT shader\n\n let mainfunctionname = std::ffi::CString::new(\"main\").unwrap();\n\n let raygen_module = device.create_shader_module(\n\n \"Raytrace Raygen Shader Module\",\n\n include_bytes!(\"shader/raytrace.rgen.spv\"),\n\n )?;\n\n let miss_module = device.create_shader_module(\n\n \"Raytrace Miss Shader Module\",\n\n include_bytes!(\"shader/raytrace.rmiss.spv\"),\n\n )?;\n\n let close_hit_module = device.create_shader_module(\n\n \"Raytrace Close Hit Shader Module\",\n\n include_bytes!(\"shader/raytrace.rchit.spv\"),\n\n )?;\n\n\n\n let raygen_stage = vk::PipelineShaderStageCreateInfoBuilder::new()\n\n .stage(vk::ShaderStageFlagBits::RAYGEN_KHR)\n\n 
.module(raygen_module.raw())\n", "file_path": "examples/raytracing/main.rs", "rank": 66, "score": 59156.77897726179 }, { "content": "\n\n encoder.bind_raytrace_pipeline(&self.raytracing_pipeline);\n\n encoder.bind_descriptor_set(\n\n vk::PipelineBindPoint::RAY_TRACING_KHR,\n\n self.raytracing_pipeline_layout.raw(),\n\n 0,\n\n &[self.raytracing_descriptor_set.raw()],\n\n &[],\n\n );\n\n encoder.bind_descriptor_set(\n\n vk::PipelineBindPoint::RAY_TRACING_KHR,\n\n self.raytracing_pipeline_layout.raw(),\n\n 1,\n\n &[self.vertex_descriptor_set.raw()],\n\n &[],\n\n );\n\n encoder.bind_descriptor_set(\n\n vk::PipelineBindPoint::RAY_TRACING_KHR,\n\n self.raytracing_pipeline_layout.raw(),\n\n 2,\n", "file_path": "examples/raytracing/main.rs", "rank": 67, "score": 59156.6074525217 }, { "content": " .wait_for_value(self.transfer_timeline_value)?;\n\n\n\n // Get the compacted sizes\n\n let size = self.models.len();\n\n let mut compact_sizes = vec![0; size];\n\n query_pool.results(\n\n 0,\n\n size as u32,\n\n cast_slice_mut(compact_sizes.as_mut_slice()),\n\n std::mem::size_of::<u64>() as u64,\n\n Some(vk::QueryResultFlags::WAIT),\n\n )?;\n\n\n\n Ok(compact_sizes)\n\n }\n\n\n\n unsafe fn init_tlas(&mut self) -> Result<()> {\n\n let instance_data: Vec<vk::AccelerationStructureInstanceKHR> = self\n\n .blas\n\n .iter()\n", "file_path": "examples/raytracing/main.rs", "rank": 68, "score": 59156.59998987345 }, { "content": " .buffer_offset(0)\n\n .buffer_row_length(0)\n\n .buffer_image_height(0)\n\n .image_subresource(vk::ImageSubresourceLayers {\n\n aspect_mask: vk::ImageAspectFlags::COLOR,\n\n mip_level: 0,\n\n base_array_layer: 0,\n\n layer_count: 1,\n\n })\n\n .image_offset(vk::Offset3D { x: 0, y: 0, z: 0 })\n\n .image_extent(extent),\n\n );\n\n\n\n let barrier = [vk::ImageMemoryBarrier2KHRBuilder::new()\n\n .old_layout(vk::ImageLayout::TRANSFER_DST_OPTIMAL)\n\n .new_layout(vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL)\n\n .image(image.raw())\n\n 
.subresource_range(subresource_range)\n\n .src_queue_family_index(vk::QUEUE_FAMILY_IGNORED)\n\n .dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED)\n", "file_path": "examples/cube/main.rs", "rank": 69, "score": 59156.60261922924 }, { "content": " let graphics_command_pool = unsafe { graphics_queue.create_command_pool() }?;\n\n\n\n let render_fence = unsafe { device.create_fence(\"Render fence\") }?;\n\n let presentation_semaphore =\n\n unsafe { device.create_binary_semaphore(\"Presentation Semaphore\") }?;\n\n let render_semaphore = unsafe { device.create_binary_semaphore(\"Render Semaphore\") }?;\n\n\n\n Ok(Self {\n\n device,\n\n queue: graphics_queue,\n\n command_pool: graphics_command_pool,\n\n extent,\n\n _pipeline_layout: pipeline_layout,\n\n pipeline,\n\n render_pass,\n\n render_fence,\n\n presentation_semaphore,\n\n render_semaphore,\n\n swapchain,\n\n })\n", "file_path": "examples/triangle/main.rs", "rank": 70, "score": 59156.55803701682 }, { "content": "\n\n let stagging_slice = stagging_buffer\n\n .mapped_slice_mut()?\n\n .expect(\"staging buffer allocation was not mapped\");\n\n stagging_slice[..buffer_data.len()].clone_from_slice(bytemuck::cast_slice(buffer_data));\n\n\n\n let dst_buffer = self.device.create_buffer(&asche::BufferDescriptor::<_> {\n\n name,\n\n usage: buffer_type | vk::BufferUsageFlags::TRANSFER_DST,\n\n memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Static,\n\n sharing_mode: vk::SharingMode::CONCURRENT,\n\n queues: vk::QueueFlags::TRANSFER | vk::QueueFlags::GRAPHICS,\n\n size: buffer_data.len() as u64,\n\n flags: None,\n\n })?;\n\n\n\n let mut transfer_pool = self.transfer_queue.create_command_pool()?;\n\n let transfer_buffer = transfer_pool.create_command_buffer(\n\n &[CommandBufferSemaphore::Timeline {\n", "file_path": "examples/cube/main.rs", "rank": 71, "score": 59156.475658342184 }, { "content": " .new_layout(vk::ImageLayout::TRANSFER_DST_OPTIMAL)\n\n .image(image.raw())\n\n 
.subresource_range(subresource_range)\n\n .src_queue_family_index(vk::QUEUE_FAMILY_IGNORED)\n\n .dst_queue_family_index(vk::QUEUE_FAMILY_IGNORED)\n\n .dst_stage_mask(vk::PipelineStageFlags2KHR::TRANSFER_KHR)\n\n .dst_access_mask(vk::AccessFlags2KHR::TRANSFER_WRITE_KHR)];\n\n\n\n let dependency_info = vk::DependencyInfoKHRBuilder::new()\n\n .memory_barriers(&[])\n\n .image_memory_barriers(&barrier)\n\n .buffer_memory_barriers(&[]);\n\n\n\n encoder.pipeline_barrier2(&dependency_info);\n\n\n\n encoder.copy_buffer_to_image(\n\n stagging_buffer.raw(),\n\n image.raw(),\n\n vk::ImageLayout::TRANSFER_DST_OPTIMAL,\n\n vk::BufferImageCopyBuilder::new()\n", "file_path": "examples/cube/main.rs", "rank": 72, "score": 59156.46340774454 }, { "content": " .any_hit_shader(vk::SHADER_UNUSED_KHR),\n\n // Hit\n\n vk::RayTracingShaderGroupCreateInfoKHRBuilder::new()\n\n ._type(vk::RayTracingShaderGroupTypeKHR::TRIANGLES_HIT_GROUP_KHR)\n\n .general_shader(vk::SHADER_UNUSED_KHR)\n\n .closest_hit_shader(2)\n\n .intersection_shader(vk::SHADER_UNUSED_KHR)\n\n .any_hit_shader(vk::SHADER_UNUSED_KHR),\n\n ];\n\n\n\n let rt_pipeline_info = vk::RayTracingPipelineCreateInfoKHRBuilder::new()\n\n .stages(&shader_stages)\n\n .groups(&groups)\n\n .max_pipeline_ray_recursion_depth(max_bounce)\n\n .layout(raytracing_pipeline_layout.raw());\n\n\n\n let raytracing_pipeline =\n\n device.create_raytracing_pipeline(\"RT Pipeline\", None, rt_pipeline_info)?;\n\n\n\n let (sbt, sbt_stride_addresses) = RayTracingApplication::create_sbt(\n", "file_path": "examples/raytracing/main.rs", "rank": 73, "score": 59156.405677657836 }, { "content": " graphics_queue,\n\n transfer_queue,\n\n graphics_command_pool,\n\n extent,\n\n pipeline,\n\n pipeline_layout,\n\n render_pass,\n\n vertex_buffer: vec![],\n\n index_buffer: vec![],\n\n textures: vec![],\n\n depth_texture,\n\n sampler,\n\n vp_matrix,\n\n render_fence,\n\n presentation_semaphore,\n\n render_semaphore,\n\n transfer_timeline,\n\n transfer_timeline_value,\n\n 
descriptor_set_layout,\n\n descriptor_pool,\n", "file_path": "examples/cube/main.rs", "rank": 74, "score": 59156.399606205945 }, { "content": " vk::QueryPoolCreateInfoBuilder::new()\n\n .query_type(vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR)\n\n .query_count(self.models.len() as u32),\n\n )?;\n\n\n\n let mut command_buffers: Vec<asche::ComputeCommandBuffer> =\n\n Vec::with_capacity(self.models.len());\n\n for (id, _) in self.models.iter().enumerate() {\n\n let compute_buffer = self.compute_pool.create_command_buffer(\n\n &[CommandBufferSemaphore::Timeline {\n\n semaphore: self.transfer_timeline.handle(),\n\n stage: vk::PipelineStageFlags2KHR::NONE_KHR,\n\n value: self.transfer_timeline_value,\n\n }],\n\n &[CommandBufferSemaphore::Timeline {\n\n semaphore: self.transfer_timeline.handle(),\n\n stage: vk::PipelineStageFlags2KHR::NONE_KHR,\n\n value: self.transfer_timeline_value + 1,\n\n }],\n\n )?;\n", "file_path": "examples/raytracing/main.rs", "rank": 75, "score": 59156.27314363477 }, { "content": " usage: vk::BufferUsageFlags::TRANSFER_SRC,\n\n memory_location: vk_alloc::MemoryLocation::CpuToGpu,\n\n lifetime: Lifetime::Static,\n\n sharing_mode: vk::SharingMode::CONCURRENT,\n\n queues: vk::QueueFlags::TRANSFER | vk::QueueFlags::GRAPHICS,\n\n size: dds.data.len() as u64,\n\n flags: None,\n\n })?;\n\n\n\n let stagging_slice = stagging_buffer\n\n .mapped_slice_mut()?\n\n .expect(\"staging buffer allocation was not mapped\");\n\n stagging_slice[..dds.data.len()].clone_from_slice(bytemuck::cast_slice(&dds.data));\n\n stagging_buffer.flush()?;\n\n\n\n let extent = vk::Extent3D {\n\n width: dds.header.width,\n\n height: dds.header.height,\n\n depth: 1,\n\n };\n", "file_path": "examples/cube/main.rs", "rank": 76, "score": 59156.25803779053 }, { "content": " let layout_info = vk::PipelineLayoutCreateInfoBuilder::new().set_layouts(&layouts);\n\n let raytracing_pipeline_layout =\n\n device.create_pipeline_layout(\"RT Pipeline Layout\", layout_info)?;\n\n\n\n // 
RT Pipeline\n\n let max_bounce = raytrace_properties.max_ray_recursion_depth.min(1);\n\n let groups = [\n\n // Raygen\n\n vk::RayTracingShaderGroupCreateInfoKHRBuilder::new()\n\n ._type(vk::RayTracingShaderGroupTypeKHR::GENERAL_KHR)\n\n .general_shader(0)\n\n .closest_hit_shader(vk::SHADER_UNUSED_KHR)\n\n .intersection_shader(vk::SHADER_UNUSED_KHR)\n\n .any_hit_shader(vk::SHADER_UNUSED_KHR),\n\n // Miss\n\n vk::RayTracingShaderGroupCreateInfoKHRBuilder::new()\n\n ._type(vk::RayTracingShaderGroupTypeKHR::GENERAL_KHR)\n\n .general_shader(1)\n\n .closest_hit_shader(vk::SHADER_UNUSED_KHR)\n\n .intersection_shader(vk::SHADER_UNUSED_KHR)\n", "file_path": "examples/raytracing/main.rs", "rank": 77, "score": 59156.247392768164 }, { "content": "\n\n let image = self.device.create_image(&asche::ImageDescriptor::<_> {\n\n name,\n\n usage: vk::ImageUsageFlags::SAMPLED | vk::ImageUsageFlags::TRANSFER_DST,\n\n memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Static,\n\n sharing_mode: vk::SharingMode::CONCURRENT,\n\n queues: vk::QueueFlags::TRANSFER | vk::QueueFlags::GRAPHICS,\n\n image_type: vk::ImageType::_2D,\n\n format: vk::Format::BC7_SRGB_BLOCK,\n\n extent,\n\n mip_levels: 1,\n\n array_layers: 1,\n\n samples: vk::SampleCountFlagBits::_1,\n\n tiling: vk::ImageTiling::OPTIMAL,\n\n initial_layout: vk::ImageLayout::UNDEFINED,\n\n flags: None,\n\n })?;\n\n\n\n let subresource_range = vk::ImageSubresourceRange {\n", "file_path": "examples/cube/main.rs", "rank": 78, "score": 59156.158761414285 }, { "content": " light_position: light_position.into(),\n\n light_color: light_color.into(),\n\n };\n\n\n\n let camera_uniforms_buffer = self.uploader.create_buffer_with_data(\n\n &self.device,\n\n \"Camera Uniforms Buffer\",\n\n cast_slice(&[camera_uniforms]),\n\n vk::BufferUsageFlags::UNIFORM_BUFFER,\n\n vk::QueueFlags::GRAPHICS,\n\n )?;\n\n\n\n let light_uniforms_buffer = self.uploader.create_buffer_with_data(\n\n &self.device,\n\n \"Lights Uniforms 
Buffer\",\n\n cast_slice(&[light_uniforms]),\n\n vk::BufferUsageFlags::UNIFORM_BUFFER,\n\n vk::QueueFlags::GRAPHICS,\n\n )?;\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 79, "score": 59156.05622169053 }, { "content": " sharing_mode: vk::SharingMode::EXCLUSIVE,\n\n queues: vk::QueueFlags::GRAPHICS,\n\n image_type: vk::ImageType::_2D,\n\n format: vk::Format::D32_SFLOAT,\n\n extent: vk::Extent3D {\n\n width: extent.width,\n\n height: extent.height,\n\n depth: 1,\n\n },\n\n mip_levels: 1,\n\n array_layers: 1,\n\n samples: vk::SampleCountFlagBits::_1,\n\n tiling: vk::ImageTiling::OPTIMAL,\n\n initial_layout: vk::ImageLayout::UNDEFINED,\n\n flags: None,\n\n })\n\n }?;\n\n\n\n let depth_image_view = unsafe {\n\n device.create_image_view(&asche::ImageViewDescriptor {\n", "file_path": "examples/cube/main.rs", "rank": 80, "score": 59155.87170318623 }, { "content": " .src_acceleration_structure(vk::AccelerationStructureKHR::null())\n\n .dst_acceleration_structure(structure.raw())\n\n .scratch_data(vk::DeviceOrHostAddressKHR {\n\n device_address: scratchpad.device_address(),\n\n })];\n\n\n\n let ranges = [vk::AccelerationStructureBuildRangeInfoKHRBuilder::new()\n\n .primitive_count(instance_count)\n\n .primitive_offset(0)\n\n .transform_offset(0)\n\n .first_vertex(0)\n\n .build()];\n\n\n\n self.transfer_timeline_value += 1;\n\n let compute_buffer = self.compute_pool.create_command_buffer(\n\n &[],\n\n &[CommandBufferSemaphore::Timeline {\n\n semaphore: self.transfer_timeline.handle(),\n\n stage: vk::PipelineStageFlags2KHR::NONE_KHR,\n\n value: self.transfer_timeline_value,\n", "file_path": "examples/raytracing/main.rs", "rank": 81, "score": 59155.81346442867 }, { "content": " memory_location: vk_alloc::MemoryLocation::GpuOnly,\n\n lifetime: Lifetime::Image,\n\n sharing_mode: vk::SharingMode::EXCLUSIVE,\n\n queues: vk::QueueFlags::GRAPHICS,\n\n image_type: vk::ImageType::_2D,\n\n format: OFFSCREEN_FORMAT,\n\n extent: vk::Extent3D {\n\n width: extent.width,\n\n 
height: extent.height,\n\n depth: 1,\n\n },\n\n mip_levels: 1,\n\n array_layers: 1,\n\n samples: vk::SampleCountFlagBits::_1,\n\n tiling: vk::ImageTiling::OPTIMAL,\n\n initial_layout: vk::ImageLayout::UNDEFINED,\n\n flags: None,\n\n })?;\n\n\n\n let view = device.create_image_view(&asche::ImageViewDescriptor {\n", "file_path": "examples/raytracing/main.rs", "rank": 82, "score": 59155.7867121962 }, { "content": " &[self.index_descriptor_set.raw()],\n\n &[],\n\n );\n\n\n\n encoder.trace_rays_khr(\n\n &self.sbt_stride_addresses[0],\n\n &self.sbt_stride_addresses[1],\n\n &self.sbt_stride_addresses[2],\n\n &self.sbt_stride_addresses[3],\n\n self.extent.width,\n\n self.extent.height,\n\n 1,\n\n );\n\n\n\n let image_barrier = vk::ImageMemoryBarrier2KHRBuilder::new()\n\n .src_stage_mask(vk::PipelineStageFlags2KHR::RAY_TRACING_SHADER_KHR)\n\n .src_access_mask(vk::AccessFlags2KHR::SHADER_WRITE_KHR)\n\n .old_layout(vk::ImageLayout::GENERAL)\n\n .dst_stage_mask(vk::PipelineStageFlags2KHR::FRAGMENT_SHADER_KHR)\n\n .dst_access_mask(vk::AccessFlags2KHR::SHADER_READ_KHR)\n", "file_path": "examples/raytracing/main.rs", "rank": 83, "score": 59155.65581573721 }, { "content": "\n\n pass.push_constants(\n\n self.pipeline_layout.raw(),\n\n vk::ShaderStageFlags::VERTEX,\n\n 0,\n\n cast_slice(mvp_matrix.as_ref()),\n\n )?;\n\n\n\n pass.draw_indexed(36, 1, 0, 0, 0);\n\n }\n\n\n\n self.graphics_queue\n\n .submit(&graphics_buffer, Some(&self.render_fence))?;\n\n\n\n self.render_fence.wait()?;\n\n self.render_fence.reset()?;\n\n\n\n self.graphics_command_pool.reset()?;\n\n self.descriptor_pool.free_sets()?;\n\n\n", "file_path": "examples/cube/main.rs", "rank": 84, "score": 59155.45612849509 }, { "content": " vk::KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME,\n\n vk::KHR_PIPELINE_LIBRARY_EXTENSION_NAME,\n\n vk::KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME,\n\n // For GLSL_EXT_debug_printf support\n\n vk::KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME,\n\n ],\n\n features_v1_2: Some(\n\n 
vk::PhysicalDeviceVulkan12FeaturesBuilder::new()\n\n .timeline_semaphore(true)\n\n .buffer_device_address(true)\n\n .scalar_block_layout(true)\n\n .uniform_buffer_standard_layout(true)\n\n .descriptor_indexing(true)\n\n .descriptor_binding_partially_bound(true)\n\n .descriptor_binding_variable_descriptor_count(true)\n\n .runtime_descriptor_array(true)\n\n .shader_storage_buffer_array_non_uniform_indexing(true),\n\n ),\n\n features_raytracing: Some(\n\n vk::PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder::new()\n", "file_path": "examples/raytracing/main.rs", "rank": 85, "score": 59155.45765249318 }, { "content": " )?;\n\n\n\n self.models.push(Model {\n\n vertex_count: mesh.vertices.len() as u32,\n\n index_count: mesh.indices.len() as u32,\n\n material_buffer,\n\n vertex_buffer,\n\n index_buffer,\n\n transform_buffer,\n\n })\n\n }\n\n\n\n self.init_blas()?;\n\n self.init_tlas()?;\n\n\n\n Ok(())\n\n }\n\n\n\n unsafe fn init_blas(&mut self) -> Result<()> {\n\n // We need to assemble all information that is needed to build the AS so that it outlives the loop were we assemble the command buffers.\n", "file_path": "examples/raytracing/main.rs", "rank": 86, "score": 59155.34765809011 }, { "content": " }\n\n\n\n pub unsafe fn render(&mut self) -> Result<()> {\n\n let frame = self.swapchain.next_frame(&self.presentation_semaphore)?;\n\n\n\n let command_buffer = self.graphics_pool.create_command_buffer(\n\n &[],\n\n &[CommandBufferSemaphore::Binary {\n\n semaphore: self.render_semaphore.handle(),\n\n stage: vk::PipelineStageFlags2KHR::COLOR_ATTACHMENT_OUTPUT_KHR,\n\n }],\n\n )?;\n\n\n\n {\n\n let encoder = command_buffer.record()?;\n\n encoder.set_viewport_and_scissor(\n\n vk::Rect2DBuilder::new()\n\n .offset(vk::Offset2D { x: 0, y: 0 })\n\n .extent(self.extent),\n\n );\n", "file_path": "examples/raytracing/main.rs", "rank": 87, "score": 59155.07944814693 }, { "content": " }\n\n\n\n unsafe fn render(&mut self) -> Result<(), asche::AscheError> {\n\n let frame = 
self.swapchain.next_frame(&self.presentation_semaphore)?;\n\n\n\n let graphics_buffer = self.command_pool.create_command_buffer(\n\n &[],\n\n &[CommandBufferSemaphore::Binary {\n\n semaphore: self.render_semaphore.handle(),\n\n stage: vk::PipelineStageFlags2KHR::COLOR_ATTACHMENT_OUTPUT_KHR,\n\n }],\n\n )?;\n\n\n\n {\n\n let encoder = graphics_buffer.record()?;\n\n encoder.set_viewport_and_scissor(\n\n vk::Rect2DBuilder::new()\n\n .offset(vk::Offset2D { x: 0, y: 0 })\n\n .extent(self.extent),\n\n );\n", "file_path": "examples/triangle/main.rs", "rank": 88, "score": 59155.03981974071 }, { "content": " ) = Self::create_rt_pipeline(&device, &mut uploader)?;\n\n\n\n // Offscreen Image\n\n let offscreen_attachment = Self::create_offscreen_image(\n\n &device,\n\n extent,\n\n &mut graphics_pool,\n\n &mut graphics_queue,\n\n &timeline,\n\n &mut timeline_value,\n\n )?;\n\n\n\n let render_fence = device.create_fence(\"Render Fence\")?;\n\n let render_semaphore = device.create_binary_semaphore(\"Render Semaphore\")?;\n\n let presentation_semaphore = device.create_binary_semaphore(\"Presentation Semaphore\")?;\n\n\n\n Ok(Self {\n\n uniforms: vec![],\n\n sbt_stride_addresses,\n\n _sbt: sbt,\n", "file_path": "examples/raytracing/main.rs", "rank": 89, "score": 59155.02857281754 }, { "content": " // Each group can contain multiple entries which were defined\n\n // in the pipeline layout.\n\n //\n\n // Those entries are rightly packed in the handle_data and need\n\n // to be copied at the right location inside the SBT.\n\n //\n\n // Example for a \"shader_group_base_alignment\" of 64 with a\n\n // shader_group_handle_size of 32:\n\n //\n\n // ---- ---- ---- ----\n\n //| RG | | MS | | HG | | CL |\n\n // ---- ---- ---- ----\n\n // 0 64 128 192\n\n //\n\n let shader_group_handle_size = raytrace_properties.shader_group_handle_size;\n\n let shader_group_base_alignment = raytrace_properties.shader_group_base_alignment as usize;\n\n\n\n let group_count = groups.len() as u32;\n\n let 
handle_data_size = shader_group_handle_size * group_count;\n\n\n", "file_path": "examples/raytracing/main.rs", "rank": 90, "score": 59155.00781388965 }, { "content": " vertex_write,\n\n index_write,\n\n ],\n\n &[],\n\n );\n\n }\n\n /// Upload vertex data and prepares the TLAS and BLAS structures.\n\n pub unsafe fn upload_model(&mut self, materials: &[Material], meshes: &[Mesh]) -> Result<()> {\n\n for (id, mesh) in meshes.iter().enumerate() {\n\n let material = &materials[mesh.material];\n\n\n\n let material_data = MaterialData {\n\n albedo: material.albedo,\n\n metallic: material.metallic,\n\n roughness: material.roughness,\n\n };\n\n\n\n // Vulkan expects a row major 3x4 transform matrix.\n\n let row_major_matrix = mesh.model_matrix.transpose();\n\n let transform = vk::TransformMatrixKHR {\n", "file_path": "examples/raytracing/main.rs", "rank": 91, "score": 59154.94615158427 }, { "content": " ._type(vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL_KHR);\n\n\n\n let structure = self\n\n .device\n\n .create_acceleration_structure(\"Model {} BLAS\", &creation_info)?;\n\n\n\n Ok(Blas {\n\n structure,\n\n _buffer: buffer,\n\n })\n\n }\n\n\n\n // Returns a query pool with the compacted sized.\n\n unsafe fn create_as_on_device(\n\n &mut self,\n\n infos: &[vk::AccelerationStructureBuildGeometryInfoKHRBuilder],\n\n ranges: &[vk::AccelerationStructureBuildRangeInfoKHR],\n\n ) -> Result<Vec<u64>> {\n\n let query_pool = self.device.create_query_pool(\n\n \"BLAS Compacted Size Query Pool\",\n", "file_path": "examples/raytracing/main.rs", "rank": 92, "score": 59154.92828687708 }, { "content": " let mut handle_data: Vec<u8> = vec![0; handle_data_size as usize];\n\n unsafe {\n\n device.ray_tracing_shader_group_handles(\n\n raytracing_pipeline.raw(),\n\n 0,\n\n group_count,\n\n handle_data.as_mut_slice(),\n\n )\n\n }?;\n\n\n\n // We only have one shader in the first three groups.\n\n let rg_group_size = shader_group_handle_size as usize;\n\n let miss_group_size = 
shader_group_handle_size as usize;\n\n let hg_group_size = shader_group_handle_size as usize;\n\n\n\n let rg_group_offfset = 0;\n\n let miss_group_offfset = align_up(\n\n rg_group_offfset + rg_group_size,\n\n shader_group_base_alignment,\n\n );\n", "file_path": "examples/raytracing/main.rs", "rank": 93, "score": 59154.81187059504 }, { "content": " })?;\n\n\n\n let mut transfer_pool = self.transfer_queue.create_command_pool()?;\n\n let transfer_buffer = transfer_pool.create_command_buffer(\n\n &[CommandBufferSemaphore::Timeline {\n\n semaphore: self.transfer_timeline.handle(),\n\n stage: vk::PipelineStageFlags2KHR::NONE_KHR,\n\n value: self.transfer_timeline_value,\n\n }],\n\n &[CommandBufferSemaphore::Timeline {\n\n semaphore: self.transfer_timeline.handle(),\n\n stage: vk::PipelineStageFlags2KHR::NONE_KHR,\n\n value: self.transfer_timeline_value + 1,\n\n }],\n\n )?;\n\n\n\n {\n\n let encoder = transfer_buffer.record()?;\n\n let barrier = [vk::ImageMemoryBarrier2KHRBuilder::new()\n\n .old_layout(vk::ImageLayout::UNDEFINED)\n", "file_path": "examples/cube/main.rs", "rank": 94, "score": 59154.78813386999 }, { "content": " .src_stage_mask(vk::PipelineStageFlags2KHR::TRANSFER_KHR)\n\n .src_access_mask(vk::AccessFlags2KHR::TRANSFER_WRITE_KHR)\n\n .dst_stage_mask(vk::PipelineStageFlags2KHR::ALL_TRANSFER_KHR)\n\n .dst_access_mask(vk::AccessFlags2KHR::NONE_KHR)];\n\n\n\n let dependency_info = vk::DependencyInfoKHRBuilder::new()\n\n .memory_barriers(&[])\n\n .image_memory_barriers(&barrier)\n\n .buffer_memory_barriers(&[]);\n\n\n\n encoder.pipeline_barrier2(&dependency_info);\n\n }\n\n\n\n self.transfer_timeline_value += 1;\n\n self.transfer_queue.submit(&transfer_buffer, None)?;\n\n self.transfer_timeline\n\n .wait_for_value(self.transfer_timeline_value)?;\n\n\n\n Ok(Texture {\n\n view,\n", "file_path": "examples/cube/main.rs", "rank": 95, "score": 59154.68516400871 }, { "content": " .binding(0)\n\n .descriptor_count(1) // Used fore texture arrays\n\n 
.descriptor_type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n\n .stage_flags(vk::ShaderStageFlags::FRAGMENT)];\n\n let layout_info = vk::DescriptorSetLayoutCreateInfoBuilder::new().bindings(&bindings);\n\n let descriptor_set_layout = unsafe {\n\n device.create_descriptor_set_layout(\"Cube Descriptor Set Layout\", layout_info)\n\n }?;\n\n\n\n // Descriptor pool\n\n let pool_sizes = [vk::DescriptorPoolSizeBuilder::new()\n\n .descriptor_count(1)\n\n ._type(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)];\n\n\n\n let descriptor_pool = unsafe {\n\n device.create_descriptor_pool(&asche::DescriptorPoolDescriptor {\n\n name: \"Cube Descriptor Pool\",\n\n max_sets: 16,\n\n pool_sizes: &pool_sizes,\n\n flags: None,\n", "file_path": "examples/cube/main.rs", "rank": 96, "score": 59154.625992517416 }, { "content": " let command_buffer = self.compute_pool.create_command_buffer(\n\n &[],\n\n &[CommandBufferSemaphore::Timeline {\n\n semaphore: self.transfer_timeline.handle(),\n\n stage: vk::PipelineStageFlags2KHR::NONE_KHR,\n\n value: self.transfer_timeline_value,\n\n }],\n\n )?;\n\n\n\n let mut compacted_blas = Vec::with_capacity(self.blas.len());\n\n {\n\n let encoder = command_buffer.record()?;\n\n for ((id, blas), compacted) in self.blas.iter().enumerate().zip(compacted_sizes) {\n\n let compact_blas = self.create_new_blas(&id, *compacted)?;\n\n compacted_blas.push(compact_blas);\n\n\n\n let info = vk::CopyAccelerationStructureInfoKHRBuilder::new()\n\n .mode(vk::CopyAccelerationStructureModeKHR::COMPACT_KHR)\n\n .src(blas.structure.raw())\n\n .dst(compacted_blas[compacted_blas.len() - 1].structure.raw());\n", "file_path": "examples/raytracing/main.rs", "rank": 97, "score": 59154.61348862703 }, { "content": " let pipeline_info = vk::GraphicsPipelineCreateInfoBuilder::new()\n\n .stages(&shader_stages)\n\n .vertex_input_state(&vertex_input_state)\n\n .input_assembly_state(&input_assembly_state)\n\n .dynamic_state(&dynamic_state)\n\n .viewport_state(&viewport_state)\n\n 
.rasterization_state(&rasterization_state)\n\n .multisample_state(&multisample_state)\n\n .color_blend_state(&color_blend_state)\n\n .depth_stencil_state(&depth_stencil_state)\n\n .layout(pipeline_layout.raw())\n\n .render_pass(render_pass.raw())\n\n .subpass(0);\n\n\n\n let pipeline = unsafe { device.create_graphics_pipeline(\"Cube Pipeline\", pipeline_info) }?;\n\n\n\n let graphics_command_pool = unsafe { graphics_queue.create_command_pool() }?;\n\n\n\n let p_matrix = perspective_infinite_reverse_rh_yup(\n\n (70.0f32).to_radians(),\n", "file_path": "examples/cube/main.rs", "rank": 98, "score": 59154.566408791805 }, { "content": " let query_info = vk::AccelerationStructureBuildGeometryInfoKHRBuilder::new()\n\n .flags(\n\n vk::BuildAccelerationStructureFlagsKHR::ALLOW_COMPACTION_KHR\n\n | vk::BuildAccelerationStructureFlagsKHR::PREFER_FAST_TRACE_KHR,\n\n )\n\n .geometries(&geometries[id..id + 1])\n\n .mode(vk::BuildAccelerationStructureModeKHR::BUILD_KHR)\n\n ._type(vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL_KHR);\n\n\n\n let size_info = self.device.acceleration_structure_build_sizes(\n\n vk::AccelerationStructureBuildTypeKHR::DEVICE_KHR,\n\n &query_info,\n\n &[model.index_count / 3],\n\n );\n\n\n\n max_sizes.push(size_info.acceleration_structure_size);\n\n\n\n if size_info.build_scratch_size > scratchpad_size {\n\n scratchpad_size = size_info.build_scratch_size\n\n }\n", "file_path": "examples/raytracing/main.rs", "rank": 99, "score": 59154.50160916333 } ]
Rust
src/cargo/lib.rs
jakerr/cargo
f0762dfc1340c24ad87fa59027d5308c96867393
#![crate_name="cargo"] #![crate_type="rlib"] #![feature(macro_rules, phase)] #![feature(default_type_params)] #![deny(bad_style, unused)] extern crate libc; extern crate regex; extern crate serialize; extern crate term; extern crate time; #[phase(plugin)] extern crate regex_macros; #[phase(plugin, link)] extern crate log; extern crate curl; extern crate docopt; extern crate flate2; extern crate git2; extern crate glob; extern crate semver; extern crate tar; extern crate toml; extern crate url; #[cfg(test)] extern crate hamcrest; use std::os; use std::io::stdio::{stdout_raw, stderr_raw}; use std::io::{mod, stdout, stderr}; use serialize::{Decoder, Encoder, Decodable, Encodable, json}; use docopt::FlagParser; use core::{Shell, MultiShell, ShellConfig}; use term::color::{BLACK}; pub use util::{CargoError, CliError, CliResult, human}; macro_rules! some( ($e:expr) => ( match $e { Some(e) => e, None => return None } ) ) mod cargo { pub use super::util; } #[macro_export] macro_rules! try ( ($expr:expr) => ({ use cargo::util::FromError; match $expr.map_err(FromError::from_error) { Ok(val) => val, Err(err) => return Err(err) } }) ) macro_rules! 
raw_try ( ($expr:expr) => ({ match $expr { Ok(val) => val, Err(err) => return Err(err) } }) ) pub mod core; pub mod ops; pub mod sources; pub mod util; pub trait RepresentsJSON : Decodable<json::Decoder, json::DecoderError> {} impl<T: Decodable<json::Decoder, json::DecoderError>> RepresentsJSON for T {} pub fn execute_main<'a, T: FlagParser, U: RepresentsJSON, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, U, &mut MultiShell) -> CliResult<Option<V>>, options_first: bool) { off_the_main_thread(proc() { process::<V>(|rest, shell| call_main(exec, shell, rest, options_first)); }); } pub fn call_main<'a, T: FlagParser, U: RepresentsJSON, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, U, &mut MultiShell) -> CliResult<Option<V>>, shell: &mut MultiShell, args: &[String], options_first: bool) -> CliResult<Option<V>> { let flags = try!(flags_from_args::<T>(args, options_first)); let json = try!(json_from_stdin::<U>()); exec(flags, json, shell) } pub fn execute_main_without_stdin<'a, T: FlagParser, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, &mut MultiShell) -> CliResult<Option<V>>, options_first: bool) { off_the_main_thread(proc() { process::<V>(|rest, shell| call_main_without_stdin(exec, shell, rest, options_first)); }); } pub fn call_main_without_stdin<'a, T: FlagParser, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, &mut MultiShell) -> CliResult<Option<V>>, shell: &mut MultiShell, args: &[String], options_first: bool) -> CliResult<Option<V>> { let flags = try!(flags_from_args::<T>(args, options_first)); exec(flags, shell) } fn process<'a, V: Encodable<json::Encoder<'a>, io::IoError>>( callback: |&[String], &mut MultiShell| -> CliResult<Option<V>>) { let mut shell = shell(true); let mut args = os::args(); args.remove(0); process_executed(callback(args.as_slice(), &mut shell), &mut shell) } pub fn process_executed<'a, T: Encodable<json::Encoder<'a>, io::IoError>>( result: CliResult<Option<T>>, shell: &mut MultiShell) { 
match result { Err(e) => handle_error(e, shell), Ok(encodable) => { encodable.map(|encodable| { let encoded = json::encode(&encodable); println!("{}", encoded); }); } } } pub fn shell(verbose: bool) -> MultiShell<'static> { let tty = stderr_raw().isatty(); let stderr = box stderr() as Box<Writer>; let config = ShellConfig { color: true, verbose: verbose, tty: tty }; let err = Shell::create(stderr, config); let tty = stdout_raw().isatty(); let stdout = box stdout() as Box<Writer>; let config = ShellConfig { color: true, verbose: verbose, tty: tty }; let out = Shell::create(stdout, config); MultiShell::new(out, err, verbose) } pub fn handle_error(err: CliError, shell: &mut MultiShell) { log!(4, "handle_error; err={}", err); let CliError { error, exit_code, unknown } = err; if unknown { let _ = shell.error("An unknown error occurred"); } else if error.to_string().len() > 0 { let _ = shell.error(error.to_string()); } if error.cause().is_some() || unknown { let _ = shell.concise(|shell| { shell.err().say("\nTo learn more, run the command again with --verbose.", BLACK) }); } let _ = shell.verbose(|shell| { if unknown { let _ = shell.error(error.to_string()); } error.detail().map(|detail| { let _ = shell.err().say(format!("{}", detail), BLACK); }); error.cause().map(|err| { let _ = handle_cause(err, shell); }); Ok(()) }); std::os::set_exit_status(exit_code as int); } fn handle_cause(err: &CargoError, shell: &mut MultiShell) { let _ = shell.err().say("\nCaused by:", BLACK); let _ = shell.err().say(format!(" {}", err.description()), BLACK); err.cause().map(|e| handle_cause(e, shell)); } pub fn version() -> String { format!("cargo {}", match option_env!("CFG_VERSION") { Some(s) => s.to_string(), None => format!("{}.{}.{}{}", env!("CARGO_PKG_VERSION_MAJOR"), env!("CARGO_PKG_VERSION_MINOR"), env!("CARGO_PKG_VERSION_PATCH"), option_env!("CARGO_PKG_VERSION_PRE").unwrap_or("")) }) } fn flags_from_args<T: FlagParser>(args: &[String], options_first: bool) -> CliResult<T> { let args 
= args.iter().map(|a| a.as_slice()).collect::<Vec<&str>>(); let config = docopt::Config { options_first: options_first, help: true, version: Some(version()), }; FlagParser::parse_args(config, args.as_slice()).map_err(|e| { let code = if e.fatal() {1} else {0}; CliError::from_error(e, code) }) } fn json_from_stdin<T: RepresentsJSON>() -> CliResult<T> { let mut reader = io::stdin(); let input = try!(reader.read_to_string().map_err(|_| { CliError::new("Standard in did not exist or was not UTF-8", 1) })); let json = try!(json::from_str(input.as_slice()).map_err(|_| { CliError::new("Could not parse standard in as JSON", 1) })); let mut decoder = json::Decoder::new(json); Decodable::decode(&mut decoder).map_err(|_| { CliError::new("Could not process standard in as input", 1) }) } fn off_the_main_thread(p: proc():Send) { let (tx, rx) = channel(); spawn(proc() { p(); tx.send(()); }); if rx.recv_opt().is_err() { std::os::set_exit_status(std::rt::DEFAULT_ERROR_CODE); } }
#![crate_name="cargo"] #![crate_type="rlib"] #![feature(macro_rules, phase)] #![feature(default_type_params)] #![deny(bad_style, unused)] extern crate libc; extern crate regex; extern crate serialize; extern crate term; extern crate time; #[phase(plugin)] extern crate regex_macros; #[phase(plugin, link)] extern crate log; extern crate curl; extern crate docopt; extern crate flate2; extern crate git2; extern crate glob; extern crate semver; extern crate tar; extern crate toml; extern crate url; #[cfg(test)] extern crate hamcrest; use std::os; use std::io::stdio::{stdout_raw, stderr_raw}; use std::io::{mod, stdout, stderr}; use serialize::{Decoder, Encoder, Decodable, Encodable, json}; use docopt::FlagParser; use core::{Shell, MultiShell, ShellConfig}; use term::color::{BLACK}; pub use util::{CargoError, CliError, CliResult, human}; macro_rules! some( ($e:expr) => ( match $e { Some(e) => e, None => return None } ) ) mod cargo { pub use super::util; } #[macro_export] macro_rules! try ( ($expr:expr) => ({ use cargo::util::FromError; match $expr.map_err(FromError::from_error) { Ok(val) => val, Err(err) => return Err(err) } }) ) macro_rules! 
raw_try ( ($expr:expr) => ({ match $expr { Ok(val) => val, Err(err) => return Err(err) } }) ) pub mod core; pub mod ops; pub mod sources; pub mod util; pub trait RepresentsJSON : Decodable<json::Decoder, json::DecoderError> {} impl<T: Decodable<json::Decoder, json::DecoderError>> RepresentsJSON for T {} pub fn execute_main<'a, T: FlagParser, U: RepresentsJSON, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, U, &mut MultiShell) -> CliResult<Option<V>>, options_first: bool) { off_the_main_thread(proc() { process::<V>(|rest, shell| call_main(exec, shell, rest, options_first)); }); } pub fn call_main<'a, T: FlagParser, U: RepresentsJSON, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, U, &mut MultiShell) -> CliResult<Option<V>>, shell: &mut MultiShell, args: &[String], options_first: bool) -> CliResult<Option<V>> { let flags = try!(flags_from_args::<T>(args, options_first)); let json = try!(json_from_stdin::<U>()); exec(flags, json, shell) } pub fn execute_main_without_stdin<'a, T: FlagParser, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, &mut MultiShell) -> CliResult<Option<V>>, options_first: bool) { off_the_main_thread(proc() { process::<V>(|rest, shell| call_main_without_stdin(exec, shell, rest, options_first)); }); } pub fn call_main_without_stdin<'a, T: FlagParser, V: Encodable<json::Encoder<'a>, io::IoError>>( exec: fn(T, &mut MultiShell) -> CliResult<Option<V>>, shell: &mut MultiShell, args: &[String], options_first: bool) -> CliResult<Option<V>> { let flags = try!(flags_from_args::<T>(args, options_first)); exec(flags, shell) } fn process<'a, V: Encodable<json::Encoder<'a>, io::IoError>>( callback: |&[String], &mut MultiShell| -> CliResult<Option<V>>) { let mut shell = shell(true); let mut args = os::args(); args.remove(0); process_executed(callback(args.as_slice(), &mut shell), &mut shell) } pub fn process_executed<'a, T: Encodable<json::Encoder<'a>, io::IoError>>( result: CliResult<Option<T>>, shell: &mut MultiShel
pub fn shell(verbose: bool) -> MultiShell<'static> { let tty = stderr_raw().isatty(); let stderr = box stderr() as Box<Writer>; let config = ShellConfig { color: true, verbose: verbose, tty: tty }; let err = Shell::create(stderr, config); let tty = stdout_raw().isatty(); let stdout = box stdout() as Box<Writer>; let config = ShellConfig { color: true, verbose: verbose, tty: tty }; let out = Shell::create(stdout, config); MultiShell::new(out, err, verbose) } pub fn handle_error(err: CliError, shell: &mut MultiShell) { log!(4, "handle_error; err={}", err); let CliError { error, exit_code, unknown } = err; if unknown { let _ = shell.error("An unknown error occurred"); } else if error.to_string().len() > 0 { let _ = shell.error(error.to_string()); } if error.cause().is_some() || unknown { let _ = shell.concise(|shell| { shell.err().say("\nTo learn more, run the command again with --verbose.", BLACK) }); } let _ = shell.verbose(|shell| { if unknown { let _ = shell.error(error.to_string()); } error.detail().map(|detail| { let _ = shell.err().say(format!("{}", detail), BLACK); }); error.cause().map(|err| { let _ = handle_cause(err, shell); }); Ok(()) }); std::os::set_exit_status(exit_code as int); } fn handle_cause(err: &CargoError, shell: &mut MultiShell) { let _ = shell.err().say("\nCaused by:", BLACK); let _ = shell.err().say(format!(" {}", err.description()), BLACK); err.cause().map(|e| handle_cause(e, shell)); } pub fn version() -> String { format!("cargo {}", match option_env!("CFG_VERSION") { Some(s) => s.to_string(), None => format!("{}.{}.{}{}", env!("CARGO_PKG_VERSION_MAJOR"), env!("CARGO_PKG_VERSION_MINOR"), env!("CARGO_PKG_VERSION_PATCH"), option_env!("CARGO_PKG_VERSION_PRE").unwrap_or("")) }) } fn flags_from_args<T: FlagParser>(args: &[String], options_first: bool) -> CliResult<T> { let args = args.iter().map(|a| a.as_slice()).collect::<Vec<&str>>(); let config = docopt::Config { options_first: options_first, help: true, version: Some(version()), }; 
FlagParser::parse_args(config, args.as_slice()).map_err(|e| { let code = if e.fatal() {1} else {0}; CliError::from_error(e, code) }) } fn json_from_stdin<T: RepresentsJSON>() -> CliResult<T> { let mut reader = io::stdin(); let input = try!(reader.read_to_string().map_err(|_| { CliError::new("Standard in did not exist or was not UTF-8", 1) })); let json = try!(json::from_str(input.as_slice()).map_err(|_| { CliError::new("Could not parse standard in as JSON", 1) })); let mut decoder = json::Decoder::new(json); Decodable::decode(&mut decoder).map_err(|_| { CliError::new("Could not process standard in as input", 1) }) } fn off_the_main_thread(p: proc():Send) { let (tx, rx) = channel(); spawn(proc() { p(); tx.send(()); }); if rx.recv_opt().is_err() { std::os::set_exit_status(std::rt::DEFAULT_ERROR_CODE); } }
l) { match result { Err(e) => handle_error(e, shell), Ok(encodable) => { encodable.map(|encodable| { let encoded = json::encode(&encodable); println!("{}", encoded); }); } } }
function_block-function_prefixed
[ { "content": "pub fn upload_login(shell: &mut MultiShell, token: String) -> CargoResult<()> {\n\n let config = try!(Config::new(shell, None, None));\n\n let UploadConfig { host, token: _ } = try!(upload_configuration());\n\n let mut map = HashMap::new();\n\n let p = os::getcwd();\n\n match host {\n\n Some(host) => {\n\n map.insert(\"host\".to_string(), config::String(host, p.clone()));\n\n }\n\n None => {}\n\n }\n\n map.insert(\"token\".to_string(), config::String(token, p));\n\n\n\n config::set_config(&config, config::Global, \"registry\", config::Table(map))\n\n}\n", "file_path": "src/cargo/ops/cargo_upload.rs", "rank": 0, "score": 377537.1951230435 }, { "content": "pub fn fetch(repo: &git2::Repository, url: &str,\n\n refspec: &str) -> CargoResult<()> {\n\n // Create a local anonymous remote in the repository to fetch the url\n\n\n\n with_authentication(url, &try!(repo.config()), |f| {\n\n let mut cb = git2::RemoteCallbacks::new()\n\n .credentials(f);\n\n let mut remote = try!(repo.remote_anonymous(url.as_slice(), refspec));\n\n try!(remote.add_fetch(\"refs/tags/*:refs/tags/*\"));\n\n remote.set_callbacks(&mut cb);\n\n try!(remote.fetch(None, None));\n\n Ok(())\n\n })\n\n}\n", "file_path": "src/cargo/sources/git/utils.rs", "rank": 1, "score": 325318.37599749106 }, { "content": "fn tar(pkg: &Package, src: &PathSource, shell: &mut MultiShell,\n\n dst: &Path) -> CargoResult<()> {\n\n\n\n if dst.exists() {\n\n return Err(human(format!(\"destination already exists: {}\",\n\n dst.display())))\n\n }\n\n let tmpfile = try!(File::create(dst));\n\n\n\n // Prepare the encoder and its header\n\n let encoder = GzBuilder::new().filename(dst.filename().unwrap())\n\n .writer(tmpfile, BestCompression);\n\n\n\n // Put all package files into a compressed archive\n\n let ar = Archive::new(encoder);\n\n for file in try!(src.list_files(pkg)).iter() {\n\n if file == dst { continue }\n\n let relative = file.path_relative_from(&dst.dir_path()).unwrap();\n\n let relative = 
try!(relative.as_str().require(|| {\n\n human(format!(\"non-utf8 path in source directory: {}\",\n", "file_path": "src/cargo/ops/cargo_package.rs", "rank": 2, "score": 324370.90642496146 }, { "content": "fn emit_package(dep: &toml::TomlTable, out: &mut String) {\n\n out.push_str(format!(\"name = {}\\n\", lookup(dep, \"name\")).as_slice());\n\n out.push_str(format!(\"version = {}\\n\", lookup(dep, \"version\")).as_slice());\n\n\n\n dep.find(&\"source\".to_string()).map(|_| {\n\n out.push_str(format!(\"source = {}\\n\", lookup(dep, \"source\")).as_slice());\n\n });\n\n\n\n dep.find(&\"dependencies\".to_string()).map(|s| {\n\n let slice = s.as_slice().unwrap();\n\n\n\n if !slice.is_empty() {\n\n out.push_str(\"dependencies = [\\n\");\n\n\n\n for child in s.as_slice().unwrap().iter() {\n\n out.push_str(format!(\" {},\\n\", child).as_slice());\n\n }\n\n\n\n out.push_str(\"]\\n\");\n\n }\n\n out.push_str(\"\\n\");\n\n });\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_generate_lockfile.rs", "rank": 4, "score": 300564.09242462367 }, { "content": "pub fn new(opts: NewOptions, _shell: &mut MultiShell) -> CargoResult<()> {\n\n let path = os::getcwd().join(opts.path);\n\n if path.exists() {\n\n return Err(human(format!(\"Destination `{}` already exists\",\n\n path.display())))\n\n }\n\n let name = path.filename_str().unwrap();\n\n mk(&path, name, &opts).chain_error(|| {\n\n human(format!(\"Failed to create project `{}` at `{}`\",\n\n name, path.display()))\n\n })\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_new.rs", "rank": 5, "score": 297911.32312496647 }, { "content": "fn ident(url: &Url) -> String {\n\n let hasher = SipHasher::new_with_keys(0,0);\n\n\n\n // FIXME: this really should be able to not use to_str() everywhere, but the\n\n // compiler seems to currently ask for static lifetimes spuriously.\n\n // Perhaps related to rust-lang/rust#15144\n\n let url = canonicalize_url(url);\n\n let ident = url.path().unwrap_or(&[])\n\n .last().map(|a| 
a.clone()).unwrap_or(String::new());\n\n\n\n let ident = if ident.as_slice() == \"\" {\n\n \"_empty\".to_string()\n\n } else {\n\n ident\n\n };\n\n\n\n format!(\"{}-{}\", ident, to_hex(hasher.hash(&url)))\n\n}\n\n\n", "file_path": "src/cargo/sources/git/source.rs", "rank": 6, "score": 290391.829769321 }, { "content": "// Some hacks and heuristics for making equivalent URLs hash the same\n\npub fn canonicalize_url(url: &Url) -> Url {\n\n let mut url = url.clone();\n\n\n\n // Strip a trailing slash\n\n match url.scheme_data {\n\n url::RelativeSchemeData(ref mut rel) => {\n\n if rel.path.last().map(|s| s.is_empty()).unwrap_or(false) {\n\n rel.path.pop();\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n // HACKHACK: For github URL's specifically just lowercase\n\n // everything. GitHub treats both the same, but they hash\n\n // differently, and we're gonna be hashing them. This wants a more\n\n // general solution, and also we're almost certainly not using the\n\n // same case conversion rules that GitHub does. 
(#84)\n\n if url.domain() == Some(\"github.com\") {\n\n url.scheme = \"https\".to_string();\n", "file_path": "src/cargo/sources/git/source.rs", "rank": 7, "score": 289751.7785978672 }, { "content": "pub fn mkdir_recursive(path: &Path) -> Result<(), String> {\n\n fs::mkdir_recursive(path, io::UserDir)\n\n .with_err_msg(format!(\"could not create directory; path={}\",\n\n path.display()))\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 9, "score": 285373.0859082109 }, { "content": "pub fn rmdir_recursive(path: &Path) -> Result<(), String> {\n\n path.rm_rf()\n\n .with_err_msg(format!(\"could not rm directory; path={}\",\n\n path.display()))\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 10, "score": 285373.0859082109 }, { "content": "pub trait ResultTest<T,E> {\n\n fn assert(self) -> T;\n\n}\n\n\n\nimpl<T,E: Show> ResultTest<T,E> for Result<T,E> {\n\n fn assert(self) -> T {\n\n match self {\n\n Ok(val) => val,\n\n Err(err) => fail!(\"Result was error: {}\", err)\n\n }\n\n }\n\n}\n\n\n\nimpl<T> ResultTest<T,()> for Option<T> {\n\n fn assert(self) -> T {\n\n match self {\n\n Some(val) => val,\n\n None => fail!(\"Option was None\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 11, "score": 283576.6828983157 }, { "content": "/// Find a globally configured HTTP proxy if one is available.\n\n///\n\n/// Favor cargo's `http.proxy`, then git's `http.proxy`, then finally a\n\n/// HTTP_PROXY env var.\n\npub fn http_proxy() -> CargoResult<Option<String>> {\n\n let configs = try!(config::all_configs(os::getcwd()));\n\n match configs.find_equiv(&\"http\") {\n\n Some(http) => {\n\n let http = try!(http.table().chain_error(|| {\n\n internal(\"invalid configuration for the key `http`\")\n\n }));\n\n match http.find_equiv(&\"proxy\") {\n\n Some(proxy) => {\n\n return Ok(Some(try!(proxy.string().chain_error(|| {\n\n internal(\"invalid configuration for key `http.proxy`\")\n\n })).ref0().to_string()))\n\n }\n\n None => {},\n\n }\n\n }\n\n None 
=> {}\n\n }\n\n match git2::Config::open_default() {\n\n Ok(cfg) => {\n\n match cfg.get_str(\"http.proxy\") {\n\n Ok(s) => return Ok(Some(s.to_string())),\n\n Err(..) => {}\n\n }\n\n }\n\n Err(..) => {}\n\n }\n\n Ok(os::getenv(\"HTTP_PROXY\"))\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_upload.rs", "rank": 12, "score": 283393.5434004892 }, { "content": "pub fn parse(toml: &str, file: &Path) -> CargoResult<toml::TomlTable> {\n\n let mut parser = toml::Parser::new(toml.as_slice());\n\n match parser.parse() {\n\n Some(toml) => return Ok(toml),\n\n None => {}\n\n }\n\n let mut error_str = format!(\"could not parse input TOML\\n\");\n\n for error in parser.errors.iter() {\n\n let (loline, locol) = parser.to_linecol(error.lo);\n\n let (hiline, hicol) = parser.to_linecol(error.hi);\n\n error_str.push_str(format!(\"{}:{}:{}{} {}\\n\",\n\n file.display(),\n\n loline + 1, locol + 1,\n\n if loline != hiline || locol != hicol {\n\n format!(\"-{}:{}\", hiline + 1,\n\n hicol + 1)\n\n } else {\n\n \"\".to_string()\n\n },\n\n error.desc).as_slice());\n\n }\n\n Err(human(error_str))\n\n}\n\n\n", "file_path": "src/cargo/util/toml.rs", "rank": 13, "score": 282876.5731824797 }, { "content": "pub trait FromError<E> {\n\n fn from_error(error: E) -> Self;\n\n}\n\n\n\nimpl<E: CargoError + Send> FromError<E> for Box<CargoError + Send> {\n\n fn from_error(error: E) -> Box<CargoError + Send> {\n\n error.box_error()\n\n }\n\n}\n\n\n\nmacro_rules! 
from_error (\n\n ($ty:ty) => {\n\n impl FromError<$ty> for $ty {\n\n fn from_error(error: $ty) -> $ty {\n\n error\n\n }\n\n }\n\n }\n\n)\n\n\n", "file_path": "src/cargo/util/errors.rs", "rank": 15, "score": 277306.1800880506 }, { "content": "/// Find the root Cargo.toml\n\npub fn find_root_manifest_for_cwd(manifest_path: Option<String>) -> CliResult<Path> {\n\n match manifest_path {\n\n Some(path) => Ok(Path::new(path)),\n\n None => match find_project_manifest(&os::getcwd(), \"Cargo.toml\") {\n\n Ok(x) => Ok(x),\n\n Err(_) => Err(CliError::new(\"Could not find Cargo.toml in this \\\n\n directory or any parent directory\", 102))\n\n }\n\n }.map(|path| os::make_absolute(&path))\n\n}\n\n\n", "file_path": "src/cargo/util/important_paths.rs", "rank": 16, "score": 268995.8414242366 }, { "content": "pub fn all_configs(pwd: Path) -> CargoResult<HashMap<string::String, ConfigValue>> {\n\n let mut cfg = Table(HashMap::new());\n\n\n\n try!(walk_tree(&pwd, |mut file| {\n\n let path = file.path().clone();\n\n let contents = try!(file.read_to_string());\n\n let table = try!(cargo_toml::parse(contents.as_slice(), &path).chain_error(|| {\n\n internal(format!(\"could not parse Toml manifest; path={}\",\n\n path.display()))\n\n }));\n\n let value = try!(ConfigValue::from_toml(&path, toml::Table(table)));\n\n try!(cfg.merge(value));\n\n Ok(())\n\n }).map_err(|_| human(\"Couldn't load Cargo configuration\")));\n\n\n\n\n\n match cfg {\n\n Table(map) => Ok(map),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/cargo/util/config.rs", "rank": 17, "score": 268958.20052546536 }, { "content": "fn process_dependencies<'a>(cx: &mut Context<'a>, dev: bool,\n\n new_deps: Option<&HashMap<String, TomlDependency>>)\n\n -> CargoResult<()> {\n\n let dependencies = match new_deps {\n\n Some(ref dependencies) => dependencies,\n\n None => return Ok(())\n\n };\n\n for (n, v) in dependencies.iter() {\n\n let details = match *v {\n\n SimpleDep(ref version) => {\n\n let mut d: 
DetailedTomlDependency = Default::default();\n\n d.version = Some(version.clone());\n\n d\n\n }\n\n DetailedDep(ref details) => details.clone(),\n\n };\n\n let reference = details.branch.clone()\n\n .or_else(|| details.tag.clone())\n\n .or_else(|| details.rev.clone())\n\n .unwrap_or_else(|| \"master\".to_string());\n", "file_path": "src/cargo/util/toml.rs", "rank": 18, "score": 267454.72231802373 }, { "content": "pub trait ToSemver {\n\n fn to_semver(self) -> Result<Version, String>;\n\n}\n\n\n\nimpl ToSemver for Version {\n\n fn to_semver(self) -> Result<Version, String> { Ok(self) }\n\n}\n\n\n\nimpl<'a> ToSemver for &'a str {\n\n fn to_semver(self) -> Result<Version, String> {\n\n match Version::parse(self) {\n\n Ok(v) => Ok(v),\n\n Err(..) => Err(format!(\"cannot parse '{}' as a semver\", self)),\n\n }\n\n }\n\n}\n", "file_path": "src/cargo/util/to_semver.rs", "rank": 19, "score": 265497.60267942277 }, { "content": "pub trait ToUrl {\n\n fn to_url(self) -> Result<Url, String>;\n\n}\n\n\n\nimpl ToUrl for Url {\n\n fn to_url(self) -> Result<Url, String> {\n\n Ok(self)\n\n }\n\n}\n\n\n\nimpl<'a> ToUrl for &'a Url {\n\n fn to_url(self) -> Result<Url, String> {\n\n Ok(self.clone())\n\n }\n\n}\n\n\n\nimpl<'a> ToUrl for &'a str {\n\n fn to_url(self) -> Result<Url, String> {\n\n UrlParser::new().scheme_type_mapper(mapper).parse(self).map_err(|s| {\n\n format!(\"invalid url `{}`: {}\", self, s)\n", "file_path": "src/cargo/util/to_url.rs", "rank": 20, "score": 265405.81405290146 }, { "content": "pub fn shell_writes<T: Show>(string: T) -> ShellWrites {\n\n ShellWrites { expected: string.to_string() }\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 21, "score": 257101.56995512982 }, { "content": "/// A Source finds and downloads remote packages based on names and\n\n/// versions.\n\npub trait Source: Registry {\n\n /// The update method performs any network operations required to\n\n /// get the entire list of all names, versions and dependencies of\n\n /// 
packages managed by the Source.\n\n fn update(&mut self) -> CargoResult<()>;\n\n\n\n /// The download method fetches the full package for each name and\n\n /// version specified.\n\n fn download(&mut self, packages: &[PackageId]) -> CargoResult<()>;\n\n\n\n /// The get method returns the Path of each specified package on the\n\n /// local file system. It assumes that `download` was already called,\n\n /// and that the packages are already locally available on the file\n\n /// system.\n\n fn get(&self, packages: &[PackageId]) -> CargoResult<Vec<Package>>;\n\n\n\n /// Generates a unique string which represents the fingerprint of the\n\n /// current state of the source.\n\n ///\n\n /// This fingerprint is used to determine the \"fresheness\" of the source\n", "file_path": "src/cargo/core/source.rs", "rank": 22, "score": 255234.671516483 }, { "content": "pub trait Wrap {\n\n fn wrap<E: CargoError + Send>(self, error: E) -> Self;\n\n}\n\n\n\nimpl<T> Wrap for Result<T, Box<CargoError + Send>> {\n\n fn wrap<E: CargoError + Send>(self, error: E) -> CargoResult<T> {\n\n match self {\n\n Ok(x) => Ok(x),\n\n Err(e) => Err(error.with_cause(e))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/cargo/util/result.rs", "rank": 23, "score": 254737.30219346573 }, { "content": "/// Cleans the project from build artifacts.\n\npub fn clean(manifest_path: &Path, opts: &mut CleanOptions) -> CargoResult<()> {\n\n let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(src.update());\n\n let root = try!(src.get_root_package());\n\n let manifest = root.get_manifest();\n\n\n\n // If we have a spec, then we need to delete some package,s otherwise, just\n\n // remove the whole target directory and be done with it!\n\n let spec = match opts.spec {\n\n Some(spec) => spec,\n\n None => return rm_rf(manifest.get_target_dir()),\n\n };\n\n\n\n // Load the lockfile (if one's available), and resolve spec to a pkgid\n\n let lockfile = root.get_root().join(\"Cargo.lock\");\n\n let 
source_id = root.get_package_id().get_source_id();\n\n let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {\n\n Some(resolve) => resolve,\n\n None => return Err(human(\"A Cargo.lock must exist before cleaning\"))\n\n };\n", "file_path": "src/cargo/ops/cargo_clean.rs", "rank": 24, "score": 254034.179376702 }, { "content": "fn lines_match(expected: &str, mut actual: &str) -> bool {\n\n for part in expected.split_str(\"[..]\") {\n\n match actual.find_str(part) {\n\n Some(i) => actual = actual.slice_from(i),\n\n None => {\n\n return false\n\n }\n\n }\n\n }\n\n return true;\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 25, "score": 251631.64656657123 }, { "content": "pub fn to_hex(num: u64) -> String {\n\n let mut writer = MemWriter::with_capacity(8);\n\n writer.write_le_u64(num).unwrap(); // this should never fail\n\n writer.get_ref().to_hex()\n\n}\n\n\n", "file_path": "src/cargo/util/hex.rs", "rank": 26, "score": 251085.75533717033 }, { "content": "pub trait Require<T> {\n\n fn require<E: CargoError + Send>(self, err: || -> E) -> CargoResult<T>;\n\n}\n\n\n\nimpl<T> Require<T> for Option<T> {\n\n fn require<E: CargoError + Send>(self, err: || -> E) -> CargoResult<T> {\n\n match self {\n\n Some(x) => Ok(x),\n\n None => Err(box err().concrete() as Box<CargoError + Send>)\n\n }\n\n }\n\n}\n", "file_path": "src/cargo/util/result.rs", "rank": 27, "score": 244736.26684733224 }, { "content": "fn discover_author() -> CargoResult<(String, Option<String>)> {\n\n let git_config = Config::open_default().ok();\n\n let git_config = git_config.as_ref();\n\n let name = git_config.and_then(|g| g.get_str(\"user.name\").ok())\n\n .map(|s| s.to_string())\n\n .or_else(|| os::getenv(\"USER\"));\n\n let name = match name {\n\n Some(name) => name,\n\n None => return Err(human(\"could not determine the current user, \\\n\n please set $USER\"))\n\n };\n\n let email = git_config.and_then(|g| g.get_str(\"user.email\").ok());\n\n\n\n let name = 
name.as_slice().trim().to_string();\n\n let email = email.map(|s| s.as_slice().trim().to_string());\n\n\n\n Ok((name, email))\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_new.rs", "rank": 29, "score": 242853.59948339424 }, { "content": "pub fn caused_human<S: Show, E: CargoError + Send>(error: S, cause: E) -> Box<CargoError + Send> {\n\n box ConcreteCargoError {\n\n description: error.to_string(),\n\n detail: None,\n\n cause: Some(cause.box_error()),\n\n is_human: true\n\n } as Box<CargoError + Send>\n\n}\n", "file_path": "src/cargo/util/errors.rs", "rank": 30, "score": 242510.20043775675 }, { "content": "fn url(s: &str) -> url::ParseResult<Url> {\n\n return UrlParser::new().scheme_type_mapper(mapper).parse(s);\n\n\n\n fn mapper(scheme: &str) -> url::SchemeType {\n\n if scheme == \"cargo\" {\n\n url::RelativeScheme(1)\n\n } else {\n\n url::whatwg_scheme_type_mapper(scheme)\n\n }\n\n }\n\n\n\n}\n\n\n\nimpl fmt::Show for PackageIdSpec {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let mut printed_name = false;\n\n match self.url {\n\n Some(ref url) => {\n\n if url.scheme.as_slice() == \"cargo\" {\n\n try!(write!(f, \"{}/{}\", url.host().unwrap(),\n", "file_path": "src/cargo/core/package_id_spec.rs", "rank": 31, "score": 241556.37120020238 }, { "content": "pub fn path2url(p: Path) -> Url {\n\n Url::from_file_path(&p).unwrap()\n\n}\n\n\n\npub static RUNNING: &'static str = \" Running\";\n\npub static COMPILING: &'static str = \" Compiling\";\n\npub static FRESH: &'static str = \" Fresh\";\n\npub static UPDATING: &'static str = \" Updating\";\n\npub static DOCTEST: &'static str = \" Doc-tests\";\n\npub static PACKAGING: &'static str = \" Packaging\";\n\npub static DOWNLOADING: &'static str = \" Downloading\";\n\npub static UPLOADING: &'static str = \" Uploading\";\n", "file_path": "tests/support/mod.rs", "rank": 33, "score": 238857.63334614626 }, { "content": "/// Given the data to build and write a fingerprint, generate some Work\n\n/// instances to 
actually perform the necessary work.\n\nfn prepare(is_fresh: bool, loc: Path, fingerprint: String,\n\n to_copy: Vec<(Path, Path)>) -> Preparation {\n\n let write_fingerprint = proc() {\n\n try!(File::create(&loc).write_str(fingerprint.as_slice()));\n\n Ok(())\n\n };\n\n\n\n let move_old = proc() {\n\n for &(ref src, ref dst) in to_copy.iter() {\n\n try!(fs::rename(src, dst));\n\n }\n\n Ok(())\n\n };\n\n\n\n (if is_fresh {Fresh} else {Dirty}, write_fingerprint, move_old)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 35, "score": 234017.155650298 }, { "content": "pub fn execs() -> Execs {\n\n Execs {\n\n expect_stdout: None,\n\n expect_stderr: None,\n\n expect_stdin: None,\n\n expect_exit_code: None\n\n }\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 36, "score": 233116.38884450466 }, { "content": "pub fn to_manifest(contents: &[u8],\n\n source_id: &SourceId,\n\n layout: Layout)\n\n -> CargoResult<(Manifest, Vec<Path>)> {\n\n let manifest = layout.root.join(\"Cargo.toml\");\n\n let manifest = match manifest.path_relative_from(&os::getcwd()) {\n\n Some(path) => path,\n\n None => manifest,\n\n };\n\n let contents = try!(str::from_utf8(contents).require(|| {\n\n human(format!(\"{} is not valid UTF-8\", manifest.display()))\n\n }));\n\n let root = try!(parse(contents, &manifest));\n\n let mut d = toml::Decoder::new(toml::Table(root));\n\n let toml_manifest: TomlManifest = match Decodable::decode(&mut d) {\n\n Ok(t) => t,\n\n Err(e) => return Err(human(format!(\"{} is not a valid \\\n\n manifest\\n\\n{}\",\n\n manifest.display(), e)))\n\n };\n", "file_path": "src/cargo/util/toml.rs", "rank": 37, "score": 230960.07688011177 }, { "content": "pub fn short_hash<H: Hash>(hashable: &H) -> String {\n\n let hasher = SipHasher::new_with_keys(0, 0);\n\n to_hex(hasher.hash(hashable))\n\n}\n", "file_path": "src/cargo/util/hex.rs", "rank": 38, "score": 229715.16056799592 }, { "content": "pub fn resolve_and_fetch(registry: &mut 
PackageRegistry, package: &Package)\n\n -> CargoResult<Resolve> {\n\n let _p = profile::start(\"resolve and fetch...\");\n\n\n\n let lockfile = package.get_manifest_path().dir_path().join(\"Cargo.lock\");\n\n let source_id = package.get_package_id().get_source_id();\n\n match try!(ops::load_lockfile(&lockfile, source_id)) {\n\n Some(r) => try!(add_lockfile_sources(registry, package, &r)),\n\n None => try!(registry.add_sources(package.get_source_ids())),\n\n }\n\n\n\n let resolved = try!(resolver::resolve(package.get_summary(),\n\n resolver::ResolveEverything,\n\n registry));\n\n try!(ops::write_resolve(package, &resolved));\n\n Ok(resolved)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_fetch.rs", "rank": 39, "score": 228946.78883705294 }, { "content": "fn with_authentication<T>(url: &str,\n\n cfg: &git2::Config,\n\n f: |git2::Credentials| -> CargoResult<T>)\n\n -> CargoResult<T> {\n\n // Prepare the authentication callbacks.\n\n //\n\n // We check the `allowed` types of credentials, and we try to do as much as\n\n // possible based on that:\n\n //\n\n // * Prioritize SSH keys from the local ssh agent as they're likely the most\n\n // reliable. The username here is prioritized from the credential\n\n // callback, then from whatever is configured in git itself, and finally\n\n // we fall back to the generic user of `git`.\n\n //\n\n // * If a username/password is allowed, then we fallback to git2-rs's\n\n // implementation of the credential helper. 
This is what is configured\n\n // with `credential.helper` in git, and is the interface for the OSX\n\n // keychain, for example.\n\n //\n\n // * After the above two have failed, we just kinda grapple attempting to\n", "file_path": "src/cargo/sources/git/utils.rs", "rank": 40, "score": 227834.67541397063 }, { "content": "pub fn basic_bin_manifest(name: &str) -> String {\n\n format!(r#\"\n\n [package]\n\n\n\n name = \"{}\"\n\n version = \"0.5.0\"\n\n authors = [\"wycats@example.com\"]\n\n\n\n [[bin]]\n\n\n\n name = \"{}\"\n\n \"#, name, name)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 41, "score": 226336.1018432149 }, { "content": "pub fn basic_lib_manifest(name: &str) -> String {\n\n format!(r#\"\n\n [package]\n\n\n\n name = \"{}\"\n\n version = \"0.5.0\"\n\n authors = [\"wycats@example.com\"]\n\n\n\n [lib]\n\n\n\n name = \"{}\"\n\n \"#, name, name)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 42, "score": 226336.1018432149 }, { "content": "/// Prepare the necessary work for the fingerprint of a build command.\n\n///\n\n/// Build commands are located on packages, not on targets. Additionally, we\n\n/// don't have --dep-info to drive calculation of the fingerprint of a build\n\n/// command. This brings up an interesting predicament which gives us a few\n\n/// options to figure out whether a build command is dirty or not:\n\n///\n\n/// 1. A build command is dirty if *any* file in a package changes. In theory\n\n/// all files are candidate for being used by the build command.\n\n/// 2. A build command is dirty if any file in a *specific directory* changes.\n\n/// This may lose information as it may require files outside of the specific\n\n/// directory.\n\n/// 3. 
A build command must itself provide a dep-info-like file stating how it\n\n/// should be considered dirty or not.\n\n///\n\n/// The currently implemented solution is option (1), although it is planned to\n\n/// migrate to option (2) in the near future.\n\npub fn prepare_build_cmd(cx: &mut Context, pkg: &Package)\n\n -> CargoResult<Preparation> {\n\n let _p = profile::start(format!(\"fingerprint build cmd: {}\",\n\n pkg.get_package_id()));\n\n\n\n // TODO: this should not explicitly pass KindTarget\n\n let kind = KindTarget;\n\n\n\n if pkg.get_manifest().get_build().len() == 0 {\n\n return Ok((Fresh, proc() Ok(()), proc() Ok(())))\n\n }\n\n let (old, new) = dirs(cx, pkg, kind);\n\n let old_loc = old.join(\"build\");\n\n let new_loc = new.join(\"build\");\n\n\n\n debug!(\"fingerprint at: {}\", new_loc.display());\n\n\n\n let new_fingerprint = try!(calculate_build_cmd_fingerprint(cx, pkg));\n\n let new_fingerprint = mk_fingerprint(cx, &new_fingerprint);\n\n\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 43, "score": 225535.65738666733 }, { "content": "fn build_deps_args(mut cmd: ProcessBuilder, target: &Target, package: &Package,\n\n cx: &Context,\n\n kind: Kind) -> CargoResult<ProcessBuilder> {\n\n enum LinkReason { Dependency, LocalLib }\n\n\n\n let layout = cx.layout(kind);\n\n cmd = cmd.arg(\"-L\").arg(layout.root());\n\n cmd = cmd.arg(\"-L\").arg(layout.deps());\n\n\n\n // Traverse the entire dependency graph looking for -L paths to pass for\n\n // native dependencies.\n\n let mut dirs = Vec::new();\n\n each_dep(package, cx, |pkg| {\n\n if pkg.get_manifest().get_build().len() > 0 {\n\n dirs.push(layout.native(pkg));\n\n }\n\n });\n\n for dir in dirs.into_iter() {\n\n cmd = cmd.arg(\"-L\").arg(dir);\n\n }\n", "file_path": "src/cargo/ops/cargo_rustc/mod.rs", "rank": 44, "score": 224190.71170701514 }, { "content": "fn build_plugin_args(mut cmd: ProcessBuilder, cx: &Context, pkg: &Package,\n\n target: &Target, kind: Kind) -> ProcessBuilder {\n\n 
cmd = cmd.arg(\"--out-dir\");\n\n cmd = cmd.arg(cx.layout(kind).root());\n\n\n\n let (_, dep_info_loc) = fingerprint::dep_info_loc(cx, pkg, target, kind);\n\n cmd = cmd.arg(\"--dep-info\").arg(dep_info_loc);\n\n\n\n if kind == KindTarget {\n\n fn opt(cmd: ProcessBuilder, key: &str, prefix: &str,\n\n val: Option<&str>) -> ProcessBuilder {\n\n match val {\n\n Some(val) => {\n\n cmd.arg(key)\n\n .arg(format!(\"{}{}\", prefix, val))\n\n }\n\n None => cmd\n\n }\n\n }\n\n\n\n cmd = opt(cmd, \"--target\", \"\", cx.config.target());\n\n cmd = opt(cmd, \"-C\", \"ar=\", cx.config.ar());\n\n cmd = opt(cmd, \"-C\", \"linker=\", cx.config.linker());\n\n }\n\n\n\n return cmd;\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_rustc/mod.rs", "rank": 45, "score": 224190.71170701514 }, { "content": "pub fn upload_configuration() -> CargoResult<UploadConfig> {\n\n let configs = try!(config::all_configs(os::getcwd()));\n\n let registry = match configs.find_equiv(&\"registry\") {\n\n None => return Ok(UploadConfig { host: None, token: None }),\n\n Some(registry) => try!(registry.table().chain_error(|| {\n\n internal(\"invalid configuration for the key `registry`\")\n\n })),\n\n };\n\n let host = match registry.find_equiv(&\"host\") {\n\n None => None,\n\n Some(host) => {\n\n Some(try!(host.string().chain_error(|| {\n\n internal(\"invalid configuration for key `host`\")\n\n })).ref0().to_string())\n\n }\n\n };\n\n let token = match registry.find_equiv(&\"token\") {\n\n None => None,\n\n Some(token) => {\n\n Some(try!(token.string().chain_error(|| {\n\n internal(\"invalid configuration for key `token`\")\n\n })).ref0().to_string())\n\n }\n\n };\n\n Ok(UploadConfig { host: host, token: token })\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_upload.rs", "rank": 46, "score": 223571.96593912411 }, { "content": "fn try_add_file(files: &mut Vec<Path>, root: &Path, dir: &str) {\n\n let p = root.join(dir);\n\n if p.exists() {\n\n files.push(p);\n\n }\n\n}\n", "file_path": "src/cargo/util/toml.rs", 
"rank": 47, "score": 223258.87542318625 }, { "content": "fn try_add_files(files: &mut Vec<Path>, root: &Path, dir: &str) {\n\n match fs::readdir(&root.join(dir)) {\n\n Ok(new) => {\n\n files.extend(new.into_iter().filter(|f| f.extension_str() == Some(\"rs\")))\n\n }\n\n Err(_) => {/* just don't add anything if the directory doesn't exist, etc. */}\n\n }\n\n}\n\n\n\n/// Returns a new `Layout` for a given root path.\n\n/// The `root_path` represents the directory that contains the `Cargo.toml` file.\n\n\n", "file_path": "src/cargo/util/toml.rs", "rank": 48, "score": 223258.87542318625 }, { "content": "pub fn human<S: Show>(error: S) -> Box<CargoError + Send> {\n\n box ConcreteCargoError {\n\n description: error.to_string(),\n\n detail: None,\n\n cause: None,\n\n is_human: true\n\n } as Box<CargoError + Send>\n\n}\n\n\n", "file_path": "src/cargo/util/errors.rs", "rank": 49, "score": 222279.76914761402 }, { "content": "/// When a lockfile is present, we want to keep as many dependencies at their\n\n/// original revision as possible. We need to account, however, for\n\n/// modifications to the manifest in terms of modifying, adding, or deleting\n\n/// dependencies.\n\n///\n\n/// This method will add any appropriate sources from the lockfile into the\n\n/// registry, and add all other sources from the root package to the registry.\n\n/// Any dependency which has not been modified has its source added to the\n\n/// registry (to retain the precise field if possible). 
Any dependency which\n\n/// *has* changed has its source id listed in the manifest added and all of its\n\n/// transitive dependencies are blacklisted to not be added from the lockfile.\n\n///\n\n/// TODO: this won't work too well for registry-based packages, but we don't\n\n/// have many of those anyway so we should be ok for now.\n\nfn add_lockfile_sources(registry: &mut PackageRegistry,\n\n root: &Package,\n\n resolve: &Resolve) -> CargoResult<()> {\n\n let deps = resolve.deps(root.get_package_id()).into_iter().flat_map(|deps| {\n\n deps.map(|d| (d.get_name(), d))\n\n }).collect::<HashMap<_, &PackageId>>();\n\n\n\n let mut sources = vec![root.get_package_id().get_source_id().clone()];\n\n let mut to_avoid = HashSet::new();\n\n let mut to_add = HashSet::new();\n\n for dep in root.get_dependencies().iter() {\n\n match deps.find(&dep.get_name()) {\n\n Some(&lockfile_dep) => {\n\n let summary = Summary::new(lockfile_dep.clone(), Vec::new(),\n\n HashMap::new()).unwrap();\n\n if dep.matches(&summary) {\n\n fill_with_deps(resolve, lockfile_dep, &mut to_add);\n\n } else {\n\n fill_with_deps(resolve, lockfile_dep, &mut to_avoid);\n\n sources.push(dep.get_source_id().clone());\n", "file_path": "src/cargo/ops/cargo_fetch.rs", "rank": 50, "score": 221540.96412588053 }, { "content": "pub fn load_lockfile(path: &Path, sid: &SourceId) -> CargoResult<Option<Resolve>> {\n\n // If there is no lockfile, return none.\n\n let mut f = match File::open(path) {\n\n Ok(f) => f,\n\n Err(_) => return Ok(None)\n\n };\n\n\n\n let s = try!(f.read_to_string());\n\n\n\n let table = toml::Table(try!(cargo_toml::parse(s.as_slice(), path)));\n\n let mut d = toml::Decoder::new(table);\n\n let v: resolver::EncodableResolve = Decodable::decode(&mut d).unwrap();\n\n Ok(Some(try!(v.to_resolve(sid))))\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_generate_lockfile.rs", "rank": 52, "score": 220477.61166505938 }, { "content": "/// Create a new HTTP handle with appropriate global configuration for 
cargo.\n\npub fn http_handle() -> CargoResult<http::Handle> {\n\n Ok(match try!(http_proxy()) {\n\n Some(proxy) => http::handle().proxy(proxy),\n\n None => http::handle(),\n\n })\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_upload.rs", "rank": 53, "score": 220160.64026992262 }, { "content": "/// Prepare the necessary work for the fingerprint for a specific target.\n\n///\n\n/// When dealing with fingerprints, cargo gets to choose what granularity\n\n/// \"freshness\" is considered at. One option is considering freshness at the\n\n/// package level. This means that if anything in a package changes, the entire\n\n/// package is rebuilt, unconditionally. This simplicity comes at a cost,\n\n/// however, in that test-only changes will cause libraries to be rebuilt, which\n\n/// is quite unfortunate!\n\n///\n\n/// The cost was deemed high enough that fingerprints are now calculated at the\n\n/// layer of a target rather than a package. Each target can then be kept track\n\n/// of separately and only rebuilt as necessary. This requires cargo to\n\n/// understand what the inputs are to a target, so we drive rustc with the\n\n/// --dep-info flag to learn about all input files to a unit of compilation.\n\n///\n\n/// This function will calculate the fingerprint for a target and prepare the\n\n/// work necessary to either write the fingerprint or copy over all fresh files\n\n/// from the old directories to their new locations.\n\npub fn prepare_target(cx: &mut Context, pkg: &Package, target: &Target,\n\n kind: Kind) -> CargoResult<Preparation> {\n\n let _p = profile::start(format!(\"fingerprint: {} / {}\",\n\n pkg.get_package_id(), target));\n\n let (old, new) = dirs(cx, pkg, kind);\n\n let filename = filename(target);\n\n let old_loc = old.join(filename.as_slice());\n\n let new_loc = new.join(filename.as_slice());\n\n\n\n // We want to use the package fingerprint if we're either a doc target or a\n\n // path source. 
If we're a git/registry source, then the mtime of files may\n\n // fluctuate, but they won't change so long as the source itself remains\n\n // constant (which is the responsibility of the source)\n\n let use_pkg = {\n\n let doc = target.get_profile().is_doc();\n\n let path = match pkg.get_summary().get_source_id().kind {\n\n PathKind => true,\n\n _ => false,\n\n };\n\n doc || !path\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 55, "score": 215220.49613124528 }, { "content": "/// Prepare work for when a package starts to build\n\npub fn prepare_init(cx: &mut Context, pkg: &Package, kind: Kind)\n\n -> (Work, Work) {\n\n let (_, new1) = dirs(cx, pkg, kind);\n\n let new2 = new1.clone();\n\n\n\n let work1 = proc() { try!(fs::mkdir(&new1, UserRWX)); Ok(()) };\n\n let work2 = proc() { try!(fs::mkdir(&new2, UserRWX)); Ok(()) };\n\n\n\n (work1, work2)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 56, "score": 215214.50023953128 }, { "content": "pub fn read_package(path: &Path, source_id: &SourceId)\n\n -> CargoResult<(Package, Vec<Path>)> {\n\n log!(5, \"read_package; path={}; source-id={}\", path.display(), source_id);\n\n let mut file = try!(File::open(path));\n\n let data = try!(file.read_to_end());\n\n\n\n let layout = project_layout(&path.dir_path());\n\n let (manifest, nested) =\n\n try!(read_manifest(data.as_slice(), layout, source_id));\n\n\n\n Ok((Package::new(manifest, path, source_id), nested))\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_read_manifest.rs", "rank": 57, "score": 214582.67059739924 }, { "content": "pub fn project_layout(root_path: &Path) -> Layout {\n\n let mut lib = None;\n\n let mut bins = vec!();\n\n let mut examples = vec!();\n\n let mut tests = vec!();\n\n let mut benches = vec!();\n\n\n\n if root_path.join(\"src/lib.rs\").exists() {\n\n lib = Some(root_path.join(\"src/lib.rs\"));\n\n }\n\n\n\n try_add_file(&mut bins, root_path, \"src/main.rs\");\n\n try_add_files(&mut bins, root_path, 
\"src/bin\");\n\n\n\n try_add_files(&mut examples, root_path, \"examples\");\n\n\n\n try_add_files(&mut tests, root_path, \"tests\");\n\n try_add_files(&mut benches, root_path, \"benches\");\n\n\n\n Layout {\n\n root: root_path.clone(),\n\n lib: lib,\n\n bins: bins,\n\n examples: examples,\n\n tests: tests,\n\n benches: benches,\n\n }\n\n}\n\n\n", "file_path": "src/cargo/util/toml.rs", "rank": 58, "score": 214310.57552582427 }, { "content": "pub trait Registry {\n\n fn query(&mut self, name: &Dependency) -> CargoResult<Vec<Summary>>;\n\n}\n\n\n\nimpl Registry for Vec<Summary> {\n\n fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {\n\n debug!(\"querying for {}, summaries={}\", dep,\n\n self.iter().map(|s| s.get_package_id().to_string()).collect::<Vec<String>>());\n\n\n\n Ok(self.iter().filter(|summary| dep.matches(*summary))\n\n .map(|summary| summary.clone()).collect())\n\n }\n\n}\n\n\n\npub struct PackageRegistry<'a> {\n\n sources: SourceMap<'a>,\n\n overrides: Vec<SourceId>,\n\n config: &'a mut Config<'a>\n\n}\n\n\n", "file_path": "src/cargo/core/registry.rs", "rank": 59, "score": 213699.27962388928 }, { "content": "fn pre_version_component(v: &Version) -> Option<String> {\n\n if v.pre.is_empty() {\n\n return None;\n\n }\n\n\n\n let mut ret = String::new();\n\n\n\n for (i, x) in v.pre.iter().enumerate() {\n\n if i != 0 { ret.push('.') };\n\n ret.push_str(x.to_string().as_slice());\n\n }\n\n\n\n Some(ret)\n\n}\n", "file_path": "src/cargo/ops/cargo_rustc/compilation.rs", "rank": 60, "score": 211567.12027401387 }, { "content": "pub trait SummaryVec {\n\n fn names(&self) -> Vec<String>;\n\n}\n\n\n\nimpl SummaryVec for Vec<Summary> {\n\n // TODO: Move to Registry\n\n fn names(&self) -> Vec<String> {\n\n self.iter().map(|summary| summary.get_name().to_string()).collect()\n\n }\n\n\n\n}\n", "file_path": "src/cargo/core/summary.rs", "rank": 61, "score": 209627.56827279687 }, { "content": "pub trait CargoError: Send {\n\n fn description(&self) -> 
String;\n\n fn detail(&self) -> Option<String> { None }\n\n fn cause(&self) -> Option<&CargoError> { None }\n\n fn is_human(&self) -> bool { false }\n\n\n\n fn to_error<E: FromError<Self>>(self) -> E {\n\n FromError::from_error(self)\n\n }\n\n\n\n fn box_error(self) -> Box<CargoError + Send> {\n\n box self as Box<CargoError + Send>\n\n }\n\n\n\n fn concrete(&self) -> ConcreteCargoError {\n\n ConcreteCargoError {\n\n description: self.description(),\n\n detail: self.detail(),\n\n cause: self.cause().map(|c| box c.concrete() as Box<CargoError + Send>),\n\n is_human: self.is_human()\n", "file_path": "src/cargo/util/errors.rs", "rank": 62, "score": 207065.9307818391 }, { "content": "fn is_fresh(loc: &Path, new_fingerprint: &str) -> CargoResult<bool> {\n\n let mut file = match File::open(loc) {\n\n Ok(file) => file,\n\n Err(..) => return Ok(false),\n\n };\n\n\n\n let old_fingerprint = try!(file.read_to_string());\n\n\n\n log!(5, \"old fingerprint: {}\", old_fingerprint);\n\n log!(5, \"new fingerprint: {}\", new_fingerprint);\n\n\n\n Ok(old_fingerprint.as_slice() == new_fingerprint)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 63, "score": 205434.0212645598 }, { "content": "pub fn realpath(original: &Path) -> io::IoResult<Path> {\n\n static MAX_LINKS_FOLLOWED: uint = 256;\n\n let original = os::make_absolute(original);\n\n\n\n // Right now lstat on windows doesn't work quite well\n\n if cfg!(windows) {\n\n return Ok(original)\n\n }\n\n\n\n let result = original.root_path();\n\n let mut result = result.expect(\"make_absolute has no root_path\");\n\n let mut followed = 0;\n\n\n\n for part in original.components() {\n\n result.push(part);\n\n\n\n loop {\n\n if followed == MAX_LINKS_FOLLOWED {\n\n return Err(io::standard_error(io::InvalidInput))\n\n }\n", "file_path": "src/cargo/util/paths.rs", "rank": 64, "score": 204701.90688419444 }, { "content": "fn source_ids_from_config(configs: &HashMap<String, config::ConfigValue>,\n\n cur_path: Path) 
-> CargoResult<Vec<SourceId>> {\n\n debug!(\"loaded config; configs={}\", configs);\n\n\n\n let config_paths = match configs.find_equiv(&\"paths\") {\n\n Some(cfg) => cfg,\n\n None => return Ok(Vec::new())\n\n };\n\n let paths = try!(config_paths.list().chain_error(|| {\n\n internal(\"invalid configuration for the key `paths`\")\n\n }));\n\n\n\n paths.iter().map(|&(ref s, ref p)| {\n\n // The path listed next to the string is the config file in which the\n\n // key was located, so we want to pop off the `.cargo/config` component\n\n // to get the directory containing the `.cargo` folder.\n\n p.dir_path().dir_path().join(s.as_slice())\n\n }).filter(|p| {\n\n // Make sure we don't override the local package, even if it's in the\n\n // list of override paths.\n\n cur_path != *p\n\n }).map(|p| SourceId::for_path(&p)).collect()\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_compile.rs", "rank": 65, "score": 204204.63257211578 }, { "content": "pub fn write_resolve(pkg: &Package, resolve: &Resolve) -> CargoResult<()> {\n\n let loc = pkg.get_root().join(\"Cargo.lock\");\n\n\n\n let mut e = Encoder::new();\n\n resolve.encode(&mut e).unwrap();\n\n\n\n let mut out = String::new();\n\n\n\n // Note that we do not use e.toml.to_string() as we want to control the\n\n // exact format the toml is in to ensure pretty diffs between updates to the\n\n // lockfile.\n\n let root = e.toml.find(&\"root\".to_string()).unwrap();\n\n\n\n out.push_str(\"[root]\\n\");\n\n emit_package(root.as_table().unwrap(), &mut out);\n\n\n\n let deps = e.toml.find(&\"package\".to_string()).unwrap().as_slice().unwrap();\n\n for dep in deps.iter() {\n\n let dep = dep.as_table().unwrap();\n\n\n\n out.push_str(\"[[package]]\\n\");\n\n emit_package(dep, &mut out);\n\n }\n\n\n\n try!(File::create(&loc).write_str(out.as_slice()));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_generate_lockfile.rs", "rank": 66, "score": 203916.9388930542 }, { "content": "pub fn read_manifest(contents: &[u8], layout: 
Layout, source_id: &SourceId)\n\n -> CargoResult<(Manifest, Vec<Path>)> {\n\n util::toml::to_manifest(contents, source_id, layout).map_err(human)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_read_manifest.rs", "rank": 67, "score": 203197.48851011653 }, { "content": "fn calculate_pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {\n\n let source = cx.sources\n\n .get(pkg.get_package_id().get_source_id())\n\n .expect(\"BUG: Missing package source\");\n\n\n\n source.fingerprint(pkg)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 68, "score": 201938.54429479587 }, { "content": "pub trait BoxError<T> {\n\n fn box_error(self) -> CargoResult<T>;\n\n}\n\n\n", "file_path": "src/cargo/util/errors.rs", "rank": 69, "score": 200858.69227455297 }, { "content": "pub trait ChainError<T> {\n\n fn chain_error<E: CargoError + Send>(self, callback: || -> E) -> CargoResult<T> ;\n\n}\n\n\n\nimpl<'a, T> ChainError<T> for ||:'a -> CargoResult<T> {\n\n fn chain_error<E: CargoError + Send>(self, callback: || -> E) -> CargoResult<T> {\n\n self().map_err(|err| callback().with_cause(err))\n\n }\n\n}\n\n\n\nimpl<T, E: CargoError + Send> BoxError<T> for Result<T, E> {\n\n fn box_error(self) -> CargoResult<T> {\n\n self.map_err(|err| err.box_error())\n\n }\n\n}\n\n\n\nimpl<T, E: CargoError + Send> ChainError<T> for Result<T, E> {\n\n fn chain_error<E: CargoError + Send>(self, callback: || -> E) -> CargoResult<T> {\n\n self.map_err(|err| callback().with_cause(err))\n\n }\n", "file_path": "src/cargo/util/errors.rs", "rank": 70, "score": 200858.69227455297 }, { "content": "// Path to cargo executables\n\npub fn cargo_dir() -> Path {\n\n os::getenv(\"CARGO_BIN_PATH\").map(Path::new)\n\n .or_else(|| os::self_exe_path())\n\n .unwrap_or_else(|| {\n\n fail!(\"CARGO_BIN_PATH wasn't set. Cannot continue running test\")\n\n })\n\n}\n\n\n\n/// Returns an absolute path in the filesystem that `path` points to. 
The\n\n/// returned path does not contain any symlinks in its hierarchy.\n\n/*\n\n *\n\n * ===== Matchers =====\n\n *\n\n */\n\n\n", "file_path": "tests/support/mod.rs", "rank": 71, "score": 200425.07011811523 }, { "content": "pub fn main_file<T: Str>(println: T, deps: &[&str]) -> String {\n\n let mut buf = String::new();\n\n\n\n for dep in deps.iter() {\n\n buf.push_str(format!(\"extern crate {};\\n\", dep).as_slice());\n\n }\n\n\n\n buf.push_str(\"fn main() { println!(\");\n\n buf.push_str(println.as_slice());\n\n buf.push_str(\"); }\\n\");\n\n\n\n buf.to_string()\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 72, "score": 199992.07321357875 }, { "content": "fn extract_config(mut file: File, key: &str) -> CargoResult<ConfigValue> {\n\n let contents = try!(file.read_to_string());\n\n let mut toml = try!(cargo_toml::parse(contents.as_slice(), file.path()));\n\n let val = try!(toml.pop(&key.to_string()).require(|| internal(\"\")));\n\n\n\n ConfigValue::from_toml(file.path(), val)\n\n}\n\n\n", "file_path": "src/cargo/util/config.rs", "rank": 73, "score": 199329.7652560218 }, { "content": "fn calculate_target_fresh(pkg: &Package, dep_info: &Path) -> CargoResult<bool> {\n\n let line = match BufferedReader::new(File::open(dep_info)).lines().next() {\n\n Some(Ok(line)) => line,\n\n _ => return Ok(false),\n\n };\n\n let line = line.as_slice();\n\n let mtime = try!(fs::stat(dep_info)).modified;\n\n let pos = try!(line.find_str(\": \").require(|| {\n\n internal(format!(\"dep-info not in an understood format: {}\",\n\n dep_info.display()))\n\n }));\n\n let deps = line.slice_from(pos + 2);\n\n\n\n for file in deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty()) {\n\n match fs::stat(&pkg.get_root().join(file)) {\n\n Ok(stat) if stat.modified <= mtime => {}\n\n Ok(stat) => {\n\n debug!(\"stale: {} -- {} vs {}\", file, stat.modified, mtime);\n\n return Ok(false)\n\n }\n\n _ => { debug!(\"stale: {} -- missing\", file); return Ok(false) }\n\n }\n\n }\n\n\n\n 
Ok(true)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 74, "score": 199123.37143975444 }, { "content": "/// Iteratively search for `file` in `pwd` and its parents, returning\n\n/// the path of the directory.\n\npub fn find_project(pwd: &Path, file: &str) -> CargoResult<Path> {\n\n find_project_manifest(pwd, file)\n\n .map(|mut p| {\n\n // remove the file, leaving just the directory\n\n p.pop();\n\n p\n\n })\n\n}\n\n\n", "file_path": "src/cargo/util/important_paths.rs", "rank": 75, "score": 198107.93537088495 }, { "content": "pub fn get_config(pwd: Path, key: &str) -> CargoResult<ConfigValue> {\n\n find_in_tree(&pwd, |file| extract_config(file, key)).map_err(|_|\n\n human(format!(\"`{}` not found in your configuration\", key)))\n\n}\n\n\n", "file_path": "src/cargo/util/config.rs", "rank": 76, "score": 198103.1186476457 }, { "content": "fn mapper(s: &str) -> url::SchemeType {\n\n match s {\n\n \"git\" => url::RelativeScheme(9418),\n\n \"ssh\" => url::RelativeScheme(22),\n\n s => url::whatwg_scheme_type_mapper(s),\n\n }\n\n}\n", "file_path": "src/cargo/util/to_url.rs", "rank": 77, "score": 197903.39304114954 }, { "content": "fn build_base_args(cx: &Context,\n\n mut cmd: ProcessBuilder,\n\n pkg: &Package,\n\n target: &Target,\n\n crate_types: &[&str]) -> ProcessBuilder {\n\n let metadata = target.get_metadata();\n\n\n\n // TODO: Handle errors in converting paths into args\n\n cmd = cmd.arg(target.get_src_path());\n\n\n\n cmd = cmd.arg(\"--crate-name\").arg(target.get_name());\n\n\n\n for crate_type in crate_types.iter() {\n\n cmd = cmd.arg(\"--crate-type\").arg(*crate_type);\n\n }\n\n\n\n // Despite whatever this target's profile says, we need to configure it\n\n // based off the profile found in the root package's targets.\n\n let mut profile = target.get_profile().clone();\n\n let root_package = cx.get_package(cx.resolve.root());\n", "file_path": "src/cargo/ops/cargo_rustc/mod.rs", "rank": 78, "score": 197603.8034026041 }, { "content": 
"/// Iteratively search for `file` in `pwd` and its parents, returning\n\n/// the path to the file.\n\npub fn find_project_manifest(pwd: &Path, file: &str) -> CargoResult<Path> {\n\n let mut current = pwd.clone();\n\n\n\n loop {\n\n let manifest = current.join(file);\n\n if manifest.exists() {\n\n return Ok(manifest)\n\n }\n\n\n\n if !current.pop() { break; }\n\n }\n\n\n\n Err(human(format!(\"Could not find `{}` in `{}` or any parent directory\",\n\n file, pwd.display())))\n\n}\n\n\n", "file_path": "src/cargo/util/important_paths.rs", "rank": 79, "score": 194841.2567331656 }, { "content": "pub trait Tap {\n\n fn tap(mut self, callback: |&mut Self|) -> Self;\n\n}\n\n\n\nimpl<T> Tap for T {\n\n fn tap(mut self, callback: |&mut T|) -> T {\n\n callback(&mut self);\n\n self\n\n }\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 80, "score": 194519.29003104853 }, { "content": "fn has_manifest(path: &Path) -> bool {\n\n find_project_manifest_exact(path, \"Cargo.toml\").is_ok()\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_read_manifest.rs", "rank": 81, "score": 193208.99225508716 }, { "content": "fn filename(target: &Target) -> String {\n\n let kind = if target.is_lib() {\"lib\"} else {\"bin\"};\n\n let flavor = if target.get_profile().is_test() {\n\n \"test-\"\n\n } else if target.get_profile().is_doc() {\n\n \"doc-\"\n\n } else {\n\n \"\"\n\n };\n\n format!(\"{}{}-{}\", flavor, kind, target.get_name())\n\n}\n", "file_path": "src/cargo/ops/cargo_rustc/fingerprint.rs", "rank": 82, "score": 192884.65348606772 }, { "content": "fn colored_output<S: Str>(string: S, color: color::Color) -> IoResult<String> {\n\n let mut term: TerminfoTerminal<MemWriter> =\n\n Terminal::new(MemWriter::new()).assert();\n\n try!(term.reset());\n\n try!(term.fg(color));\n\n try!(term.write_str(string.as_slice()));\n\n try!(term.reset());\n\n try!(term.flush());\n\n Ok(String::from_utf8_lossy(term.get_ref().get_ref()).to_string())\n\n}\n", "file_path": "tests/test_shell.rs", "rank": 83, 
"score": 192243.07010563696 }, { "content": "// Returns a mapping of the root package plus its immediate dependencies to\n\n// where the compiled libraries are all located.\n\npub fn compile_targets<'a>(env: &str, targets: &[&'a Target], pkg: &'a Package,\n\n deps: &PackageSet, resolve: &'a Resolve,\n\n sources: &'a SourceMap,\n\n config: &'a mut Config<'a>)\n\n -> CargoResult<Compilation> {\n\n if targets.is_empty() {\n\n return Ok(Compilation::new())\n\n }\n\n\n\n debug!(\"compile_targets; targets={}; pkg={}; deps={}\", targets, pkg, deps);\n\n\n\n let dest = uniq_target_dest(targets);\n\n let root = deps.iter().find(|p| p.get_package_id() == resolve.root()).unwrap();\n\n let host_layout = Layout::new(root, None, dest);\n\n let target_layout = config.target().map(|target| {\n\n layout::Layout::new(root, Some(target), dest)\n\n });\n\n\n\n let mut cx = try!(Context::new(env, resolve, sources, deps, config,\n\n host_layout, target_layout));\n", "file_path": "src/cargo/ops/cargo_rustc/mod.rs", "rank": 84, "score": 192031.9918914411 }, { "content": "/// Return the path to the `file` in `pwd`, if it exists.\n\npub fn find_project_manifest_exact(pwd: &Path, file: &str) -> CargoResult<Path> {\n\n let manifest = pwd.join(file);\n\n\n\n if manifest.exists() {\n\n Ok(manifest)\n\n } else {\n\n Err(human(format!(\"Could not find `{}` in `{}`\",\n\n file, pwd.display())))\n\n }\n\n}\n", "file_path": "src/cargo/util/important_paths.rs", "rank": 85, "score": 191712.03236946993 }, { "content": "pub fn package(manifest_path: &Path,\n\n shell: &mut MultiShell) -> CargoResult<Path> {\n\n let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(src.update());\n\n let pkg = try!(src.get_root_package());\n\n\n\n let filename = format!(\"{}-{}.tar.gz\", pkg.get_name(), pkg.get_version());\n\n let dst = pkg.get_manifest_path().dir_path().join(filename);\n\n if dst.exists() { return Ok(dst) }\n\n\n\n try!(shell.status(\"Packaging\", 
pkg.get_package_id().to_string()));\n\n try!(tar(&pkg, &src, shell, &dst).chain_error(|| {\n\n human(\"failed to prepare local package for uploading\")\n\n }));\n\n Ok(dst)\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_package.rs", "rank": 86, "score": 191417.5057988126 }, { "content": "pub fn compile(manifest_path: &Path,\n\n options: &mut CompileOptions)\n\n -> CargoResult<ops::Compilation> {\n\n let CompileOptions { env, ref mut shell, jobs, target, spec,\n\n dev_deps, features, no_default_features } = *options;\n\n let target = target.map(|s| s.to_string());\n\n let features = features.iter().flat_map(|s| {\n\n s.as_slice().split(' ')\n\n }).map(|s| s.to_string()).collect::<Vec<String>>();\n\n\n\n log!(4, \"compile; manifest-path={}\", manifest_path.display());\n\n\n\n if spec.is_some() && (no_default_features || features.len() > 0) {\n\n return Err(human(\"features cannot be modified when the main package \\\n\n is not being built\"))\n\n }\n\n\n\n let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(source.update());\n\n\n", "file_path": "src/cargo/ops/cargo_compile.rs", "rank": 87, "score": 191417.5057988126 }, { "content": "pub fn fetch(manifest_path: &Path,\n\n shell: &mut MultiShell) -> CargoResult<()> {\n\n let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(source.update());\n\n let package = try!(source.get_root_package());\n\n\n\n let mut config = try!(Config::new(shell, None, None));\n\n let mut registry = PackageRegistry::new(&mut config);\n\n try!(resolve_and_fetch(&mut registry, &package));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_fetch.rs", "rank": 88, "score": 191417.5057988126 }, { "content": "pub fn run(manifest_path: &Path,\n\n options: &mut ops::CompileOptions,\n\n args: &[String]) -> CargoResult<Option<ProcessError>> {\n\n let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(src.update());\n\n let root = try!(src.get_root_package());\n\n let 
env = options.env;\n\n let mut bins = root.get_manifest().get_targets().iter().filter(|a| {\n\n a.is_bin() && a.get_profile().get_env() == env\n\n });\n\n let bin = try!(bins.next().require(|| {\n\n human(\"a bin target must be available for `cargo run`\")\n\n }));\n\n match bins.next() {\n\n Some(..) => return Err(human(\"`cargo run` requires that a project only \\\n\n have one executable\")),\n\n None => {}\n\n }\n\n\n\n let compile = try!(ops::compile(manifest_path, options));\n", "file_path": "src/cargo/ops/cargo_run.rs", "rank": 89, "score": 191417.5057988126 }, { "content": "pub fn upload(manifest_path: &Path,\n\n shell: &mut MultiShell,\n\n token: Option<String>,\n\n host: Option<String>) -> CargoResult<()> {\n\n let mut src = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(src.update());\n\n let pkg = try!(src.get_root_package());\n\n\n\n // Parse all configuration options\n\n let UploadConfig { token: token_config, .. } = try!(upload_configuration());\n\n let token = try!(token.or(token_config).require(|| {\n\n human(\"no upload token found, please run `cargo login`\")\n\n }));\n\n let host = host.unwrap_or(try!(RegistrySource::url()).to_string());\n\n let host = try!(host.as_slice().to_url().map_err(human));\n\n let upload = {\n\n let sid = SourceId::new(RegistryKind, host.clone());\n\n let mut config = try!(Config::new(shell, None, None));\n\n let mut src = RegistrySource::new(&sid, &mut config);\n\n try!(src.update().chain_error(|| {\n", "file_path": "src/cargo/ops/cargo_upload.rs", "rank": 90, "score": 191417.5057988126 }, { "content": "pub fn doc(manifest_path: &Path,\n\n options: &mut DocOptions) -> CargoResult<()> {\n\n let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(source.update());\n\n let package = try!(source.get_root_package());\n\n\n\n let mut lib_names = HashSet::new();\n\n let mut bin_names = HashSet::new();\n\n for target in package.get_targets().iter().filter(|t| t.get_profile().is_doc()) 
{\n\n if target.is_lib() {\n\n assert!(lib_names.insert(target.get_name()));\n\n } else {\n\n assert!(bin_names.insert(target.get_name()));\n\n }\n\n }\n\n for bin in bin_names.iter() {\n\n if lib_names.contains(bin) {\n\n return Err(human(\"Cannot document a package where a library and a \\\n\n binary have the same name. Consider renaming one \\\n\n or marking the target as `doc = false`\"))\n", "file_path": "src/cargo/ops/cargo_doc.rs", "rank": 91, "score": 191417.5057988126 }, { "content": "pub fn pkgid(manifest_path: &Path,\n\n spec: Option<&str>,\n\n _shell: &mut MultiShell) -> CargoResult<PackageIdSpec> {\n\n let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(source.update());\n\n let package = try!(source.get_root_package());\n\n\n\n let lockfile = package.get_root().join(\"Cargo.lock\");\n\n let source_id = package.get_package_id().get_source_id();\n\n let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) {\n\n Some(resolve) => resolve,\n\n None => return Err(human(\"A Cargo.lock must exist for this command\"))\n\n };\n\n\n\n let pkgid = match spec {\n\n Some(spec) => try!(resolve.query(spec)),\n\n None => package.get_package_id(),\n\n };\n\n Ok(PackageIdSpec::from_package_id(pkgid))\n\n}\n", "file_path": "src/cargo/ops/cargo_pkgid.rs", "rank": 92, "score": 191417.5057988126 }, { "content": "fn add_submodule<'a>(repo: &'a git2::Repository, url: &str,\n\n path: &Path) -> git2::Submodule<'a> {\n\n let mut s = repo.submodule(url, path, false).unwrap();\n\n let subrepo = s.open().unwrap();\n\n let mut origin = subrepo.find_remote(\"origin\").unwrap();\n\n origin.add_fetch(\"refs/heads/*:refs/heads/*\").unwrap();\n\n origin.fetch(None, None).unwrap();\n\n origin.save().unwrap();\n\n subrepo.checkout_head(None).unwrap();\n\n s.add_finalize().unwrap();\n\n return s;\n\n}\n\n\n", "file_path": "tests/test_cargo_compile_git_deps.rs", "rank": 93, "score": 190387.9301241507 }, { "content": "fn scrape_target_config(config: &mut 
Config,\n\n configs: &HashMap<String, config::ConfigValue>)\n\n -> CargoResult<()> {\n\n let target = match configs.find_equiv(&\"target\") {\n\n None => return Ok(()),\n\n Some(target) => try!(target.table().chain_error(|| {\n\n internal(\"invalid configuration for the key `target`\")\n\n })),\n\n };\n\n let target = match config.target() {\n\n None => target,\n\n Some(triple) => match target.find_equiv(&triple) {\n\n None => return Ok(()),\n\n Some(target) => try!(target.table().chain_error(|| {\n\n internal(format!(\"invalid configuration for the key \\\n\n `target.{}`\", triple))\n\n })),\n\n },\n\n };\n\n\n", "file_path": "src/cargo/ops/cargo_compile.rs", "rank": 94, "score": 189732.0675750759 }, { "content": "fn lookup<'a>(table: &'a toml::TomlTable, key: &str) -> &'a toml::Value {\n\n table.find(&key.to_string()).expect(format!(\"Didn't find {}\", key).as_slice())\n\n}\n", "file_path": "src/cargo/ops/cargo_generate_lockfile.rs", "rank": 95, "score": 188351.0678963749 }, { "content": "pub fn run_tests(manifest_path: &Path,\n\n options: &mut TestOptions,\n\n test_args: &[String]) -> CargoResult<Option<ProcessError>> {\n\n let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));\n\n try!(source.update());\n\n let package = try!(source.get_root_package());\n\n\n\n let mut compile = try!(ops::compile(manifest_path, &mut options.compile_opts));\n\n if options.no_run { return Ok(None) }\n\n compile.tests.sort();\n\n\n\n let cwd = os::getcwd();\n\n for exe in compile.tests.iter() {\n\n let to_display = match exe.path_relative_from(&cwd) {\n\n Some(path) => path,\n\n None => exe.clone(),\n\n };\n\n let cmd = compile.process(exe, &package).args(test_args);\n\n try!(options.compile_opts.shell.concise(|shell| {\n\n shell.status(\"Running\", to_display.display().to_string())\n", "file_path": "src/cargo/ops/cargo_test.rs", "rank": 96, "score": 188232.49681345047 }, { "content": "pub fn run_benches(manifest_path: &Path,\n\n options: &mut TestOptions,\n\n 
args: &[String]) -> CargoResult<Option<ProcessError>> {\n\n let mut args = args.to_vec();\n\n args.push(\"--bench\".to_string());\n\n\n\n run_tests(manifest_path, options, args.as_slice())\n\n}\n", "file_path": "src/cargo/ops/cargo_test.rs", "rank": 97, "score": 188232.49681345047 }, { "content": "pub fn read_packages(path: &Path,\n\n source_id: &SourceId) -> CargoResult<Vec<Package>> {\n\n let mut all_packages = Vec::new();\n\n let mut visited = HashSet::<Path>::new();\n\n\n\n log!(5, \"looking for root package: {}, source_id={}\", path.display(), source_id);\n\n try!(process_possible_package(path, &mut all_packages, source_id, &mut visited));\n\n\n\n try!(walk(path, true, |root, dir| {\n\n log!(5, \"looking for child package: {}\", dir.display());\n\n if root && dir.join(\"target\").is_dir() { return Ok(false); }\n\n if root { return Ok(true) }\n\n if dir.filename_str() == Some(\".git\") { return Ok(false); }\n\n if dir.join(\".git\").exists() { return Ok(false); }\n\n try!(process_possible_package(dir, &mut all_packages, source_id,\n\n &mut visited));\n\n Ok(true)\n\n }));\n\n\n\n if all_packages.is_empty() {\n\n Err(human(format!(\"Could not find Cargo.toml in `{}`\", path.display())))\n\n } else {\n\n log!(5, \"all packages: {}\", all_packages);\n\n Ok(all_packages)\n\n }\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_read_manifest.rs", "rank": 98, "score": 188232.49681345047 }, { "content": "fn enabled() -> bool { os::getenv(\"CARGO_PROFILE\").is_some() }\n\n\n", "file_path": "src/cargo/util/profile.rs", "rank": 99, "score": 186639.7076511515 } ]
Rust
kernel/net/tcp_socket.rs
castarco/kerla
52b15dbfcbf537bdad5b982d4d5cae3a9c7ae743
use crate::{ arch::SpinLock, fs::{ inode::{FileLike, PollStatus}, opened_file::OpenOptions, }, net::{socket::SockAddr, RecvFromFlags}, user_buffer::UserBuffer, user_buffer::{UserBufReader, UserBufWriter, UserBufferMut}, }; use crate::{ arch::SpinLockGuard, result::{Errno, Result}, }; use alloc::{collections::BTreeSet, sync::Arc, vec::Vec}; use core::{cmp::min, convert::TryInto, fmt}; use crossbeam::atomic::AtomicCell; use smoltcp::socket::{SocketRef, TcpSocketBuffer}; use smoltcp::wire::{IpAddress, IpEndpoint, Ipv4Address}; use super::{process_packets, SOCKETS, SOCKET_WAIT_QUEUE}; const BACKLOG_MAX: usize = 8; static INUSE_ENDPOINTS: SpinLock<BTreeSet<u16>> = SpinLock::new(BTreeSet::new()); fn get_ready_backlog_index( sockets: &mut smoltcp::socket::SocketSet, backlogs: &[Arc<TcpSocket>], ) -> Option<usize> { backlogs.iter().position(|sock| { let smol_socket: SocketRef<'_, smoltcp::socket::TcpSocket> = sockets.get(sock.handle); smol_socket.may_recv() || smol_socket.may_send() }) } pub struct TcpSocket { handle: smoltcp::socket::SocketHandle, local_endpoint: AtomicCell<Option<IpEndpoint>>, backlogs: SpinLock<Vec<Arc<TcpSocket>>>, num_backlogs: AtomicCell<usize>, } impl TcpSocket { pub fn new() -> Arc<TcpSocket> { let rx_buffer = TcpSocketBuffer::new(vec![0; 4096]); let tx_buffer = TcpSocketBuffer::new(vec![0; 4096]); let inner = smoltcp::socket::TcpSocket::new(rx_buffer, tx_buffer); let handle = SOCKETS.lock().add(inner); Arc::new(TcpSocket { handle, local_endpoint: AtomicCell::new(None), backlogs: SpinLock::new(Vec::new()), num_backlogs: AtomicCell::new(0), }) } fn refill_backlog_sockets( &self, backlogs: &mut SpinLockGuard<'_, Vec<Arc<TcpSocket>>>, ) -> Result<()> { let local_endpoint = match self.local_endpoint.load() { Some(local_endpoint) => local_endpoint, None => return Err(Errno::EINVAL.into()), }; for _ in 0..(self.num_backlogs.load() - backlogs.len()) { let socket = TcpSocket::new(); SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(socket.handle) 
.listen(local_endpoint)?; backlogs.push(socket); } Ok(()) } } impl FileLike for TcpSocket { fn listen(&self, backlog: i32) -> Result<()> { let mut backlogs = self.backlogs.lock(); let new_num_backlogs = min(backlog as usize, BACKLOG_MAX); backlogs.truncate(new_num_backlogs); self.num_backlogs.store(new_num_backlogs); self.refill_backlog_sockets(&mut backlogs) } fn accept(&self, _options: &OpenOptions) -> Result<(Arc<dyn FileLike>, SockAddr)> { SOCKET_WAIT_QUEUE.sleep_signalable_until(|| { let mut sockets = SOCKETS.lock(); let mut backlogs = self.backlogs.lock(); match get_ready_backlog_index(&mut *sockets, &*backlogs) { Some(index) => { let socket = backlogs.remove(index); drop(sockets); self.refill_backlog_sockets(&mut backlogs)?; let mut sockets_lock = SOCKETS.lock(); let smol_socket: SocketRef<'_, smoltcp::socket::TcpSocket> = sockets_lock.get(socket.handle); Ok(Some(( socket as Arc<dyn FileLike>, smol_socket.remote_endpoint().into(), ))) } None => { Ok(None) } } }) } fn bind(&self, sockaddr: SockAddr) -> Result<()> { self.local_endpoint.store(Some(sockaddr.try_into()?)); Ok(()) } fn getsockname(&self) -> Result<SockAddr> { let endpoint = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .local_endpoint(); if endpoint.addr.is_unspecified() { return Err(Errno::ENOTCONN.into()); } Ok(endpoint.into()) } fn getpeername(&self) -> Result<SockAddr> { let endpoint = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .remote_endpoint(); if endpoint.addr.is_unspecified() { return Err(Errno::ENOTCONN.into()); } Ok(endpoint.into()) } fn connect(&self, sockaddr: SockAddr, _options: &OpenOptions) -> Result<()> { let remote_endpoint: IpEndpoint = sockaddr.try_into()?; let mut inuse_endpoints = INUSE_ENDPOINTS.lock(); let mut local_endpoint = self.local_endpoint.load().unwrap_or(IpEndpoint { addr: IpAddress::Ipv4(Ipv4Address::UNSPECIFIED), port: 0, }); if local_endpoint.port == 0 { let mut port = 50000; while inuse_endpoints.contains(&port) { if port 
== u16::MAX { return Err(Errno::EAGAIN.into()); } port += 1; } local_endpoint.port = port; } SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .connect(remote_endpoint, local_endpoint)?; inuse_endpoints.insert(remote_endpoint.port); drop(inuse_endpoints); process_packets(); SOCKET_WAIT_QUEUE.sleep_signalable_until(|| { if SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .may_send() { Ok(Some(())) } else { Ok(None) } }) } fn write(&self, _offset: usize, buf: UserBuffer<'_>, _options: &OpenOptions) -> Result<usize> { let mut total_len = 0; let mut reader = UserBufReader::from(buf); loop { let copied_len = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .send(|dst| { let copied_len = reader.read_bytes(dst).unwrap_or(0); (copied_len, copied_len) }); process_packets(); match copied_len { Ok(0) => { return Ok(total_len); } Ok(copied_len) => { total_len += copied_len; } Err(err) => return Err(err.into()), } } } fn read(&self, _offset: usize, buf: UserBufferMut<'_>, options: &OpenOptions) -> Result<usize> { let mut writer = UserBufWriter::from(buf); SOCKET_WAIT_QUEUE.sleep_signalable_until(|| { let copied_len = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .recv(|src| { let copied_len = writer.write_bytes(src).unwrap_or(0); (copied_len, copied_len) }); match copied_len { Ok(0) | Err(smoltcp::Error::Exhausted) => { if options.nonblock { Err(Errno::EAGAIN.into()) } else { Ok(None) } } Ok(copied_len) => { Ok(Some(copied_len)) } Err(err) => Err(err.into()), } }) } fn sendto( &self, buf: UserBuffer<'_>, sockaddr: Option<SockAddr>, options: &OpenOptions, ) -> Result<usize> { if sockaddr.is_some() { return Err(Errno::EINVAL.into()); } self.write(0, buf, options) } fn recvfrom( &self, buf: UserBufferMut<'_>, _flags: RecvFromFlags, options: &OpenOptions, ) -> Result<(usize, SockAddr)> { Ok((self.read(0, buf, options)?, self.getpeername()?)) } fn poll(&self) -> Result<PollStatus> { let mut status = PollStatus::empty(); 
let mut sockets = SOCKETS.lock(); if get_ready_backlog_index(&mut *sockets, &*self.backlogs.lock()).is_some() { status |= PollStatus::POLLIN; } let socket = sockets.get::<smoltcp::socket::TcpSocket>(self.handle); if socket.can_recv() { status |= PollStatus::POLLIN; } if socket.can_send() { status |= PollStatus::POLLOUT; } Ok(status) } } impl fmt::Debug for TcpSocket { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("TcpSocket").finish() } }
use crate::{ arch::SpinLock, fs::{ inode::{FileLike, PollStatus}, opened_file::OpenOptions, }, net::{socket::SockAddr, RecvFromFlags}, user_buffer::UserBuffer, user_buffer::{UserBufReader, UserBufWriter, UserBufferMut}, }; use crate::{ arch::SpinLockGuard, result::{Errno, Result}, }; use alloc::{collections::BTreeSet, sync::Arc, vec::Vec}; use core::{cmp::min, convert::TryInto, fmt}; use crossbeam::atomic::AtomicCell; use smoltcp::socket::{SocketRef, TcpSocketBuffer}; use smoltcp::wire::{IpAddress, IpEndpoint, Ipv4Address}; use super::{process_packets, SOCKETS, SOCKET_WAIT_QUEUE}; const BACKLOG_MAX: usize = 8; static INUSE_ENDPOINTS: SpinLock<BTreeSet<u16>> = SpinLock::new(BTreeSet::new()); fn get_ready_backlog_index( sockets: &mut smoltcp::socket::SocketSet, backlogs: &[Arc<TcpSocket>], ) -> Option<usize> { backlogs.iter().position(|sock| { let smol_socket: SocketRef<'_, smoltcp::socket::TcpSocket> = sockets.get(sock.handle); smol_socket.may_recv() || smol_socket.may_send() }) } pub struct TcpSocket { handle: smoltcp::socket::SocketHandle, local_endpoint: AtomicCell<Option<IpEndpoint>>, backlogs: SpinLock<Vec<Arc<TcpSocket>>>, num_backlogs: AtomicCell<usize>, } impl TcpSocket { pub fn new() -> Arc<TcpSocket> { let rx_buffer = TcpSocketBuffer::new(vec![0; 4096]); let tx_buffer = TcpSocketBuffer::new(vec![0; 4096]); let inner = smoltcp::socket::TcpSocket::new(rx_buffer, tx_buffer); let handle = SOCKETS.lock().add(inner); Arc::new(TcpSocket { handle, local_endpoint: AtomicCell::new(None), backlogs: SpinLock::new(Vec::new()), num_backlogs: AtomicCell::new(0), }) } fn refill_backlog_sockets( &self, backlogs: &mut SpinLockGuard<'_, Vec<Arc<TcpSocket>>>, ) -> Result<()> { let local_endpoint = match self.local_endpoint.load() { Some(local_endpoint) => local_endpoint, None => return Err(Errno::EINVAL.into()), }; for _ in 0..(self.num_backlogs.load() - backlogs.len()) { let socket = TcpSocket::new(); SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(socket.handle) 
.listen(local_endpoint)?; backlogs.push(socket); } Ok(()) } } impl FileLike for TcpSocket { fn listen(&self, backlog: i32) -> Result<()> { let mut backlogs = self.backlogs.lock(); let new_num_backlogs = min(backlog as usize, BACKLOG_MAX); backlogs.truncate(new_num_backlogs); self.num_backlogs.store(new_num_backlogs); self.refill_backlog_sockets(&mut backlogs) } fn accept(&self, _options: &OpenOptions) -> Result<(Arc<dyn FileLike>, SockAddr)> { SOCKET_WAIT_QUEUE.sleep_signalable_until(|| { let mut sockets = SOCKETS.lock(); let mut backlogs = self.backlogs.lock(); match get_ready_backlog_index(&mut *sockets, &*backlogs) { Some(index) => { let socket = backlogs.remove(index); drop(sockets); self.refill_backlog_sockets(&mut backlogs)?; let mut sockets_lock = SOCKETS.lock(); let smol_socket: SocketRef<'_, smoltcp::socket::TcpSocket> = sockets_lock.get(socket.handle); Ok(Some(( socket as Arc<dyn FileLike>, smol_socket.remote_endpoint().into(), ))) } None => { Ok(None) } } }) } fn bind(&self, sockaddr: SockAddr) -> Result<()> { self.local_endpoint.store(Some(sockaddr.try_into()?)); Ok(()) }
fn getpeername(&self) -> Result<SockAddr> { let endpoint = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .remote_endpoint(); if endpoint.addr.is_unspecified() { return Err(Errno::ENOTCONN.into()); } Ok(endpoint.into()) } fn connect(&self, sockaddr: SockAddr, _options: &OpenOptions) -> Result<()> { let remote_endpoint: IpEndpoint = sockaddr.try_into()?; let mut inuse_endpoints = INUSE_ENDPOINTS.lock(); let mut local_endpoint = self.local_endpoint.load().unwrap_or(IpEndpoint { addr: IpAddress::Ipv4(Ipv4Address::UNSPECIFIED), port: 0, }); if local_endpoint.port == 0 { let mut port = 50000; while inuse_endpoints.contains(&port) { if port == u16::MAX { return Err(Errno::EAGAIN.into()); } port += 1; } local_endpoint.port = port; } SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .connect(remote_endpoint, local_endpoint)?; inuse_endpoints.insert(remote_endpoint.port); drop(inuse_endpoints); process_packets(); SOCKET_WAIT_QUEUE.sleep_signalable_until(|| { if SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .may_send() { Ok(Some(())) } else { Ok(None) } }) } fn write(&self, _offset: usize, buf: UserBuffer<'_>, _options: &OpenOptions) -> Result<usize> { let mut total_len = 0; let mut reader = UserBufReader::from(buf); loop { let copied_len = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .send(|dst| { let copied_len = reader.read_bytes(dst).unwrap_or(0); (copied_len, copied_len) }); process_packets(); match copied_len { Ok(0) => { return Ok(total_len); } Ok(copied_len) => { total_len += copied_len; } Err(err) => return Err(err.into()), } } } fn read(&self, _offset: usize, buf: UserBufferMut<'_>, options: &OpenOptions) -> Result<usize> { let mut writer = UserBufWriter::from(buf); SOCKET_WAIT_QUEUE.sleep_signalable_until(|| { let copied_len = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .recv(|src| { let copied_len = writer.write_bytes(src).unwrap_or(0); (copied_len, copied_len) }); match 
copied_len { Ok(0) | Err(smoltcp::Error::Exhausted) => { if options.nonblock { Err(Errno::EAGAIN.into()) } else { Ok(None) } } Ok(copied_len) => { Ok(Some(copied_len)) } Err(err) => Err(err.into()), } }) } fn sendto( &self, buf: UserBuffer<'_>, sockaddr: Option<SockAddr>, options: &OpenOptions, ) -> Result<usize> { if sockaddr.is_some() { return Err(Errno::EINVAL.into()); } self.write(0, buf, options) } fn recvfrom( &self, buf: UserBufferMut<'_>, _flags: RecvFromFlags, options: &OpenOptions, ) -> Result<(usize, SockAddr)> { Ok((self.read(0, buf, options)?, self.getpeername()?)) } fn poll(&self) -> Result<PollStatus> { let mut status = PollStatus::empty(); let mut sockets = SOCKETS.lock(); if get_ready_backlog_index(&mut *sockets, &*self.backlogs.lock()).is_some() { status |= PollStatus::POLLIN; } let socket = sockets.get::<smoltcp::socket::TcpSocket>(self.handle); if socket.can_recv() { status |= PollStatus::POLLIN; } if socket.can_send() { status |= PollStatus::POLLOUT; } Ok(status) } } impl fmt::Debug for TcpSocket { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("TcpSocket").finish() } }
fn getsockname(&self) -> Result<SockAddr> { let endpoint = SOCKETS .lock() .get::<smoltcp::socket::TcpSocket>(self.handle) .local_endpoint(); if endpoint.addr.is_unspecified() { return Err(Errno::ENOTCONN.into()); } Ok(endpoint.into()) }
function_block-full_function
[ { "content": "pub fn read_secure_random(buf: UserBufferMut<'_>) -> Result<usize> {\n\n // TODO: Implement arch-agnostic CRNG which does not fully depends on RDRAND.\n\n\n\n UserBufWriter::from(buf).write_with(|slice| {\n\n let valid = unsafe { rdrand_slice(slice) };\n\n if valid {\n\n Ok(slice.len())\n\n } else {\n\n warn_once!(\"RDRAND returned invalid data\");\n\n Ok(0)\n\n }\n\n })\n\n}\n\n\n", "file_path": "kernel/random.rs", "rank": 0, "score": 240441.88525239233 }, { "content": "pub fn read_insecure_random(buf: UserBufferMut<'_>) -> Result<usize> {\n\n // TODO:\n\n read_secure_random(buf)\n\n}\n", "file_path": "kernel/random.rs", "rank": 1, "score": 240441.88525239233 }, { "content": "pub fn read_sockaddr(uaddr: UserVAddr, len: usize) -> Result<SockAddr> {\n\n let sa_family = uaddr.read::<sa_family_t>()?;\n\n let sockaddr = match sa_family as i32 {\n\n AF_INET => {\n\n if len < size_of::<SockAddrIn>() {\n\n return Err(Errno::EINVAL.into());\n\n }\n\n\n\n SockAddr::In(uaddr.read::<SockAddrIn>()?)\n\n }\n\n AF_UNIX => {\n\n // TODO: SHould we check `len` for sockaddr_un as well?\n\n SockAddr::Un(uaddr.read::<SockAddrUn>()?)\n\n }\n\n _ => {\n\n return Err(Errno::EINVAL.into());\n\n }\n\n };\n\n\n\n Ok(sockaddr)\n\n}\n\n\n", "file_path": "kernel/net/socket.rs", "rank": 2, "score": 238007.77460721243 }, { "content": "pub fn write_sockaddr(\n\n sockaddr: &SockAddr,\n\n dst: Option<UserVAddr>,\n\n socklen: Option<UserVAddr>,\n\n) -> Result<()> {\n\n match sockaddr {\n\n SockAddr::In(sockaddr_in) => {\n\n if let Some(dst) = dst {\n\n dst.write::<SockAddrIn>(sockaddr_in)?;\n\n }\n\n\n\n if let Some(socklen) = socklen {\n\n socklen.write::<socklen_t>(&(size_of::<SockAddrIn>() as u32))?;\n\n }\n\n }\n\n SockAddr::Un(sockaddr_un) => {\n\n if let Some(dst) = dst {\n\n dst.write::<SockAddrUn>(sockaddr_un)?;\n\n }\n\n\n\n if let Some(socklen) = socklen {\n\n socklen.write::<socklen_t>(&(size_of::<SockAddrUn>() as u32))?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", 
"file_path": "kernel/net/socket.rs", "rank": 3, "score": 198594.2518518045 }, { "content": "pub fn cpu_local_head() -> &'static mut CpuLocalHead {\n\n unsafe { &mut *(rdgsbase() as *mut CpuLocalHead) }\n\n}\n\n\n\npub unsafe fn init(cpu_local_area: VAddr) {\n\n extern \"C\" {\n\n static __cpu_local: u8;\n\n static __cpu_local_size: u8;\n\n }\n\n\n\n let template = VAddr::new(&__cpu_local as *const _ as usize);\n\n let len = &__cpu_local_size as *const _ as usize;\n\n ptr::copy_nonoverlapping::<u8>(template.as_ptr(), cpu_local_area.as_mut_ptr(), len);\n\n\n\n wrgsbase(cpu_local_area.value() as u64);\n\n}\n", "file_path": "kernel/arch/x64/cpu_local.rs", "rank": 4, "score": 195652.98473824153 }, { "content": "pub fn attach_irq<F: FnMut() + Send + Sync + 'static>(irq: u8, f: F) {\n\n IRQ_HANDLERS.lock()[irq as usize].write(Box::new(f));\n\n enable_irq(irq);\n\n}\n\n\n", "file_path": "kernel/interrupt.rs", "rank": 5, "score": 169485.95201327946 }, { "content": "fn push_usize_to_stack(sp: &mut VAddr, stack_bottom: VAddr, value: usize) -> Result<()> {\n\n if cfg!(target_endian = \"big\") {\n\n push_bytes_to_stack(sp, stack_bottom, &value.to_be_bytes())?;\n\n } else {\n\n push_bytes_to_stack(sp, stack_bottom, &value.to_le_bytes())?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "kernel/process/init_stack.rs", "rank": 6, "score": 164520.58861243527 }, { "content": "pub fn init() {\n\n INITRAM_FS.init(|| {\n\n let image = include_bytes!(concat!(\"../../\", env!(\"INITRAMFS_PATH\")));\n\n Arc::new(InitramFs::new(image))\n\n });\n\n}\n", "file_path": "kernel/fs/initramfs.rs", "rank": 7, "score": 162939.39787057674 }, { "content": "pub fn init() {\n\n TMP_FS.init(|| Arc::new(TmpFs::new()));\n\n}\n", "file_path": "kernel/fs/tmpfs.rs", "rank": 8, "score": 162939.39787057674 }, { "content": "pub fn arch_prctl(current: &Arc<Process>, code: i32, uaddr: UserVAddr) -> Result<()> {\n\n match code {\n\n // TODO: Move to arch directory.\n\n ARCH_SET_FS => {\n\n let value = uaddr.value() 
as u64;\n\n current.arch().fsbase.store(value);\n\n unsafe {\n\n wrfsbase(value);\n\n }\n\n }\n\n _ => {\n\n return Err(Errno::EINVAL.into());\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "kernel/arch/x64/arch_prctl.rs", "rank": 9, "score": 162864.5960918506 }, { "content": "pub fn handle_timer_irq() {\n\n {\n\n let mut timers = TIMERS.lock();\n\n for timer in timers.iter_mut() {\n\n if timer.current > 0 {\n\n timer.current -= 1;\n\n }\n\n }\n\n\n\n timers.retain(|timer| {\n\n if timer.current == 0 {\n\n timer.process.resume();\n\n }\n\n\n\n timer.current > 0\n\n })\n\n }\n\n\n\n WALLCLOCK_TICKS.fetch_add(1, Ordering::Relaxed);\n\n let ticks = MONOTONIC_TICKS.fetch_add(1, Ordering::Relaxed);\n\n if ticks % PREEMPT_PER_TICKS == 0 {\n\n process::switch();\n\n }\n\n}\n", "file_path": "kernel/timer.rs", "rank": 11, "score": 160198.96995424145 }, { "content": "pub fn init() {\n\n DEV_FS.init(|| Arc::new(DevFs::new()));\n\n}\n", "file_path": "kernel/fs/devfs/mod.rs", "rank": 12, "score": 159310.24279803174 }, { "content": "pub fn current_process() -> &'static Arc<Process> {\n\n CURRENT.get()\n\n}\n\n\n", "file_path": "kernel/process/mod.rs", "rank": 13, "score": 158349.27707898358 }, { "content": "pub fn alloc_pages(num_pages: usize, flags: AllocPageFlags) -> Result<PAddr> {\n\n let order = num_pages_to_order(num_pages);\n\n let mut zones = ZONES.lock();\n\n for i in 0..zones.len() {\n\n if let Some(paddr) = zones[i].alloc_pages(order).map(PAddr::new) {\n\n if flags.contains(AllocPageFlags::ZEROED) {\n\n unsafe {\n\n paddr\n\n .as_mut_ptr::<u8>()\n\n .write_bytes(0, num_pages * PAGE_SIZE);\n\n }\n\n }\n\n return Ok(paddr);\n\n }\n\n }\n\n\n\n Err(Errno::ENOMEM.into())\n\n}\n\n\n", "file_path": "kernel/mm/page_allocator.rs", "rank": 14, "score": 156695.66722658815 }, { "content": "pub fn handle_irq(irq: u8) {\n\n let handler = &mut IRQ_HANDLERS.lock()[irq as usize];\n\n unsafe {\n\n (*handler.assume_init_mut())();\n\n }\n\n}\n", "file_path": "kernel/interrupt.rs", 
"rank": 15, "score": 148397.69898838724 }, { "content": "/// Suspends the current process at least `ms` milliseconds.\n\npub fn _sleep_ms(ms: usize) {\n\n TIMERS.lock().push(Timer {\n\n current: ms * TICK_HZ / 1000,\n\n process: current_process().clone(),\n\n });\n\n\n\n current_process().set_state(ProcessState::BlockedSignalable);\n\n switch();\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct WallClock {\n\n ticks_from_epoch: usize,\n\n}\n\n\n\nimpl WallClock {\n\n pub fn secs_from_epoch(self) -> usize {\n\n self.ticks_from_epoch / TICK_HZ\n\n }\n\n\n\n pub fn msecs_from_epoch(self) -> usize {\n\n self.ticks_from_epoch / (TICK_HZ / 1000)\n\n }\n\n\n\n pub fn nanosecs_from_epoch(self) -> usize {\n\n self.msecs_from_epoch() * 1_000_000\n\n }\n\n}\n\n\n", "file_path": "kernel/timer.rs", "rank": 16, "score": 148303.46250412118 }, { "content": "struct DirInner {\n\n files: HashMap<String, TmpFsINode>,\n\n stat: Stat,\n\n}\n\n\n\npub struct Dir(SpinLock<DirInner>);\n\n\n\nimpl Dir {\n\n pub fn new(inode_no: INodeNo) -> Dir {\n\n Dir(SpinLock::new(DirInner {\n\n files: HashMap::new(),\n\n stat: Stat {\n\n inode_no,\n\n mode: FileMode::new(S_IFDIR | 0o755),\n\n ..Stat::zeroed()\n\n },\n\n }))\n\n }\n\n\n\n pub fn add_dir(&self, name: &str) -> Arc<Dir> {\n", "file_path": "kernel/fs/tmpfs.rs", "rank": 17, "score": 146371.22396902632 }, { "content": "fn syscall_name_by_number(n: usize) -> &'static str {\n\n match n {\n\n 0 => \"read\",\n\n 1 => \"write\",\n\n 2 => \"open\",\n\n 3 => \"close\",\n\n 4 => \"stat\",\n\n 5 => \"fstat\",\n\n 6 => \"lstat\",\n\n 7 => \"poll\",\n\n 8 => \"lseek\",\n\n 9 => \"mmap\",\n\n 10 => \"mprotect\",\n\n 11 => \"munmap\",\n\n 12 => \"brk\",\n\n 13 => \"rt_sigaction\",\n\n 14 => \"rt_sigprocmask\",\n\n 15 => \"rt_sigreturn\",\n\n 16 => \"ioctl\",\n\n 17 => \"pread64\",\n", "file_path": "kernel/syscalls/mod.rs", "rank": 18, "score": 141119.81461798504 }, { "content": "fn resolve_path(uaddr: usize) -> Result<PathBuf> {\n\n 
const PATH_MAX: usize = 512;\n\n Ok(Path::new(UserCStr::new(UserVAddr::new_nonnull(uaddr)?, PATH_MAX)?.as_str()).to_path_buf())\n\n}\n\n\n\npub struct SyscallHandler<'a> {\n\n pub frame: &'a mut SyscallFrame,\n\n}\n\n\n\nimpl<'a> SyscallHandler<'a> {\n\n pub fn new(frame: &'a mut SyscallFrame) -> SyscallHandler<'a> {\n\n SyscallHandler { frame }\n\n }\n\n\n\n #[allow(clippy::too_many_arguments)]\n\n pub fn dispatch(\n\n &mut self,\n\n a1: usize,\n\n a2: usize,\n\n a3: usize,\n", "file_path": "kernel/syscalls/mod.rs", "rank": 19, "score": 137884.43949997623 }, { "content": "pub fn handle_page_fault(unaligned_vaddr: UserVAddr, ip: usize, _reason: PageFaultReason) {\n\n let current = current_process();\n\n let aligned_vaddr = match UserVAddr::new_nonnull(align_down(unaligned_vaddr.value(), PAGE_SIZE))\n\n {\n\n Ok(uaddr) => uaddr,\n\n _ => {\n\n debug_warn!(\n\n \"invalid memory access at {} (ip={:x}), killing the current process...\",\n\n unaligned_vaddr,\n\n ip\n\n );\n\n Process::exit_by_signal(SIGSEGV);\n\n }\n\n };\n\n\n\n // Look for the associated vma area.\n\n let vm_ref = current.vm();\n\n let mut vm = vm_ref.as_ref().unwrap().lock();\n\n let vma = match vm\n\n .vm_areas()\n", "file_path": "kernel/mm/page_fault.rs", "rank": 20, "score": 136463.7140072851 }, { "content": "pub fn init() {\n\n let mut handlers = IRQ_HANDLERS.lock();\n\n for handler in handlers.iter_mut() {\n\n handler.write(Box::new(empty_irq_handler));\n\n }\n\n}\n\n\n", "file_path": "kernel/interrupt.rs", "rank": 21, "score": 126589.70523708174 }, { "content": "pub fn init() {\n\n POLL_WAIT_QUEUE.init(WaitQueue::new);\n\n}\n", "file_path": "kernel/poll.rs", "rank": 22, "score": 126589.70523708174 }, { "content": "pub fn init() {\n\n PIPE_WAIT_QUEUE.init(WaitQueue::new);\n\n}\n", "file_path": "kernel/pipe.rs", "rank": 23, "score": 126589.70523708174 }, { "content": "/// Prints a backtrace.\n\npub fn backtrace() {\n\n Backtrace::current_frame().traverse(|i, vaddr| {\n\n if let Some(symbol) = 
resolve_symbol(vaddr) {\n\n warn!(\n\n \" {index}: {vaddr} {symbol_name}()+0x{offset:x}\",\n\n index = i,\n\n vaddr = vaddr,\n\n symbol_name = symbol.name,\n\n offset = vaddr.value() - symbol.addr.value(),\n\n );\n\n } else {\n\n warn!(\n\n \" {index}: {vaddr} (symbol unknown)\",\n\n index = i,\n\n vaddr = vaddr,\n\n );\n\n }\n\n });\n\n}\n\n\n\npub struct CapturedBacktraceFrame {\n\n pub vaddr: VAddr,\n\n pub offset: usize,\n\n pub symbol_name: &'static str,\n\n}\n\n\n\npub struct CapturedBacktrace {\n\n pub trace: Box<ArrayVec<CapturedBacktraceFrame, 8>>,\n\n}\n\n\n", "file_path": "kernel/printk.rs", "rank": 24, "score": 126589.70523708174 }, { "content": "fn push_bytes_to_stack(sp: &mut VAddr, stack_bottom: VAddr, buf: &[u8]) -> Result<()> {\n\n if sp.sub(buf.len()) < stack_bottom {\n\n return Err(Error::with_message(Errno::E2BIG, \"too big argvp/envp/auxv\"));\n\n }\n\n\n\n *sp = sp.sub(buf.len());\n\n sp.write_bytes(buf);\n\n Ok(())\n\n}\n\n\n", "file_path": "kernel/process/init_stack.rs", "rank": 25, "score": 125007.09027929768 }, { "content": "pub fn init_logger() {\n\n log::set_logger(&LOGGER).unwrap();\n\n log::set_max_level(if cfg!(debug_assertions) {\n\n log::LevelFilter::Trace\n\n } else {\n\n log::LevelFilter::Info\n\n });\n\n}\n\n\n", "file_path": "kernel/boot.rs", "rank": 26, "score": 124003.99829806245 }, { "content": "pub fn init() {\n\n // Initialize the array of all device drivers.\n\n DRIVER_BUILDERS\n\n .lock()\n\n .push(Box::new(VirtioNetBuilder::new()));\n\n}\n", "file_path": "kernel/drivers/mod.rs", "rank": 27, "score": 124003.99829806245 }, { "content": "pub fn init() {\n\n // Scan PCI devices.\n\n for device in enumerate_pci_devices() {\n\n trace!(\n\n \"pci: found a device: id={:04x}:{:04x}, bar0={:016x?}, irq={}\",\n\n device.config().vendor_id(),\n\n device.config().device_id(),\n\n device.config().bar(0),\n\n device.config().interrupt_line()\n\n );\n\n\n\n for builder in DRIVER_BUILDERS.lock().iter() {\n\n 
builder.attach_pci(&device).ok();\n\n }\n\n }\n\n}\n", "file_path": "kernel/drivers/pci.rs", "rank": 28, "score": 124003.99829806245 }, { "content": "/// Yields execution to another thread.\n\npub fn switch() {\n\n // Save the current interrupt enable flag to restore it in the next execution\n\n // of the currently running thread.\n\n let interrupt_enabled = is_interrupt_enabled();\n\n\n\n let prev = current_process().clone();\n\n let prev_pid = prev.pid();\n\n let prev_state = prev.state();\n\n let next = {\n\n let scheduler = SCHEDULER.lock();\n\n\n\n // Push back the currently running thread to the runqueue if it's still\n\n // ready for running, in other words, it's not blocked.\n\n if prev_pid != PId::new(0) && prev_state == ProcessState::Runnable {\n\n scheduler.enqueue(prev_pid);\n\n }\n\n\n\n // Pick a thread to run next.\n\n match scheduler.pick_next() {\n\n Some(next_pid) => PROCESSES.lock().get(&next_pid).unwrap().clone(),\n", "file_path": "kernel/process/switch.rs", "rank": 29, "score": 124003.99829806245 }, { "content": "pub fn init() {\n\n JOIN_WAIT_QUEUE.init(WaitQueue::new);\n\n SCHEDULER.init(|| SpinLock::new(Scheduler::new()));\n\n let idle_thread = Process::new_idle_thread().unwrap();\n\n IDLE_THREAD.as_mut().set(idle_thread.clone());\n\n CURRENT.as_mut().set(idle_thread);\n\n}\n", "file_path": "kernel/process/mod.rs", "rank": 30, "score": 124003.99829806245 }, { "content": "pub fn init() {\n\n let neighbor_cache = NeighborCache::new(BTreeMap::new());\n\n let driver = get_ethernet_driver().expect(\"no ethernet drivers\");\n\n let mac_addr = driver.lock().mac_addr().unwrap();\n\n let ethernet_addr = EthernetAddress(mac_addr.as_array());\n\n let ip_addrs = [IpCidr::new(wire::Ipv4Address::UNSPECIFIED.into(), 0)];\n\n let routes = Routes::new(BTreeMap::new());\n\n let iface = EthernetInterfaceBuilder::new(OurDevice)\n\n .ethernet_addr(ethernet_addr)\n\n .neighbor_cache(neighbor_cache)\n\n .ip_addrs(ip_addrs)\n\n .routes(routes)\n\n 
.finalize();\n\n\n\n let mut sockets = SocketSet::new(vec![]);\n\n let dhcp_rx_buffer = RawSocketBuffer::new([RawPacketMetadata::EMPTY; 4], vec![0; 2048]);\n\n let dhcp_tx_buffer = RawSocketBuffer::new([RawPacketMetadata::EMPTY; 4], vec![0; 2048]);\n\n let dhcp = Dhcpv4Client::new(\n\n &mut sockets,\n\n dhcp_rx_buffer,\n", "file_path": "kernel/net/mod.rs", "rank": 31, "score": 124003.99829806245 }, { "content": "fn push_aux_data_to_stack(sp: &mut VAddr, stack_bottom: VAddr, auxv: &Auxv) -> Result<()> {\n\n #[allow(clippy::single_match)]\n\n match auxv {\n\n Auxv::Random(values) => push_bytes_to_stack(sp, stack_bottom, values.as_slice())?,\n\n _ => {}\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "kernel/process/init_stack.rs", "rank": 32, "score": 123157.90950506058 }, { "content": "fn parse_hex_field(bytes: &[u8]) -> usize {\n\n usize::from_str_radix(parse_str_field(bytes), 16).unwrap()\n\n}\n\n\n\npub static INITRAM_FS: Once<Arc<InitramFs>> = Once::new();\n\n\n", "file_path": "kernel/fs/initramfs.rs", "rank": 33, "score": 122446.93814016119 }, { "content": "#[cfg_attr(test, allow(unused))]\n\npub fn halt() -> ! {\n\n semihosting_halt(ExitStatus::Success);\n\n\n\n loop {\n\n unsafe {\n\n asm!(\"cli; hlt\");\n\n }\n\n }\n\n}\n", "file_path": "kernel/arch/x64/idle.rs", "rank": 34, "score": 121578.10964020094 }, { "content": "pub fn idle() {\n\n unsafe {\n\n asm!(\"sti; hlt\");\n\n }\n\n}\n\n\n", "file_path": "kernel/arch/x64/idle.rs", "rank": 35, "score": 121578.10964020094 }, { "content": "pub fn end_tests() -> ! 
{\n\n semihosting_halt(ExitStatus::Success);\n\n\n\n #[allow(clippy::empty_loop)]\n\n loop {}\n\n}\n\n\n\nstatic ALREADY_PANICED: AtomicBool = AtomicBool::new(false);\n\n\n", "file_path": "kernel/test_runner.rs", "rank": 36, "score": 121578.10964020094 }, { "content": "pub fn init() {\n\n enable_irq(4);\n\n}\n", "file_path": "kernel/arch/x64/serial.rs", "rank": 37, "score": 121578.10964020094 }, { "content": "/// Called after the memory allocator is initialized.\n\npub fn init() {\n\n serial::init();\n\n}\n", "file_path": "kernel/arch/x64/boot.rs", "rank": 38, "score": 121578.10964020094 }, { "content": "pub fn process_packets() {\n\n let mut sockets = SOCKETS.lock();\n\n let mut iface = INTERFACE.lock();\n\n let mut dhcp = DHCP_CLIENT.lock();\n\n\n\n let timestamp = read_monotonic_clock().into();\n\n loop {\n\n if let Some(config) = dhcp\n\n .poll(&mut iface, &mut sockets, timestamp)\n\n .unwrap_or_else(|e| {\n\n trace!(\"DHCP: {:?}\", e);\n\n None\n\n })\n\n {\n\n if let Some(cidr) = config.address {\n\n iface.update_ip_addrs(|addrs| {\n\n if let Some(addr) = addrs.iter_mut().next() {\n\n *addr = IpCidr::Ipv4(cidr);\n\n }\n\n });\n", "file_path": "kernel/net/mod.rs", "rank": 39, "score": 121578.10964020094 }, { "content": "pub fn init() {\n\n unsafe {\n\n // TODO: Expand the kernel heap when it has been exhausted.\n\n let size = 1024 * 1024;\n\n let start = alloc_pages(size / PAGE_SIZE, AllocPageFlags::KERNEL)\n\n .expect(\"failed to reserve memory pages for the global alllocator\")\n\n .as_vaddr()\n\n .value();\n\n ALLOCATOR.lock().init(start, size);\n\n }\n\n\n\n KERNEL_HEAP_ENABLED.store(true, Ordering::Release);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #![allow(clippy::vec_init_then_push)]\n\n\n\n #[test_case]\n\n fn alloc_crate_test() {\n\n use alloc::vec::Vec;\n\n let mut v = Vec::with_capacity(1);\n\n v.push('a');\n\n v.push('b');\n\n v.push('c');\n\n assert_eq!(v.as_slice(), &['a', 'b', 'c']);\n\n }\n\n}\n", "file_path": 
"kernel/mm/global_allocator.rs", "rank": 40, "score": 121578.10964020094 }, { "content": "pub fn get_ethernet_driver() -> Option<Arc<SpinLock<dyn EthernetDriver>>> {\n\n ETHERNET_DRIVERS.lock().get(0).cloned()\n\n}\n\n\n", "file_path": "kernel/drivers/mod.rs", "rank": 41, "score": 121038.43510419589 }, { "content": "pub fn register_ethernet_driver(driver: Arc<SpinLock<dyn EthernetDriver>>) {\n\n ETHERNET_DRIVERS.lock().push(driver);\n\n}\n\n\n", "file_path": "kernel/drivers/mod.rs", "rank": 42, "score": 121038.43510419589 }, { "content": "pub fn ack_interrupt() {\n\n unsafe {\n\n APIC.lock().write_eoi();\n\n }\n\n}\n\n\n\npub unsafe fn init() {\n\n // Activate Local APIC.\n\n let apic_base = rdmsr(msr::APIC_BASE);\n\n wrmsr(msr::APIC_BASE, apic_base | APIC_BASE_EN);\n\n\n\n let apic = APIC.lock();\n\n apic.write_spurious_interrupt(SIVR_SOFT_EN);\n\n}\n", "file_path": "kernel/arch/x64/apic.rs", "rank": 43, "score": 119297.66629179232 }, { "content": "pub fn irq_handler() {\n\n while let Some(ch) = read_char() {\n\n if ch == b'\\r' {\n\n SERIAL_TTY.input_char(b'\\n');\n\n } else {\n\n SERIAL_TTY.input_char(ch);\n\n }\n\n }\n\n}\n\n\n\npub unsafe fn early_init() {\n\n let divisor: u16 = 12; // 115200 / 9600 = 12\n\n outb(IOPORT_SERIAL + IER, 0x00); // Disable interrupts.\n\n outb(IOPORT_SERIAL + DLL, (divisor & 0xff) as u8);\n\n outb(IOPORT_SERIAL + DLH, ((divisor >> 8) & 0xff) as u8);\n\n outb(IOPORT_SERIAL + LCR, 0x03); // 8n1.\n\n outb(IOPORT_SERIAL + FCR, 0x01); // Enable FIFO.\n\n outb(IOPORT_SERIAL + IER, 0x01); // Enable interrupts.\n\n}\n\n\n", "file_path": "kernel/arch/x64/serial.rs", "rank": 44, "score": 119297.66629179232 }, { "content": "/// Duplicates entires (and referenced memory pages if `level == 1`) in the\n\n/// nth-level page table. 
Returns the newly created copy of the page table.\n\n///\n\n/// fork(2) uses this funciton to duplicate the memory space.\n\nfn duplicate_table(original_table_paddr: PAddr, level: usize) -> Result<PAddr> {\n\n let orig_table = unsafe { original_table_paddr.as_ptr::<PageTableEntry>() };\n\n let new_table_paddr = alloc_pages(1, AllocPageFlags::KERNEL)?;\n\n let new_table = unsafe { new_table_paddr.as_mut_ptr::<PageTableEntry>() };\n\n\n\n debug_assert!(level > 0);\n\n for i in 0..ENTRIES_PER_TABLE {\n\n let entry = unsafe { *orig_table.offset(i as isize) };\n\n let paddr = entry_paddr(entry);\n\n\n\n // Check if we need to copy the entry.\n\n if paddr.is_null() {\n\n continue;\n\n }\n\n\n\n // Create a deep copy of the page table entry.\n\n let new_paddr = if level == 1 {\n\n // Copy a physical page referenced from the last-level page table.\n\n let new_paddr = alloc_pages(1, AllocPageFlags::KERNEL)?;\n\n unsafe {\n", "file_path": "kernel/arch/x64/page_table.rs", "rank": 45, "score": 116250.04746355768 }, { "content": "/// Returns a saved backtrace.\n\npub fn capture_backtrace() -> CapturedBacktrace {\n\n let mut trace = Box::new(ArrayVec::new());\n\n Backtrace::current_frame().traverse(|_, vaddr| {\n\n if let Some(symbol) = resolve_symbol(vaddr) {\n\n let _ = trace.try_push(CapturedBacktraceFrame {\n\n vaddr,\n\n symbol_name: symbol.name,\n\n offset: vaddr.value() - symbol.addr.value(),\n\n });\n\n }\n\n });\n\n CapturedBacktrace { trace }\n\n}\n\n\n\nimpl fmt::Debug for CapturedBacktrace {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for (i, frame) in self.trace.iter().enumerate() {\n\n let _ = writeln!(\n\n f,\n\n \" #{}: {} {}()+0x{:x}\",\n", "file_path": "kernel/printk.rs", "rank": 46, "score": 114697.83690185737 }, { "content": "struct InitramFsFile {\n\n filename: &'static str,\n\n data: &'static [u8],\n\n stat: Stat,\n\n}\n\n\n\nimpl FileLike for InitramFsFile {\n\n fn read(&self, offset: usize, buf: UserBufferMut<'_>, _options: 
&OpenOptions) -> Result<usize> {\n\n if offset > self.data.len() {\n\n return Ok(0);\n\n }\n\n\n\n let mut writer = UserBufWriter::from(buf);\n\n writer.write_bytes(&self.data[offset..])\n\n }\n\n\n\n fn write(&self, _offset: usize, _buf: UserBuffer<'_>, _options: &OpenOptions) -> Result<usize> {\n\n Err(Error::new(Errno::EROFS))\n\n }\n\n\n", "file_path": "kernel/fs/initramfs.rs", "rank": 47, "score": 114158.1529660802 }, { "content": "struct InitramFsSymlink {\n\n filename: &'static str,\n\n stat: Stat,\n\n dst: PathBuf,\n\n}\n\n\n\nimpl Symlink for InitramFsSymlink {\n\n fn stat(&self) -> Result<Stat> {\n\n Ok(self.stat)\n\n }\n\n\n\n fn linked_to(&self) -> Result<PathBuf> {\n\n Ok(self.dst.clone())\n\n }\n\n}\n\n\n\nimpl fmt::Debug for InitramFsSymlink {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"InitramFsSymlink\")\n\n .field(\"name\", &self.filename)\n", "file_path": "kernel/fs/initramfs.rs", "rank": 48, "score": 114158.1529660802 }, { "content": "struct InitramFsDir {\n\n filename: &'static str,\n\n stat: Stat,\n\n files: HashMap<&'static str, InitramFsINode>,\n\n}\n\n\n\nimpl Directory for InitramFsDir {\n\n fn stat(&self) -> Result<Stat> {\n\n Ok(self.stat)\n\n }\n\n\n\n fn link(&self, _name: &str, _link_to: &INode) -> Result<()> {\n\n Err(Error::new(Errno::ENOSYS))\n\n }\n\n\n\n fn readdir(&self, index: usize) -> Result<Option<DirEntry>> {\n\n let entry = self.files.values().nth(index).map(|entry| match entry {\n\n InitramFsINode::Directory(dir) => DirEntry {\n\n inode_no: dir.stat.inode_no,\n\n file_type: FileType::Directory,\n", "file_path": "kernel/fs/initramfs.rs", "rank": 49, "score": 114158.1529660802 }, { "content": "pub fn is_interrupt_enabled() -> bool {\n\n x86::current::rflags::read().contains(RFlags::FLAGS_IF)\n\n}\n", "file_path": "kernel/arch/x64/interrupt.rs", "rank": 50, "score": 112545.32706346497 }, { "content": "pub fn read_monotonic_clock() -> MonotonicClock {\n\n MonotonicClock {\n\n ticks: 
MONOTONIC_TICKS.load(Ordering::Relaxed),\n\n }\n\n}\n\n\n\n/// `struct timeval`\n\n#[derive(Debug, Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct Timeval {\n\n tv_sec: c_time,\n\n tv_usec: c_suseconds,\n\n}\n\n\n\nimpl Timeval {\n\n pub fn as_msecs(&self) -> usize {\n\n (self.tv_sec as usize) * 1000 + (self.tv_usec as usize) / 1000\n\n }\n\n}\n\n\n", "file_path": "kernel/timer.rs", "rank": 51, "score": 112545.32706346497 }, { "content": "pub fn read_wall_clock() -> WallClock {\n\n WallClock {\n\n ticks_from_epoch: WALLCLOCK_TICKS.load(Ordering::Relaxed),\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub struct MonotonicClock {\n\n ticks: usize,\n\n}\n\n\n\nimpl MonotonicClock {\n\n pub fn secs(self) -> usize {\n\n self.ticks / TICK_HZ\n\n }\n\n\n\n pub fn msecs(self) -> usize {\n\n self.ticks / (TICK_HZ / 1000)\n\n }\n\n\n\n pub fn nanosecs(self) -> usize {\n\n self.msecs() * 1_000_000\n\n }\n\n\n\n pub fn elapsed_msecs(self) -> usize {\n\n // FIXME: Consider wrapping.\n\n (read_monotonic_clock().ticks - self.ticks) / (TICK_HZ / 1000)\n\n }\n\n}\n\n\n", "file_path": "kernel/timer.rs", "rank": 52, "score": 112545.32706346497 }, { "content": "fn allocate_pml4() -> Result<PAddr> {\n\n extern \"C\" {\n\n static __kernel_pml4: u8;\n\n }\n\n\n\n let pml4 = alloc_pages(1, AllocPageFlags::KERNEL)?;\n\n\n\n // Map kernel pages.\n\n unsafe {\n\n let kernel_pml4 = PAddr::new(&__kernel_pml4 as *const u8 as usize).as_vaddr();\n\n pml4.as_mut_ptr::<u8>().write_bytes(0, PAGE_SIZE);\n\n ptr::copy_nonoverlapping::<u8>(kernel_pml4.as_ptr(), pml4.as_mut_ptr(), PAGE_SIZE);\n\n }\n\n\n\n // The kernel no longer access a virtual address around 0x0000_0000. 
Unmap\n\n // the area to catch bugs (especially NULL pointer dereferences in the\n\n // kernel).\n\n //\n\n // TODO: Is it able to unmap in boot.S before running bsp_early_init?\n\n unsafe {\n", "file_path": "kernel/arch/x64/page_table.rs", "rank": 53, "score": 110803.44643930843 }, { "content": "/// Enumerates all PCI devices.\n\npub fn enumerate_pci_devices() -> PciScanner {\n\n PciScanner {\n\n bus: PciBus::new(),\n\n bus_no: 0,\n\n slot: 0,\n\n }\n\n}\n\n\n\nimpl Iterator for PciScanner {\n\n type Item = PciDevice;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n while !(self.bus_no == 255 && self.slot == 31) {\n\n if self.slot == 31 {\n\n self.bus_no += 1;\n\n self.slot = 0;\n\n }\n\n\n\n let config = self.bus.read_device_config(self.bus_no, self.slot);\n\n self.slot += 1;\n\n\n", "file_path": "kernel/drivers/pci.rs", "rank": 54, "score": 110519.11205298133 }, { "content": "pub fn is_kernel_heap_enabled() -> bool {\n\n KERNEL_HEAP_ENABLED.load(Ordering::Acquire)\n\n}\n\n\n", "file_path": "kernel/mm/global_allocator.rs", "rank": 55, "score": 110519.11205298133 }, { "content": "struct PipeInner {\n\n buf: RingBuffer<u8, PIPE_SIZE>,\n\n closed_by_reader: bool,\n\n closed_by_writer: bool,\n\n}\n\n\n\npub struct Pipe(Arc<SpinLock<PipeInner>>);\n\n\n\nimpl Pipe {\n\n pub fn new() -> Pipe {\n\n Pipe(Arc::new(SpinLock::new(PipeInner {\n\n buf: RingBuffer::new(),\n\n closed_by_reader: false,\n\n closed_by_writer: false,\n\n })))\n\n }\n\n\n\n pub fn write_end(&self) -> Arc<PipeWriter> {\n\n Arc::new(PipeWriter(self.0.clone()))\n\n }\n", "file_path": "kernel/pipe.rs", "rank": 56, "score": 110052.86760967315 }, { "content": "struct File {\n\n data: SpinLock<Vec<u8>>,\n\n stat: Stat,\n\n}\n\n\n\nimpl File {\n\n pub fn new(inode_no: INodeNo) -> File {\n\n File {\n\n data: SpinLock::new(Vec::new()),\n\n stat: Stat {\n\n inode_no,\n\n mode: FileMode::new(S_IFREG | 0o644),\n\n ..Stat::zeroed()\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl FileLike for File {\n\n fn stat(&self) -> 
Result<Stat> {\n", "file_path": "kernel/fs/tmpfs.rs", "rank": 57, "score": 109113.67579550287 }, { "content": "pub fn print_str(s: &[u8]) {\n\n for ch in s {\n\n printchar(*ch as char);\n\n }\n\n}\n\n\n", "file_path": "kernel/arch/x64/serial.rs", "rank": 58, "score": 108786.90701202542 }, { "content": "pub fn boot_kernel(bootinfo: &BootInfo) -> ! {\n\n info!(\"Booting Kerla...\");\n\n\n\n // Initialize memory allocators first.\n\n page_allocator::init(&bootinfo.ram_areas);\n\n global_allocator::init();\n\n interrupt::init();\n\n\n\n #[cfg(test)]\n\n {\n\n crate::test_main();\n\n end_tests();\n\n }\n\n\n\n // Initialize kernel subsystems.\n\n arch::init();\n\n pipe::init();\n\n poll::init();\n\n devfs::init();\n\n tmpfs::init();\n", "file_path": "kernel/boot.rs", "rank": 59, "score": 108786.90701202542 }, { "content": "pub fn printchar(ch: char) {\n\n unsafe {\n\n if ch == '\\n' && option_env!(\"DISABLE_AUTO_CR_PRINT\").is_none() {\n\n serial_write('\\r');\n\n }\n\n serial_write(ch);\n\n }\n\n}\n\n\n", "file_path": "kernel/arch/x64/serial.rs", "rank": 60, "score": 108786.90701202542 }, { "content": "pub fn init(areas: &[RamArea]) {\n\n let mut zones = ZONES.lock();\n\n for area in areas {\n\n info!(\n\n \"available RAM: base={:x}, size={}\",\n\n area.base.value(),\n\n ByteSize::new(area.len)\n\n );\n\n\n\n zones.push(Allocator::new(\n\n unsafe { area.base.as_mut_ptr() },\n\n area.base.value(),\n\n area.len,\n\n ));\n\n }\n\n}\n", "file_path": "kernel/mm/page_allocator.rs", "rank": 61, "score": 106760.69200154177 }, { "content": "pub fn enable_irq(irq: u8) {\n\n let ioapic = IO_APIC.lock();\n\n unsafe {\n\n let entry = (VECTOR_IRQ_BASE as u64) + (irq as u64);\n\n ioapic.write_iored_tbl(irq as u32, entry);\n\n }\n\n}\n\n\n\npub unsafe fn init() {\n\n // symmetric I/O mode.\n\n // FIXME: Do we need this?\n\n outb(0x22, 0x70);\n\n outb(0x23, 0x01);\n\n\n\n // Mask (disable) all hardware interrupts for now.\n\n let ioapic = IO_APIC.lock();\n\n let n = ((ioapic.read_ver() 
>> 16) & 0xff) + 1;\n\n for i in 0..n {\n\n ioapic.write_iored_tbl(i, 1 << 16 /* masked */);\n\n }\n\n}\n", "file_path": "kernel/arch/x64/ioapic.rs", "rank": 62, "score": 106760.69200154177 }, { "content": "pub fn send_ethernet_frame(frame: &[u8]) {\n\n DRIVER.lock().transmit(frame).unwrap();\n\n}\n\n\n", "file_path": "kernel/net/mod.rs", "rank": 63, "score": 106760.69200154177 }, { "content": "pub fn receive_ethernet_frame(frame: &[u8]) {\n\n if RX_PACKET_QUEUE.lock().push(frame.to_vec()).is_err() {\n\n // TODO: Introduce warn_once! macro\n\n warn!(\"the rx packet queue is full; dropping an incoming packet\");\n\n }\n\n}\n\n\n\nimpl From<MonotonicClock> for Instant {\n\n fn from(value: MonotonicClock) -> Self {\n\n // FIXME: msecs could be larger than i64\n\n Instant::from_millis(value.msecs() as i64)\n\n }\n\n}\n\n\n\npub(self) static SOCKETS: Once<SpinLock<SocketSet>> = Once::new();\n\nstatic INTERFACE: Once<SpinLock<EthernetInterface<OurDevice>>> = Once::new();\n\nstatic DHCP_CLIENT: Once<SpinLock<Dhcpv4Client>> = Once::new();\n\npub(self) static SOCKET_WAIT_QUEUE: Once<WaitQueue> = Once::new();\n\n\n", "file_path": "kernel/net/mod.rs", "rank": 64, "score": 106760.69200154177 }, { "content": "#[derive(Debug, Copy, Clone)]\n\n#[repr(C, packed)]\n\nstruct VirtqUsed {\n\n flags: u16,\n\n index: u16,\n\n // The rings (an array of VirtqUsedElem) immediately follows here.\n\n}\n\n\n\npub enum VirtqDescBuffer {\n\n ReadOnlyFromDevice { addr: PAddr, len: usize },\n\n WritableFromDevice { addr: PAddr, len: usize },\n\n}\n\n\n\npub struct VirtqUsedChain {\n\n pub descs: Vec<VirtqDescBuffer>,\n\n pub total_len: usize,\n\n}\n\n\n\n/// A virtqueue.\n\npub struct VirtQueue {\n\n index: u16,\n\n transport: Arc<dyn VirtioTransport>,\n", "file_path": "kernel/drivers/virtio/virtio.rs", "rank": 65, "score": 105004.19272687729 }, { "content": "pub fn semihosting_halt(status: ExitStatus) {\n\n unsafe {\n\n outw(0x501, status as u16);\n\n }\n\n}\n", "file_path": 
"kernel/arch/x64/semihosting.rs", "rank": 66, "score": 104845.93582547232 }, { "content": "pub fn run_tests(tests: &[&dyn Testable]) {\n\n println!(\"Running {} tests\\n\", tests.len());\n\n for test in tests {\n\n test.run();\n\n }\n\n println!();\n\n println!(\"\\x1b[92mPassed all tests :)\\x1b[0m\\n\");\n\n}\n\n\n", "file_path": "kernel/test_runner.rs", "rank": 67, "score": 103634.85199956247 }, { "content": "pub fn init(mmio_devices: &[VirtioMmioDevice]) {\n\n for device in mmio_devices {\n\n for builder in DRIVER_BUILDERS.lock().iter() {\n\n builder.attach_virtio_mmio(device).ok();\n\n }\n\n }\n\n}\n", "file_path": "kernel/drivers/virtio/mod.rs", "rank": 68, "score": 103033.6879050189 }, { "content": "#[derive(Clone)]\n\nstruct LocalOpenedFile {\n\n opened_file: Arc<OpenedFile>,\n\n close_on_exec: bool,\n\n}\n\n\n\n/// The opened file table.\n\n#[derive(Clone)]\n\npub struct OpenedFileTable {\n\n files: Vec<Option<LocalOpenedFile>>,\n\n prev_fd: i32,\n\n}\n\n\n\nimpl OpenedFileTable {\n\n pub fn new() -> OpenedFileTable {\n\n OpenedFileTable {\n\n files: Vec::new(),\n\n prev_fd: 1,\n\n }\n\n }\n\n\n", "file_path": "kernel/fs/opened_file.rs", "rank": 69, "score": 101817.72891004907 }, { "content": "#[inline(always)]\n\nfn pow2(order: usize) -> usize {\n\n 1 << order\n\n}\n\n\n\n/// A physical memory page.\n\npub struct Page {\n\n /// The reference counter. 
0 if the page is free.\n\n ref_count: usize,\n\n /// The intrusive pointer to the next chunk in a free list.\n\n next: Option<NonNull<Page>>,\n\n}\n\n\n\nimpl Page {\n\n pub fn is_free(&self) -> bool {\n\n self.ref_count == 0\n\n }\n\n}\n\n\n", "file_path": "libs/kerla_utils/buddy_allocator.rs", "rank": 70, "score": 98748.62011635734 }, { "content": "fn align_down(value: usize, align: usize) -> usize {\n\n value & !(align - 1)\n\n}\n\n\n", "file_path": "libs/kerla_utils/bytes_parser.rs", "rank": 71, "score": 97989.89250998413 }, { "content": "fn align_up(value: usize, align: usize) -> usize {\n\n align_down(value + align - 1, align)\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum BytesParserError {\n\n TooShort,\n\n}\n\n\n\npub struct BytesParser<'a> {\n\n buffer: &'a [u8],\n\n current: usize,\n\n}\n\n\n\nimpl<'a> BytesParser<'a> {\n\n pub fn new(buffer: &'a [u8]) -> BytesParser<'a> {\n\n BytesParser { buffer, current: 0 }\n\n }\n\n\n\n pub fn remaining(&self) -> &[u8] {\n", "file_path": "libs/kerla_utils/bytes_parser.rs", "rank": 72, "score": 97989.89250998413 }, { "content": "fn alloc_inode_no() -> INodeNo {\n\n // Inode #1 is reserved for the root dir.\n\n static NEXT_INODE_NO: AtomicUsize = AtomicUsize::new(2);\n\n\n\n INodeNo::new(NEXT_INODE_NO.fetch_add(1, Ordering::SeqCst))\n\n}\n\n\n\npub struct TmpFs {\n\n root_dir: Arc<Dir>,\n\n}\n\n\n\nimpl TmpFs {\n\n pub fn new() -> TmpFs {\n\n TmpFs {\n\n root_dir: Arc::new(Dir::new(INodeNo::new(1))),\n\n }\n\n }\n\n\n\n pub fn root_tmpfs_dir(&self) -> &Arc<Dir> {\n\n &self.root_dir\n\n }\n\n}\n\n\n\nimpl FileSystem for TmpFs {\n\n fn root_dir(&self) -> Result<Arc<dyn Directory>> {\n\n Ok(self.root_dir.clone())\n\n }\n\n}\n\n\n", "file_path": "kernel/fs/tmpfs.rs", "rank": 73, "score": 97653.31823584958 }, { "content": "pub fn switch_thread(prev: &Process, next: &Process) {\n\n let head = cpu_local_head();\n\n\n\n // Switch the kernel stack.\n\n head.rsp0 = (next.syscall_stack.value() + KERNEL_STACK_SIZE) as 
u64;\n\n TSS.as_mut()\n\n .set_rsp0((next.interrupt_stack.value() + KERNEL_STACK_SIZE) as u64);\n\n\n\n // Save and restore the XSAVE area (i.e. XMM/YMM registrers).\n\n unsafe {\n\n use core::arch::x86_64::{_xrstor64, _xsave64};\n\n\n\n let xsave_mask = x86::controlregs::xcr0().bits();\n\n if let Some(xsave_area) = prev.xsave_area.as_ref() {\n\n _xsave64(xsave_area.as_mut_ptr(), xsave_mask);\n\n }\n\n if let Some(xsave_area) = next.xsave_area.as_ref() {\n\n _xrstor64(xsave_area.as_mut_ptr(), xsave_mask);\n\n }\n\n }\n\n\n\n // Fill an invalid value for now: must be initialized in interrupt handlers.\n\n head.rsp3 = 0xbaad_5a5a_5b5b_baad;\n\n\n\n unsafe {\n\n wrfsbase(next.fsbase.load());\n\n do_switch_thread(prev.rsp.get(), next.rsp.get());\n\n }\n\n}\n", "file_path": "kernel/arch/x64/process.rs", "rank": 74, "score": 97267.28022377622 }, { "content": "fn ensure_vm_lock_is_unheld() {\n\n // We should not hold the vm lock since we'll try to acquire it in the\n\n // page fault handler when copying caused a page fault.\n\n debug_assert!(!current_process().vm().as_ref().unwrap().is_locked());\n\n}\n\n\n\n/// Represents a user virtual memory address.\n\n///\n\n/// It is guaranteed that `UserVaddr` contains a valid address, in other words,\n\n/// it does not point to a kernel address.\n\n///\n\n/// Futhermore, like `NonNull<T>`, it is always non-null. 
Use `Option<UserVaddr>`\n\n/// represent a nullable user pointer.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\n#[repr(transparent)]\n\npub struct UserVAddr(u64);\n\n\n\nimpl UserVAddr {\n\n pub const fn new(addr: usize) -> Result<Option<UserVAddr>> {\n\n if (addr as u64) >= KERNEL_BASE_ADDR {\n", "file_path": "kernel/arch/x64/address.rs", "rank": 75, "score": 96834.20171807606 }, { "content": "fn num_pages_to_order(num_pages: usize) -> usize {\n\n // TODO: Use log2 instead\n\n for order in 0..16 {\n\n if num_pages > 1 << order {\n\n continue;\n\n }\n\n\n\n return order;\n\n }\n\n\n\n unreachable!();\n\n}\n\n\n\nbitflags! {\n\n pub struct AllocPageFlags: u32 {\n\n // TODO: Currently both of them are unused in the allocator.\n\n\n\n /// Allocate pages for the kernel purpose.\n\n const KERNEL = 0;\n\n /// Allocate pages for the user.\n\n const USER = 0;\n\n /// Fill allocated pages with zeroes.\n\n const ZEROED = 1 << 0;\n\n }\n\n}\n\n\n", "file_path": "kernel/mm/page_allocator.rs", "rank": 76, "score": 95247.3018890337 }, { "content": "pub trait FileSystem: Send + Sync {\n\n fn root_dir(&self) -> Result<Arc<dyn Directory>>;\n\n}\n", "file_path": "kernel/fs/file_system.rs", "rank": 77, "score": 88630.2828069356 }, { "content": "fn parse_str_field(bytes: &[u8]) -> &str {\n\n unsafe { from_utf8_unchecked(bytes) }\n\n}\n\n\n", "file_path": "kernel/fs/initramfs.rs", "rank": 78, "score": 87191.81786530497 }, { "content": "fn create_file(path: &Path, flags: OpenFlags, mode: FileMode) -> Result<INode> {\n\n if flags.contains(OpenFlags::O_DIRECTORY) {\n\n // A directory should be created through mkdir(2).\n\n return Err(Errno::EINVAL.into());\n\n }\n\n\n\n let (parent_dir, name) = path\n\n .parent_and_basename()\n\n .ok_or_else::<Error, _>(|| Errno::EEXIST.into())?;\n\n\n\n current_process()\n\n .root_fs()\n\n .lock()\n\n .lookup_dir(parent_dir)?\n\n .create_file(name, mode)\n\n}\n\n\n\nimpl<'a> SyscallHandler<'a> {\n\n pub fn sys_open(&mut self, 
path: &Path, flags: OpenFlags, mode: FileMode) -> Result<isize> {\n\n let current = current_process();\n", "file_path": "kernel/syscalls/open.rs", "rank": 79, "score": 86412.5341649952 }, { "content": "/// A symbolic link.\n\n///\n\n/// # Locking\n\n///\n\n/// See [`FileLike`] documentation.\n\npub trait Symlink: Debug + Send + Sync + Downcastable {\n\n /// `stat(2)`.\n\n fn stat(&self) -> Result<Stat>;\n\n /// The path linked to.\n\n fn linked_to(&self) -> Result<PathBuf>;\n\n /// `fsync(2)`.\n\n fn fsync(&self) -> Result<()> {\n\n Ok(())\n\n }\n\n}\n\n\n\n/// An inode object.\n\n///\n\n/// # Locking\n\n///\n\n/// See [`FileLike`] documentation.\n\n///\n\n/// # See Also\n\n///\n\n/// - [`crate::fs::opened_file::OpenedFile`]\n", "file_path": "kernel/fs/inode.rs", "rank": 80, "score": 85655.45741152755 }, { "content": "/// Represents a directory.\n\npub trait Directory: Debug + Send + Sync + Downcastable {\n\n /// Looks for an existing file.\n\n fn lookup(&self, name: &str) -> Result<INode>;\n\n /// Creates a file. Returns `EEXIST` if the it already exists.\n\n fn create_file(&self, _name: &str, _mode: FileMode) -> Result<INode>;\n\n /// Creates a directory. 
Returns `EEXIST` if the it already exists.\n\n fn create_dir(&self, _name: &str, _mode: FileMode) -> Result<INode>;\n\n /// `stat(2)`.\n\n fn stat(&self) -> Result<Stat>;\n\n /// `readdir(2)`.\n\n fn readdir(&self, index: usize) -> Result<Option<DirEntry>>;\n\n /// `link(2)`.\n\n fn link(&self, _name: &str, _link_to: &INode) -> Result<()>;\n\n /// `fsync(2)`.\n\n fn fsync(&self) -> Result<()> {\n\n Ok(())\n\n }\n\n /// `readlink(2)`.\n\n fn readlink(&self) -> Result<PathBuf> {\n\n // \"EINVAL - The named file is not a symbolic link.\" -- readlink(2)\n\n Err(Error::new(Errno::EINVAL))\n\n }\n\n}\n\n\n", "file_path": "kernel/fs/inode.rs", "rank": 81, "score": 85651.01201578851 }, { "content": "pub fn downcast<S, T>(arc: &Arc<S>) -> Option<Arc<T>>\n\nwhere\n\n S: Downcastable + ?Sized,\n\n T: Send + Sync + 'static,\n\n{\n\n arc.clone().as_any().downcast::<T>().ok()\n\n}\n", "file_path": "libs/kerla_utils/downcast.rs", "rank": 82, "score": 84935.22680385693 }, { "content": "/// A file-like object.\n\n///\n\n/// This trait represents an object which behaves like a file such as files on\n\n/// disks (aka. 
regular files), UDP/TCP sockets, device files like tty, etc.\n\npub trait FileLike: Debug + Send + Sync + Downcastable {\n\n /// `open(2)`.\n\n fn open(&self, _options: &OpenOptions) -> Result<Option<Arc<dyn FileLike>>> {\n\n Ok(None)\n\n }\n\n\n\n /// `stat(2)`.\n\n fn stat(&self) -> Result<Stat> {\n\n Err(Error::new(Errno::EBADF))\n\n }\n\n\n\n /// `readlink(2)`.\n\n fn readlink(&self) -> Result<PathBuf> {\n\n // \"EINVAL - The named file is not a symbolic link.\" -- readlink(2)\n\n Err(Error::new(Errno::EINVAL))\n\n }\n\n\n\n /// `poll(2)` and `select(2)`.\n\n fn poll(&self) -> Result<PollStatus> {\n\n Err(Error::new(Errno::EBADF))\n", "file_path": "kernel/fs/inode.rs", "rank": 83, "score": 83745.04250447183 }, { "content": "fn check_fd_statuses<F>(max_fd: c_int, fds: UserVAddr, is_ready: F) -> Result<isize>\n\nwhere\n\n F: Fn(PollStatus) -> bool,\n\n{\n\n let num_bytes = align_up(max_fd as usize, 8) / 8;\n\n if num_bytes > 1024 {\n\n return Err(Errno::ENOMEM.into());\n\n }\n\n\n\n let mut fds_vec = vec![0; num_bytes];\n\n fds.read_bytes(fds_vec.as_mut_slice())?;\n\n\n\n let mut ready_fds = 0;\n\n for (byte_i, byte) in fds_vec.iter_mut().enumerate().take(num_bytes) {\n\n for bit_i in 0..8 {\n\n let fd = Fd::new((byte_i * 8 + bit_i) as c_int);\n\n if *byte & (1 << bit_i) != 0 && fd.as_int() <= max_fd {\n\n let status = current_process().opened_files().lock().get(fd)?.poll()?;\n\n\n\n if is_ready(status) {\n", "file_path": "kernel/syscalls/select.rs", "rank": 84, "score": 80792.05268154692 }, { "content": "enum InnerMut<'a> {\n\n Slice(&'a mut [u8]),\n\n User { base: UserVAddr, len: usize },\n\n}\n\n\n\npub struct UserBufferMut<'a> {\n\n inner: InnerMut<'a>,\n\n}\n\n\n\nimpl<'a> UserBufferMut<'a> {\n\n pub fn from_uaddr(uaddr: UserVAddr, len: usize) -> UserBufferMut<'static> {\n\n UserBufferMut {\n\n inner: InnerMut::User { base: uaddr, len },\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n match &self.inner {\n\n InnerMut::Slice(slice) => slice.len(),\n\n 
InnerMut::User { len, .. } => *len,\n", "file_path": "kernel/user_buffer.rs", "rank": 85, "score": 79650.75749422787 }, { "content": "fn nth_level_table_index(vaddr: UserVAddr, level: usize) -> isize {\n\n ((vaddr.value() >> ((((level) - 1) * 9) + 12)) & 0x1ff) as isize\n\n}\n\n\n", "file_path": "kernel/arch/x64/page_table.rs", "rank": 86, "score": 73567.1509405542 }, { "content": "struct Symbol {\n\n name: &'static str,\n\n addr: VAddr,\n\n}\n\n\n", "file_path": "kernel/printk.rs", "rank": 87, "score": 70289.16486181293 }, { "content": "struct Timer {\n\n current: usize,\n\n process: Arc<Process>,\n\n}\n\n\n", "file_path": "kernel/timer.rs", "rank": 88, "score": 70289.16486181293 }, { "content": "type IrqHandler = dyn FnMut() + Send + Sync;\n\nconst UNINITIALIZED_IRQ_HANDLER: MaybeUninit<Box<IrqHandler>> = MaybeUninit::uninit();\n\nstatic IRQ_HANDLERS: SpinLock<[MaybeUninit<Box<IrqHandler>>; 256]> =\n\n SpinLock::new([UNINITIALIZED_IRQ_HANDLER; 256]);\n\n\n", "file_path": "kernel/interrupt.rs", "rank": 89, "score": 70010.9292320165 }, { "content": "#[repr(C, packed)]\n\nstruct SymbolTable {\n\n magic: u32,\n\n num_symbols: i32,\n\n padding: u64,\n\n}\n\n\n\nextern \"C\" {\n\n static __symbol_table: SymbolTable;\n\n}\n\n\n\nglobal_asm!(\n\n r#\"\n\n .rodata\n\n .align 8\n\n .global __symbol_table\n\n __symbol_table:\n\n .ascii \"__SYMBOL_TABLE_START__\"\n\n .space 725 * 1024\n\n .ascii \"__SYMBOL_TABLE_END__\"\n\n\"#\n\n);\n\n\n", "file_path": "kernel/printk.rs", "rank": 90, "score": 68895.73825165447 }, { "content": "#[repr(C, packed)]\n\nstruct SymbolEntry {\n\n addr: u64,\n\n name: [u8; 56],\n\n}\n\n\n", "file_path": "kernel/printk.rs", "rank": 91, "score": 68895.73825165447 }, { "content": "struct OurDevice;\n\n\n\nimpl<'a> Device<'a> for OurDevice {\n\n type RxToken = OurRxToken;\n\n type TxToken = OurTxToken;\n\n\n\n fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> {\n\n RX_PACKET_QUEUE\n\n .lock()\n\n .pop()\n\n .map(|buffer| (OurRxToken { 
buffer }, OurTxToken {}))\n\n }\n\n\n\n fn transmit(&'a mut self) -> Option<Self::TxToken> {\n\n Some(OurTxToken {})\n\n }\n\n\n\n fn capabilities(&self) -> DeviceCapabilities {\n\n let mut caps = DeviceCapabilities::default();\n\n caps.max_transmission_unit = 1500;\n\n caps\n\n }\n\n}\n\n\n", "file_path": "kernel/net/mod.rs", "rank": 92, "score": 68895.73825165447 }, { "content": "#[derive(Copy, Clone)]\n\nstruct PciBus {}\n\n\n\nimpl PciBus {\n\n pub fn new() -> PciBus {\n\n PciBus {}\n\n }\n\n\n\n pub fn read32(&self, bus: u8, slot: u8, offset: u32) -> u32 {\n\n assert!(is_aligned(offset as usize, 4));\n\n let addr = (1 << 31) | ((bus as u32) << 16) | ((slot as u32) << 11) | offset;\n\n unsafe {\n\n outl(PCI_IOPORT_ADDR, addr);\n\n inl(PCI_IOPORT_DATA)\n\n }\n\n }\n\n\n\n pub fn read8(&self, bus: u8, slot: u8, offset: u32) -> u8 {\n\n let value = self.read32(bus, slot, offset & 0xfffc);\n\n ((value >> ((offset & 0x03) * 8)) & 0xff) as u8\n\n }\n", "file_path": "kernel/drivers/pci.rs", "rank": 93, "score": 67591.18355522095 }, { "content": "struct Cmdline {\n\n pub pci_enabled: bool,\n\n pub omikuji: bool,\n\n pub virtio_mmio_devices: ArrayVec<VirtioMmioDevice, 4>,\n\n}\n\n\n\nimpl Cmdline {\n\n pub fn parse(cmdline: &[u8]) -> Cmdline {\n\n let s = core::str::from_utf8(cmdline).expect(\"cmdline is not a utf-8 string\");\n\n info!(\"cmdline: {}\", if s.is_empty() { \"(empty)\" } else { s });\n\n\n\n let mut pci_enabled = true;\n\n let mut omikuji = false;\n\n let mut virtio_mmio_devices = ArrayVec::new();\n\n if !s.is_empty() {\n\n for config in s.split(' ') {\n\n let mut words = config.splitn(2, '=');\n\n match (words.next(), words.next()) {\n\n (Some(\"pci\"), Some(\"off\")) => {\n\n warn!(\"bootinfo: PCI disabled\");\n", "file_path": "kernel/arch/x64/bootinfo.rs", "rank": 94, "score": 67591.18355522095 }, { "content": "struct OurTxToken {}\n\n\n\nimpl TxToken for OurTxToken {\n\n fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> 
smoltcp::Result<R>\n\n where\n\n F: FnOnce(&mut [u8]) -> smoltcp::Result<R>,\n\n {\n\n let mut buffer = vec![0; len];\n\n let return_value = f(&mut buffer)?;\n\n if EthernetFrame::new_checked(&mut buffer).is_ok() {\n\n send_ethernet_frame(&buffer);\n\n }\n\n\n\n Ok(return_value)\n\n }\n\n}\n\n\n", "file_path": "kernel/net/mod.rs", "rank": 95, "score": 67591.18355522095 }, { "content": "struct UserspaceEntry {\n\n vm: Vm,\n\n ip: UserVAddr,\n\n user_sp: UserVAddr,\n\n}\n\n\n", "file_path": "kernel/process/process.rs", "rank": 96, "score": 67591.18355522095 }, { "content": "struct OurRxToken {\n\n buffer: Vec<u8>,\n\n}\n\n\n\nimpl RxToken for OurRxToken {\n\n fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> smoltcp::Result<R>\n\n where\n\n F: FnOnce(&mut [u8]) -> smoltcp::Result<R>,\n\n {\n\n f(&mut self.buffer)\n\n }\n\n}\n\n\n", "file_path": "kernel/net/mod.rs", "rank": 97, "score": 67591.18355522095 }, { "content": "#[derive(Debug, Clone)]\n\nenum Inner<'a> {\n\n Slice(&'a [u8]),\n\n User { base: UserVAddr, len: usize },\n\n}\n\n\n\n/// A user or kernel pointer.\n\n#[derive(Debug, Clone)]\n\npub struct UserBuffer<'a> {\n\n inner: Inner<'a>,\n\n}\n\n\n\nimpl<'a> UserBuffer<'a> {\n\n pub fn from_uaddr(uaddr: UserVAddr, len: usize) -> UserBuffer<'static> {\n\n UserBuffer {\n\n inner: Inner::User { base: uaddr, len },\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n match &self.inner {\n", "file_path": "kernel/user_buffer.rs", "rank": 98, "score": 67179.05009772134 }, { "content": "fn main() {\n\n env!(\"CARGO_FROM_MAKE\"); // Abort if cargo is called directly -- Use make(1) instead!\n\n\n\n println!(\n\n \"cargo:rerun-if-changed=arch/{arch}/{arch}.ld\",\n\n arch = env!(\"ARCH\")\n\n );\n\n println!(\n\n \"cargo:rerun-if-changed=arch/{arch}/{arch}.json\",\n\n arch = env!(\"ARCH\")\n\n );\n\n}\n", "file_path": "kernel/build.rs", "rank": 99, "score": 66586.97872232906 } ]
Rust
dao_factory/lib.rs
RainbowcityFoundation/RainbowDAO-Protocol-Ink-milestone_2
82757d337bea22dcb8e08b4759c47ec02d67a140
#![cfg_attr(not(feature = "std"), no_std)] #![feature(const_fn_trait_bound)] extern crate alloc; use ink_lang as ink; #[allow(unused_imports)] #[ink::contract] mod dao_factory { use alloc::string::String; use ink_prelude::vec::Vec; use ink_prelude::collections::BTreeMap; use ink_storage::{ traits::{ PackedLayout, SpreadLayout, }, collections::HashMap as StorageHashMap, }; use template_manager::TemplateManager; use template_manager::DAOTemplate; use dao_manager::DAOManager; const TEMPLATE_INIT_BALANCE: u128 = 1000 * 1_000_000_000_000; const DAO_INIT_BALANCE: u128 = 1000 * 1_000_000_000_000; #[derive(scale::Encode, scale::Decode, Clone, SpreadLayout, PackedLayout)] #[cfg_attr( feature = "std", derive(scale_info::TypeInfo, ink_storage::traits::StorageLayout) )] #[derive(Debug)] pub struct DAOInstance { id: u64, owner: AccountId, size: u64, name: String, logo: String, desc: String, dao_manager: DAOManager, dao_manager_addr: AccountId, } #[ink(storage)] pub struct DaoFactory { owner: AccountId, template_addr: Option<AccountId>, template: Option<TemplateManager>, instance_index:u64, instance_map: StorageHashMap<u64, DAOInstance>, instance_map_by_owner: StorageHashMap<AccountId, Vec<u64>>, } #[ink(event)] pub struct InstanceDAO { #[ink(topic)] index: u64, #[ink(topic)] owner: Option<AccountId>, #[ink(topic)] dao_addr: AccountId, } impl DaoFactory { #[ink(constructor)] pub fn new() -> Self { Self { owner: Self::env().caller(), template_addr: None, template: None, instance_index:0, instance_map: StorageHashMap::new(), instance_map_by_owner: StorageHashMap::new(), } } #[ink(message)] pub fn init_factory (&mut self, template_code_hash: Hash, version:u128) -> bool { let salt = version.to_le_bytes(); let instance_params = TemplateManager::new(self.owner) .endowment(TEMPLATE_INIT_BALANCE) .code_hash(template_code_hash) .salt_bytes(&salt) .params(); let init_result = ink_env::instantiate_contract(&instance_params); let contract_addr = init_result.expect("failed at instantiating 
the `TemplateManager` contract"); let contract_instance = ink_env::call::FromAccountId::from_account_id(contract_addr); self.template = Some(contract_instance); self.template_addr = Some(contract_addr); true } #[ink(message)] pub fn init_dao_by_template( &mut self, dao_manager_code_hash:Hash, controller: AccountId, controller_type:u32, category:String ) -> bool { assert_eq!(self.instance_index + 1 > self.instance_index, true); let salt = self.instance_index.to_le_bytes(); let dao_instance_params = DAOManager::new(self.env().caller(),controller, self.instance_index,controller_type,category) .endowment(DAO_INIT_BALANCE) .code_hash(dao_manager_code_hash) .salt_bytes(salt) .params(); let dao_init_result = ink_env::instantiate_contract(&dao_instance_params); let dao_addr = dao_init_result.expect("failed at instantiating the `DAO Instance` contract"); let dao_instance: DAOManager = ink_env::call::FromAccountId::from_account_id(dao_addr); self.env().emit_event(InstanceDAO { index: self.instance_index, owner: Some(controller), dao_addr: dao_addr, }); let id_list = self.instance_map_by_owner.entry(controller.clone()).or_insert(Vec::new()); id_list.push(self.instance_index); self.instance_map.insert(self.instance_index, DAOInstance { id: self.instance_index, owner: controller, size: 0, name: String::from(""), logo: String::from(""), desc: String::from(""), dao_manager: dao_instance, dao_manager_addr: dao_addr, }); self.instance_index += 1; true } #[ink(message)] pub fn query_template_by_index(&self, index: u64) -> DAOTemplate { self.template.as_ref().unwrap().query_template_by_index(index) } #[ink(message)] pub fn get_dao_by_index(&self,id:u64) -> DAOInstance { self.instance_map.get(&id).unwrap().clone() } #[ink(message)] pub fn get_daos_by_owner(&self) -> Vec<u64> { let user = self.env().caller(); let list = self.instance_map_by_owner.get(&user).unwrap().clone(); list } #[ink(message)] pub fn list_dao(&self) -> Vec<DAOInstance> { let mut dao_vec = Vec::new(); let mut iter = 
self.instance_map.values(); let mut dao = iter.next(); while dao.is_some() { dao_vec.push(dao.unwrap().clone()); dao = iter.next(); } dao_vec } #[ink(message)] pub fn joined_dao(&mut self,index:u64) -> bool { let user = self.env().caller(); let id_list = self.instance_map_by_owner.entry(user.clone()).or_insert(Vec::new()); id_list.push(index); true } } #[cfg(test)] mod tests { use super::*; use ink_lang as ink; #[ink::test] fn it_works() { let mut dao_factory = DaoFactory::new(); assert!(dao_factory.joined_dao(0) == true); } #[ink::test] fn test_dao_length() { let dao_factory = DaoFactory::new(); let list = dao_factory.list_dao(); assert!(list.len() == 0); } } }
#![cfg_attr(not(feature = "std"), no_std)] #![feature(const_fn_trait_bound)] extern crate alloc; use ink_lang as ink; #[allow(unused_imports)] #[ink::contract] mod dao_factory { use alloc::string::String; use ink_prelude::vec::Vec; use ink_prelude::collections::BTreeMap; use ink_storage::{ traits::{ PackedLayout, SpreadLayout, }, collections::HashMap as StorageHashMap, }; use template_manager::TemplateManager; use template_manager::DAOTemplate; use dao_manager::DAOManager; const TEMPLATE_INIT_BALANCE: u128 = 1000 * 1_000_000_000_000; const DAO_INIT_BALANCE: u128 = 1000 * 1_000_000_000_000; #[derive(scale::Encode, scale::Decode, Clone, SpreadLayout, PackedLayout)] #[cfg_attr( feature = "std", derive(scale_info::TypeInfo, ink_storage::traits::StorageLayout) )] #[d
shMap::new(), instance_map_by_owner: StorageHashMap::new(), } } #[ink(message)] pub fn init_factory (&mut self, template_code_hash: Hash, version:u128) -> bool { let salt = version.to_le_bytes(); let instance_params = TemplateManager::new(self.owner) .endowment(TEMPLATE_INIT_BALANCE) .code_hash(template_code_hash) .salt_bytes(&salt) .params(); let init_result = ink_env::instantiate_contract(&instance_params); let contract_addr = init_result.expect("failed at instantiating the `TemplateManager` contract"); let contract_instance = ink_env::call::FromAccountId::from_account_id(contract_addr); self.template = Some(contract_instance); self.template_addr = Some(contract_addr); true } #[ink(message)] pub fn init_dao_by_template( &mut self, dao_manager_code_hash:Hash, controller: AccountId, controller_type:u32, category:String ) -> bool { assert_eq!(self.instance_index + 1 > self.instance_index, true); let salt = self.instance_index.to_le_bytes(); let dao_instance_params = DAOManager::new(self.env().caller(),controller, self.instance_index,controller_type,category) .endowment(DAO_INIT_BALANCE) .code_hash(dao_manager_code_hash) .salt_bytes(salt) .params(); let dao_init_result = ink_env::instantiate_contract(&dao_instance_params); let dao_addr = dao_init_result.expect("failed at instantiating the `DAO Instance` contract"); let dao_instance: DAOManager = ink_env::call::FromAccountId::from_account_id(dao_addr); self.env().emit_event(InstanceDAO { index: self.instance_index, owner: Some(controller), dao_addr: dao_addr, }); let id_list = self.instance_map_by_owner.entry(controller.clone()).or_insert(Vec::new()); id_list.push(self.instance_index); self.instance_map.insert(self.instance_index, DAOInstance { id: self.instance_index, owner: controller, size: 0, name: String::from(""), logo: String::from(""), desc: String::from(""), dao_manager: dao_instance, dao_manager_addr: dao_addr, }); self.instance_index += 1; true } #[ink(message)] pub fn query_template_by_index(&self, index: 
u64) -> DAOTemplate { self.template.as_ref().unwrap().query_template_by_index(index) } #[ink(message)] pub fn get_dao_by_index(&self,id:u64) -> DAOInstance { self.instance_map.get(&id).unwrap().clone() } #[ink(message)] pub fn get_daos_by_owner(&self) -> Vec<u64> { let user = self.env().caller(); let list = self.instance_map_by_owner.get(&user).unwrap().clone(); list } #[ink(message)] pub fn list_dao(&self) -> Vec<DAOInstance> { let mut dao_vec = Vec::new(); let mut iter = self.instance_map.values(); let mut dao = iter.next(); while dao.is_some() { dao_vec.push(dao.unwrap().clone()); dao = iter.next(); } dao_vec } #[ink(message)] pub fn joined_dao(&mut self,index:u64) -> bool { let user = self.env().caller(); let id_list = self.instance_map_by_owner.entry(user.clone()).or_insert(Vec::new()); id_list.push(index); true } } #[cfg(test)] mod tests { use super::*; use ink_lang as ink; #[ink::test] fn it_works() { let mut dao_factory = DaoFactory::new(); assert!(dao_factory.joined_dao(0) == true); } #[ink::test] fn test_dao_length() { let dao_factory = DaoFactory::new(); let list = dao_factory.list_dao(); assert!(list.len() == 0); } } }
erive(Debug)] pub struct DAOInstance { id: u64, owner: AccountId, size: u64, name: String, logo: String, desc: String, dao_manager: DAOManager, dao_manager_addr: AccountId, } #[ink(storage)] pub struct DaoFactory { owner: AccountId, template_addr: Option<AccountId>, template: Option<TemplateManager>, instance_index:u64, instance_map: StorageHashMap<u64, DAOInstance>, instance_map_by_owner: StorageHashMap<AccountId, Vec<u64>>, } #[ink(event)] pub struct InstanceDAO { #[ink(topic)] index: u64, #[ink(topic)] owner: Option<AccountId>, #[ink(topic)] dao_addr: AccountId, } impl DaoFactory { #[ink(constructor)] pub fn new() -> Self { Self { owner: Self::env().caller(), template_addr: None, template: None, instance_index:0, instance_map: StorageHa
random
[ { "content": "/// Returns a new dynamic storage allocation.\n\npub fn alloc() -> DynamicAllocation {\n\n init::on_instance(DynamicAllocator::alloc)\n\n}\n\n\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 0, "score": 228000.94545688984 }, { "content": "/// Implemented by types that have a storage layout.\n\npub trait StorageLayout {\n\n /// Returns the static storage layout of `Self`.\n\n ///\n\n /// The given key pointer is guiding the allocation of static fields onto\n\n /// the contract storage regions.\n\n fn layout(key_ptr: &mut KeyPtr) -> Layout;\n\n}\n\n\n", "file_path": "ink/crates/storage/src/traits/layout/mod.rs", "rank": 1, "score": 223196.8103243583 }, { "content": "/// Types implementing this trait are supported layouting crypto hashers.\n\npub trait LayoutCryptoHasher {\n\n /// Returns the layout crypto hasher for `Self`.\n\n fn crypto_hasher() -> CryptoHasher;\n\n}\n\n\n\nimpl LayoutCryptoHasher for Blake2x256 {\n\n fn crypto_hasher() -> CryptoHasher {\n\n CryptoHasher::Blake2x256\n\n }\n\n}\n\n\n\nimpl LayoutCryptoHasher for Sha2x256 {\n\n fn crypto_hasher() -> CryptoHasher {\n\n CryptoHasher::Sha2x256\n\n }\n\n}\n\n\n\nimpl LayoutCryptoHasher for Keccak256 {\n\n fn crypto_hasher() -> CryptoHasher {\n\n CryptoHasher::Keccak256\n\n }\n\n}\n", "file_path": "ink/crates/storage/src/traits/layout/mod.rs", "rank": 2, "score": 220743.73954020438 }, { "content": "/// Types that can be stored to and loaded from a single contract storage cell.\n\npub trait PackedLayout: SpreadLayout + scale::Encode + scale::Decode {\n\n /// Indicates to `self` that is has just been pulled from the storage.\n\n ///\n\n /// # Note\n\n ///\n\n /// Most types will have to implement a simple forwarding to their fields.\n\n /// However, some types such as [`storage::Box`](`crate::Box`)\n\n /// are required to perform some special handling upon receiving this signal.\n\n fn pull_packed(&mut self, at: &Key);\n\n\n\n /// Indicates to `self` that it is about to be 
pushed to contract storage.\n\n ///\n\n /// # Note\n\n ///\n\n /// Most types will have to implement a simple forwarding to their fields.\n\n /// However, some types such as [`storage::Box`](`crate::Box`)\n\n /// are required to perform some special handling upon receiving this signal.\n\n fn push_packed(&self, at: &Key);\n\n\n\n /// Indicates to `self` that it is about to be cleared from contract storage.\n\n ///\n\n /// # Note\n\n ///\n\n /// Most types will have to implement a simple forwarding to their fields.\n\n /// However, some types such as [`storage::Box`](`crate::Box`)\n\n /// are required to perform some special handling upon receiving this signal.\n\n fn clear_packed(&self, at: &Key);\n\n}\n", "file_path": "ink/crates/storage/src/traits/packed.rs", "rank": 3, "score": 218093.09084021382 }, { "content": "/// Types that can be stored to and loaded from the contract storage.\n\npub trait SpreadLayout {\n\n /// The footprint of the type.\n\n ///\n\n /// This is the number of adjunctive cells the type requires in order to\n\n /// be stored in the contract storage with spread layout.\n\n ///\n\n /// # Examples\n\n ///\n\n /// An instance of type `i32` requires one storage cell, so its footprint is\n\n /// 1. An instance of type `(i32, i32)` requires 2 storage cells since a\n\n /// tuple or any other combined data structure always associates disjunctive\n\n /// cells for its sub types. The same applies to arrays, e.g. 
`[i32; 5]`\n\n /// has a footprint of 5.\n\n const FOOTPRINT: u64;\n\n\n\n /// Indicates whether a type requires deep clean-up of its state meaning that\n\n /// a clean-up routine has to decode an entity into an instance in order to\n\n /// eventually recurse upon its tear-down.\n\n /// This is not required for the majority of primitive data types such as `i32`,\n\n /// however types such as `storage::Box` that might want to forward the clean-up\n", "file_path": "ink/crates/storage/src/traits/spread.rs", "rank": 4, "score": 215658.03769068356 }, { "content": "#[doc(hidden)]\n\npub trait DispatchUsingMode {\n\n fn dispatch_using_mode(mode: DispatchMode) -> Result<(), DispatchError>;\n\n}\n", "file_path": "ink/crates/lang/src/contract.rs", "rank": 5, "score": 214863.07308411473 }, { "content": "/// Types implementing this trait can forward code generation to other generators.\n\npub trait GenerateCodeUsing: AsRef<ir::Contract> {\n\n /// Generates code using the given codegen module.\n\n fn generate_code_using<'a, G>(&'a self) -> TokenStream2\n\n where\n\n G: GenerateCode + From<&'a ir::Contract>;\n\n}\n\n\n\nimpl<T> GenerateCodeUsing for T\n\nwhere\n\n T: AsRef<ir::Contract>,\n\n{\n\n fn generate_code_using<'a, G>(&'a self) -> TokenStream2\n\n where\n\n G: GenerateCode + From<&'a ir::Contract>,\n\n {\n\n <G as GenerateCode>::generate_code(&G::from(\n\n <Self as AsRef<ir::Contract>>::as_ref(self),\n\n ))\n\n }\n\n}\n", "file_path": "ink/crates/lang/codegen/src/traits.rs", "rank": 6, "score": 210314.83158400736 }, { "content": "pub trait OnInstance: EnvBackend + TypedEnvBackend {\n\n fn on_instance<F, R>(f: F) -> R\n\n where\n\n F: FnOnce(&mut Self) -> R;\n\n}\n\n\n\ncfg_if! 
{\n\n if #[cfg(all(not(feature = \"std\"), target_arch = \"wasm32\"))] {\n\n mod on_chain;\n\n pub use self::on_chain::EnvInstance;\n\n } else if #[cfg(all(feature = \"std\", feature = \"ink-experimental-engine\"))] {\n\n pub mod experimental_off_chain;\n\n pub use experimental_off_chain as off_chain;\n\n pub use self::experimental_off_chain::EnvInstance;\n\n } else if #[cfg(feature = \"std\")] {\n\n pub mod off_chain;\n\n pub use self::off_chain::EnvInstance;\n\n pub use self::off_chain::{\n\n AccountError,\n\n TypedEncodedError,\n\n };\n\n } else {\n\n compile_error! {\n\n \"ink! only support compilation as `std` or `no_std` + `wasm32-unknown`\"\n\n }\n\n }\n\n}\n", "file_path": "ink/crates/env/src/engine/mod.rs", "rank": 7, "score": 199394.7173003754 }, { "content": "/// Frees the given dynamic storage allocation.\n\n///\n\n/// This makes the given dynamic storage allocation available again\n\n/// for new dynamic storage allocations.\n\npub fn free(allocation: DynamicAllocation) {\n\n init::on_instance(|allocator| allocator.free(allocation))\n\n}\n\n\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 8, "score": 186790.15017103322 }, { "content": "//! so-called `set_bits` vector.\n\n//! In this vector every `u8` element densely stores the number of set bits\n\n//! (bits that are `1` or `true`) for each 256-bit package in the `free` list.\n\n//! (Note that the `free` list is organized in 256-bit chunks of bits.)\n\n//!\n\n//! This way, to search for an unoccupied dynamic allocation we iterate over\n\n//! the set-bits vector which is 32 times more dense than our `free` list.\n\n//! The additional density implies that we can query up to 8192 potential\n\n//! 
dynamic storage allocations with a single contract storage look-up.\n\n\n\nmod allocation;\n\nmod allocator;\n\nmod boxed;\n\nmod init;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\nuse self::allocator::DynamicAllocator;\n\npub use self::{\n\n allocation::DynamicAllocation,\n\n boxed::Box,\n\n init::ContractPhase,\n\n};\n\n\n\n/// Returns a new dynamic storage allocation.\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 9, "score": 183617.19165643887 }, { "content": "// Copyright 2018-2021 Parity Technologies (UK) Ltd.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! The default dynamic storage allocator.\n\n//!\n\n//! Allows to allocate storage cells in a dynamic fashion.\n\n//! This is important if users want to combine types of varying storage\n\n//! footprints. For example, dynamic allocations are required whenever\n\n//! a user wants to use a storage collection (e.g. `storage::Vec`) in\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 10, "score": 183609.35647156075 }, { "content": "//! keys by using one of the built-in crypto hashes that has a 256-bit output,\n\n//! e.g. KECCAK, SHA-2 or BLAKE-2. For technical reasons we should prepend the\n\n//! bytes of the 32-bit key by some unique byte sequence, e.g.:\n\n//! ```no_compile\n\n//! let key256 = blake2x256(b\"DYNAMICALLY ALLOCATED\", bytes(key32));\n\n//! ```\n\n//!\n\n//! # Internals\n\n//!\n\n//! 
As described in [# Simplification] there are `2^32` possible uniform dynamic\n\n//! allocations available. For each such slot the dynamic allocator stores via\n\n//! a single bit in a bitvector if that slot is free or occupied.\n\n//! This bitvector is called the `free` list.\n\n//! However, searching in this `free` list for a 0 bit and thus a free slot\n\n//! for a dynamic allocation would mean that for every 256 consecutively\n\n//! occupied dynamic allocations there was a contract storage lookup required.\n\n//! This might seem a lot but given that there could be thousands or\n\n//! tens of thousands of dynamic allocations at any given time this might not scale\n\n//! well.\n\n//! For the reason of improving scalability we added another vector: the\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 11, "score": 183608.72917184106 }, { "content": "//! another storage collection: `storage::Vec<storage::Vec<T>>`\n\n//!\n\n//! # Simplification\n\n//!\n\n//! The contracts pallet is using 256-bit keys for identifying storage cells.\n\n//! This implies a storage space of `2^256` cells which is big enough to say that\n\n//! there are probably never going to happen collisions anywhere at any time\n\n//! if keys are chosen randomly. Using the built-in crypto hashers on unique\n\n//! input we can be sure that there are never going to be collisions in this\n\n//! space of `2^256` cells.\n\n//!\n\n//! This way we can reduce the problem of finding another region in our storage\n\n//! that fits certain requirements (e.g. a minimum size) to the problem of\n\n//! finding another uniform slot. Since we are on 32-bit WebAssembly we have\n\n//! memory limitations that make it impractical to have more than `2^32` dynamic\n\n//! allocated entities, so we can create another limitation for having a total of\n\n//! `2^32` dynamic allocations at any point in time.\n\n//! This enables us to have 32-bit keys instead of 256-bit keys.\n\n//!\n\n//! 
We can convert such 32-bit keys (represented by e.g. a `u32`) into 256-bit\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 12, "score": 183608.11519372737 }, { "content": "//!\n\n//! The `PackedLayout` trait can then be implemented on top of the `SpreadLayout`\n\n//! for types that further allow to be stored in the contract storage in a more\n\n//! compressed format to a single storage cell.\n\n\n\nmod impls;\n\nmod keyptr;\n\nmod optspec;\n\nmod packed;\n\nmod spread;\n\n\n\n#[cfg(feature = \"std\")]\n\nmod layout;\n\n\n\n#[cfg(feature = \"std\")]\n\npub use self::layout::{\n\n LayoutCryptoHasher,\n\n StorageLayout,\n\n};\n\npub(crate) use self::optspec::{\n", "file_path": "ink/crates/storage/src/traits/mod.rs", "rank": 13, "score": 183197.05394745324 }, { "content": " },\n\n};\n\npub use ::ink_storage_derive::{\n\n PackedLayout,\n\n SpreadLayout,\n\n StorageLayout,\n\n};\n\nuse ink_primitives::Key;\n\n\n\n/// Pulls an instance of type `T` from the contract storage using spread layout.\n\n///\n\n/// The root key denotes the offset into the contract storage where the\n\n/// instance of type `T` is being pulled from.\n\n///\n\n/// # Note\n\n///\n\n/// - The routine assumes that the instance has previously been stored to\n\n/// the contract storage using spread layout.\n\n/// - Users should prefer using this function directly instead of using the\n\n/// trait methods on [`SpreadLayout`].\n", "file_path": "ink/crates/storage/src/traits/mod.rs", "rank": 14, "score": 183180.1713717906 }, { "content": "// Copyright 2018-2021 Parity Technologies (UK) Ltd.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" 
BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Traits and interfaces to operate with storage entities.\n\n//!\n\n//! Generally a type is said to be a storage entity if it implements the\n\n//! `SpreadLayout` trait. This defines certain constants and routines in order\n\n//! to tell a smart contract how to load and store instances of this type\n\n//! from and to the contract's storage.\n", "file_path": "ink/crates/storage/src/traits/mod.rs", "rank": 15, "score": 183172.64234863967 }, { "content": " clear_spread_root_opt,\n\n pull_packed_root_opt,\n\n pull_spread_root_opt,\n\n push_packed_root_opt,\n\n push_spread_root_opt,\n\n};\n\npub use self::{\n\n impls::{\n\n forward_clear_packed,\n\n forward_pull_packed,\n\n forward_push_packed,\n\n },\n\n keyptr::{\n\n ExtKeyPtr,\n\n KeyPtr,\n\n },\n\n packed::PackedLayout,\n\n spread::{\n\n SpreadLayout,\n\n FOOTPRINT_CLEANUP_THRESHOLD,\n", "file_path": "ink/crates/storage/src/traits/mod.rs", "rank": 16, "score": 183168.11389673917 }, { "content": " trait MyTrait {}\n\n );\n\n assert_ink_trait_eq_err!(\n\n error: \"ink! trait definitions must have public visibility\",\n\n pub(crate) trait MyTrait {}\n\n );\n\n }\n\n\n\n #[test]\n\n fn generic_trait_def_is_denied() {\n\n assert_ink_trait_eq_err!(\n\n error: \"ink! 
trait definitions must not be generic\",\n", "file_path": "ink/crates/lang/ir/src/ir/trait_def.rs", "rank": 17, "score": 182064.10214330946 }, { "content": "use crate::{\n\n alloc::{\n\n alloc,\n\n DynamicAllocation,\n\n },\n\n lazy::Lazy,\n\n traits::SpreadLayout,\n\n};\n\nuse ink_primitives::Key;\n\n\n\n/// A dynamically allocated storage entity.\n\n///\n\n/// Users can use this in order to make certain `SpreadLayout` storage entities\n\n/// used in contexts that require a `PackedLayout` storage entity by simply\n\n/// packing the storage entity within a `storage::Box`.\n\n///\n\n/// Dynamic allocations caused by the creation of `storage::Box` instances do\n\n/// have some limited overhead:\n\n///\n\n/// - The dynamic allocation itself has to be provided by some dynamic storage\n", "file_path": "ink/crates/storage/src/alloc/boxed/mod.rs", "rank": 18, "score": 180611.5104227027 }, { "content": "/// allocator that needs to be invoked.\n\n/// - Each dynamic storage allocation implies roughly 1.12 bits of overhead.\n\n/// - Upon ever first dereferencing of a `storage::Box` instance a cryptographic\n\n/// hash routine is run in order to compute the underlying storage key.\n\n///\n\n/// Use this abstraction with caution due to the aforementioned performance\n\n/// implications.\n\n#[derive(Debug)]\n\npub struct Box<T>\n\nwhere\n\n T: SpreadLayout,\n\n{\n\n /// The storage area where the boxed storage entity is stored.\n\n allocation: DynamicAllocation,\n\n /// The cache for the boxed storage entity.\n\n value: Lazy<T>,\n\n}\n\n\n\nimpl<T> Box<T>\n\nwhere\n", "file_path": "ink/crates/storage/src/alloc/boxed/mod.rs", "rank": 19, "score": 180596.08907858457 }, { "content": "// Copyright 2018-2021 Parity Technologies (UK) Ltd.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// 
http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nmod impls;\n\nmod storage;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n", "file_path": "ink/crates/storage/src/alloc/boxed/mod.rs", "rank": 20, "score": 180595.70480830508 }, { "content": " self.allocation.key()\n\n }\n\n}\n\n\n\nimpl<T> Box<T>\n\nwhere\n\n T: SpreadLayout,\n\n{\n\n /// Returns a shared reference to the boxed value.\n\n ///\n\n /// # Note\n\n ///\n\n /// This loads the value from the pointed to contract storage\n\n /// if this did not happen before.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If loading from contract storage failed.\n\n #[must_use]\n\n pub fn get(boxed: &Self) -> &T {\n", "file_path": "ink/crates/storage/src/alloc/boxed/mod.rs", "rank": 21, "score": 180595.16590707243 }, { "content": " T: SpreadLayout,\n\n{\n\n /// Creates a new boxed entity.\n\n pub fn new(value: T) -> Self {\n\n Self {\n\n allocation: alloc(),\n\n value: Lazy::new(value),\n\n }\n\n }\n\n\n\n /// Creates a new boxed entity that has not yet loaded its value.\n\n fn lazy(allocation: DynamicAllocation) -> Self {\n\n Self {\n\n allocation,\n\n value: Lazy::from_key(allocation.key()),\n\n }\n\n }\n\n\n\n /// Returns the underlying storage key for the dynamic allocated entity.\n\n fn key(&self) -> Key {\n", "file_path": "ink/crates/storage/src/alloc/boxed/mod.rs", "rank": 22, "score": 180592.7502169888 }, { "content": " &boxed.value\n\n }\n\n\n\n /// Returns an exclusive reference to the boxed value.\n\n ///\n\n /// # Note\n\n ///\n\n /// This loads the value from the pointed to contract storage\n\n /// if this did not happen before.\n\n ///\n\n /// # Panics\n\n ///\n\n /// If loading from 
contract storage failed.\n\n #[must_use]\n\n pub fn get_mut(boxed: &mut Self) -> &mut T {\n\n &mut boxed.value\n\n }\n\n}\n", "file_path": "ink/crates/storage/src/alloc/boxed/mod.rs", "rank": 23, "score": 180590.25830269774 }, { "content": " }\n\n };\n\n };\n\n}\n\n\n\nmod arrays;\n\nmod collections;\n\nmod prims;\n\nmod tuples;\n\n\n\n#[cfg(all(test, feature = \"ink-fuzz-tests\"))]\n\nmod fuzz_tests;\n\n\n\nuse super::{\n\n clear_packed_root,\n\n pull_packed_root,\n\n push_packed_root,\n\n PackedLayout,\n\n};\n\nuse crate::traits::KeyPtr;\n", "file_path": "ink/crates/storage/src/traits/impls/mod.rs", "rank": 24, "score": 180189.8909885745 }, { "content": "// Copyright 2018-2021 Parity Technologies (UK) Ltd.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nmod impls;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\nuse crate::traits::KeyPtr;\n", "file_path": "ink/crates/storage/src/traits/layout/mod.rs", "rank": 25, "score": 180176.829030469 }, { "content": "// Copyright 2018-2021 Parity Technologies (UK) Ltd.\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES 
OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::traits::ExtKeyPtr as _;\n\n\n\nmacro_rules! impl_always_packed_layout {\n\n ( $name:ident < $($frag:ident),+ >, deep: $deep:expr ) => {\n\n const _: () = {\n\n use crate::traits::impls::{\n", "file_path": "ink/crates/storage/src/traits/impls/mod.rs", "rank": 26, "score": 180174.02923517622 }, { "content": " fn push_spread(&self, ptr: &mut KeyPtr) {\n\n forward_push_packed::<Self>(self, ptr)\n\n }\n\n\n\n #[inline]\n\n fn clear_spread(&self, ptr: &mut KeyPtr) {\n\n forward_clear_packed::<Self>(self, ptr)\n\n }\n\n }\n\n };\n\n };\n\n ( $name:ty, deep: $deep:expr ) => {\n\n const _: () = {\n\n use crate::traits::impls::{\n\n forward_clear_packed,\n\n forward_pull_packed,\n\n forward_push_packed,\n\n };\n\n impl SpreadLayout for $name\n\n where\n", "file_path": "ink/crates/storage/src/traits/impls/mod.rs", "rank": 27, "score": 180171.36460419054 }, { "content": "use ink_env::hash::{\n\n Blake2x256,\n\n Keccak256,\n\n Sha2x256,\n\n};\n\nuse ink_metadata::layout::{\n\n CryptoHasher,\n\n Layout,\n\n};\n\n\n\n/// Implemented by types that have a storage layout.\n", "file_path": "ink/crates/storage/src/traits/layout/mod.rs", "rank": 28, "score": 180170.2551862681 }, { "content": "\n\n/// Returns the greater of both values.\n\nconst fn max(a: u64, b: u64) -> u64 {\n\n [a, b][(a > b) as usize]\n\n}\n\n\n\n/// Pulls an instance of type `T` in packed fashion from the contract storage.\n\n///\n\n/// Loads the instance from the storage location identified by `ptr`.\n\n/// The storage entity is expected to be decodable in its packed form.\n\n///\n\n/// # Note\n\n///\n\n/// Use this utility function to use a packed pull operation for the type\n\n/// instead of a spread storage layout pull operation.\n\n#[inline]\n", "file_path": "ink/crates/storage/src/traits/impls/mod.rs", "rank": 29, "score": 
180163.57256831496 }, { "content": " forward_clear_packed,\n\n forward_pull_packed,\n\n forward_push_packed,\n\n };\n\n impl<$($frag),+> SpreadLayout for $name < $($frag),+ >\n\n where\n\n $(\n\n $frag: PackedLayout,\n\n )+\n\n {\n\n const FOOTPRINT: u64 = 1;\n\n\n\n const REQUIRES_DEEP_CLEAN_UP: bool = $deep;\n\n\n\n #[inline]\n\n fn pull_spread(ptr: &mut KeyPtr) -> Self {\n\n forward_pull_packed::<Self>(ptr)\n\n }\n\n\n\n #[inline]\n", "file_path": "ink/crates/storage/src/traits/impls/mod.rs", "rank": 30, "score": 180157.84763020207 }, { "content": " Self: PackedLayout,\n\n {\n\n const FOOTPRINT: u64 = 1;\n\n\n\n const REQUIRES_DEEP_CLEAN_UP: bool = $deep;\n\n\n\n #[inline]\n\n fn pull_spread(ptr: &mut KeyPtr) -> Self {\n\n forward_pull_packed::<Self>(ptr)\n\n }\n\n\n\n #[inline]\n\n fn push_spread(&self, ptr: &mut KeyPtr) {\n\n forward_push_packed::<Self>(self, ptr)\n\n }\n\n\n\n #[inline]\n\n fn clear_spread(&self, ptr: &mut KeyPtr) {\n\n forward_clear_packed::<Self>(self, ptr)\n\n }\n", "file_path": "ink/crates/storage/src/traits/impls/mod.rs", "rank": 31, "score": 180157.84763020207 }, { "content": "#[doc(hidden)]\n\npub trait True {}\n", "file_path": "ink/crates/lang/src/traits.rs", "rank": 32, "score": 177947.36202924303 }, { "content": " pub trait MyTrait {\n\n #[ink(constructor)]\n\n fn constructor_1() -> Self;\n\n #[ink(constructor)]\n\n fn constructor_2() -> Self;\n\n #[ink(message)]\n\n fn message_1(&self);\n\n #[ink(message)]\n\n fn message_2(&mut self);\n\n }\n\n })\n\n .unwrap();\n\n let actual = ink_trait\n\n .iter_items()\n\n .flat_map(|item| {\n\n item.filter_map_message()\n\n .map(|message| message.sig().ident.to_string())\n\n })\n\n .collect::<Vec<_>>();\n\n let expected = vec![\"message_1\".to_string(), \"message_2\".to_string()];\n", "file_path": "ink/crates/lang/ir/src/ir/trait_def.rs", "rank": 33, "score": 177813.90987146468 }, { "content": " pub trait MyTrait {\n\n #[ink(constructor)]\n\n fn constructor_1() -> Self;\n\n 
#[ink(constructor)]\n\n fn constructor_2(a: i32, b: i32) -> Self;\n\n }\n\n };\n\n assert_verify_hash2_works_with(\n\n ink_trait,\n\n \"__ink_trait::MyTrait::constructor_1:0,constructor_2:2\",\n\n );\n\n }\n\n}\n", "file_path": "ink/crates/lang/ir/src/ir/trait_def.rs", "rank": 34, "score": 177813.90987146468 }, { "content": "/// Finalizes the global dynamic storage allocator instance.\n\n///\n\n/// This pushes all the accumulated state from this contract execution back to\n\n/// the contract storage to be used in the next contract execution for the same\n\n/// contract instance.\n\n///\n\n/// The global dynamic storage allocator must not be used after this!\n\n///\n\n/// # Note\n\n///\n\n/// Normally users of ink! do not have to call this function directly as it is\n\n/// automatically being use in the correct order and way by the generated code.\n\npub fn finalize() {\n\n init::finalize()\n\n}\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 35, "score": 177686.43107514296 }, { "content": "#[doc(hidden)]\n\npub trait FnState {\n\n /// The storage state.\n\n type State: SpreadLayout + Sized;\n\n}\n\n\n\n/// A dispatchable contract constructor message.\n", "file_path": "ink/crates/lang/src/traits.rs", "rank": 36, "score": 176122.95991979184 }, { "content": "#[doc(hidden)]\n\npub trait FnOutput {\n\n /// The output type.\n\n type Output: scale::Encode + 'static;\n\n}\n\n\n\n/// The selector of dispatchable functions.\n", "file_path": "ink/crates/lang/src/traits.rs", "rank": 37, "score": 176122.95991979184 }, { "content": "#[doc(hidden)]\n\npub trait FnSelector {\n\n /// The selector.\n\n const SELECTOR: Selector;\n\n}\n\n\n\n/// The storage state that the dispatchable function acts on.\n", "file_path": "ink/crates/lang/src/traits.rs", "rank": 38, "score": 176122.95991979187 }, { "content": "#[doc(hidden)]\n\npub trait FnInput {\n\n /// The tuple-type of all inputs.\n\n type Input: scale::Decode + 'static;\n\n}\n\n\n\n/// Dispatchable functions that 
have an output.\n", "file_path": "ink/crates/lang/src/traits.rs", "rank": 39, "score": 176122.95991979187 }, { "content": " pub trait MyTrait: SuperTrait {}\n\n );\n\n }\n\n\n\n #[test]\n\n fn trait_def_containing_const_item_is_denied() {\n\n assert_ink_trait_eq_err!(\n\n error: \"associated constants in ink! trait definitions are not supported, yet\",\n", "file_path": "ink/crates/lang/ir/src/ir/trait_def.rs", "rank": 40, "score": 175506.53296007746 }, { "content": "/// Types implementing this trait are code generators for the ink! language.\n\npub trait GenerateCode {\n\n /// Generates ink! contract code.\n\n fn generate_code(&self) -> TokenStream2;\n\n}\n\n\n", "file_path": "ink/crates/lang/codegen/src/traits.rs", "rank": 41, "score": 174360.10578634468 }, { "content": " pub trait MyTrait<T> {}\n\n );\n\n }\n\n\n\n #[test]\n\n fn trait_def_with_supertraits_is_denied() {\n\n assert_ink_trait_eq_err!(\n\n error: \"ink! trait definitions with super-traits are not supported, yet\",\n", "file_path": "ink/crates/lang/ir/src/ir/trait_def.rs", "rank": 42, "score": 173917.63773390104 }, { "content": "/// Extension trait to make `KeyPtr` simpler to use for `T: SpreadLayout` types.\n\npub trait ExtKeyPtr {\n\n /// Advances the key pointer by the same amount of the footprint of the\n\n /// generic type parameter of `T` and returns the old value.\n\n fn next_for<T>(&mut self) -> &Key\n\n where\n\n T: SpreadLayout;\n\n}\n\n\n\nimpl ExtKeyPtr for KeyPtr {\n\n fn next_for<T>(&mut self) -> &Key\n\n where\n\n T: SpreadLayout,\n\n {\n\n self.advance_by(<T as SpreadLayout>::FOOTPRINT)\n\n }\n\n}\n", "file_path": "ink/crates/storage/src/traits/keyptr.rs", "rank": 43, "score": 172633.8216854957 }, { "content": "#[doc(hidden)]\n\npub trait ImpliesReturn<T> {}\n\n\n\nimpl<T> ImpliesReturn<T> for T {}\n\nimpl<T, E, Callee, GasCost, TransferredValue, Args> ImpliesReturn<T>\n\n for CallBuilder<\n\n E,\n\n Callee,\n\n GasCost,\n\n TransferredValue,\n\n Set<ExecutionInput<Args>>,\n\n 
Set<ReturnType<T>>,\n\n >\n\nwhere\n\n E: Environment,\n\n{\n\n}\n\n\n\nimpl<E, Callee, GasCost, TransferredValue, Args> ImpliesReturn<()>\n\n for CallBuilder<\n\n E,\n", "file_path": "ink/crates/lang/src/traits.rs", "rank": 44, "score": 172004.64750365453 }, { "content": " #[ink_lang::trait_definition]\n\n pub trait FlipperTrait {\n\n #[ink(constructor)]\n\n fn new() -> Self;\n\n\n\n #[ink(message)]\n\n fn flip(&mut self);\n\n\n\n #[ink(message)]\n\n fn get(&self) -> bool;\n\n }\n\n\n\n #[ink(storage)]\n\n pub struct Flipper {\n\n value: bool,\n\n }\n\n\n\n impl FlipperTrait for Flipper {\n\n #[ink(constructor)]\n\n fn new() -> Self {\n\n Self::default()\n", "file_path": "ink/crates/lang/macro/tests/ui/contract/pass/08-flipper-as-dependency-trait.rs", "rank": 45, "score": 171846.61654584145 }, { "content": "#[alloc_error_handler]\n\nfn oom(_: core::alloc::Layout) -> ! {\n\n core::intrinsics::abort()\n\n}\n", "file_path": "ink/crates/allocator/src/handlers.rs", "rank": 46, "score": 169829.12102845008 }, { "content": "/// Tells the global dynamic storage allocator instance how it shall initialize.\n\n///\n\n/// # Note\n\n///\n\n/// Normally users of ink! do not have to call this function directly as it is\n\n/// automatically being use in the correct order and way by the generated code.\n\n///\n\n/// - The `phase` parameter describes for which execution phase the dynamic\n\n/// storage allocator needs to be initialized since this is different\n\n/// in contract instantiations and calls.\n\n/// - This has to be issued before the first interaction with the global allocator.\n\n/// - The actual instantiation will happen only upon the first interaction with\n\n/// the global allocator, e.g. using the `alloc` or `free` calls. 
Until then,\n\n/// it remains uninitialized.\n\n///\n\n/// If this function is not called before the first global allocator interaction\n\n/// then the default initialization scheme is for contract instantiation.\n\n/// However, this behavior might change and must not be relied upon.\n\npub fn initialize(phase: ContractPhase) {\n\n init::initialize(phase);\n\n}\n\n\n", "file_path": "ink/crates/storage/src/alloc/mod.rs", "rank": 47, "score": 169476.96819440415 }, { "content": "/// Define a benchmark for an operation to be run against different size binary heaps\n\ntrait Benchmark {\n\n fn bench(group: &mut BenchmarkGroup<WallTime>, size: u32, new_heap: NewHeap);\n\n}\n\n\n", "file_path": "ink/crates/storage/benches/bench_binary_heap.rs", "rank": 48, "score": 167512.3990908142 }, { "content": "/// Implemented by event types to guide the event topic serialization using the topics builder.\n\n///\n\n/// Normally this trait should be implemented automatically via the ink! codegen.\n\npub trait Topics {\n\n /// Type state indicating how many event topics are to be expected by the topics builder.\n\n type RemainingTopics: EventTopicsAmount;\n\n\n\n /// Guides event topic serialization using the given topics builder.\n\n fn topics<E, B>(\n\n &self,\n\n builder: TopicsBuilder<state::Uninit, E, B>,\n\n ) -> <B as TopicsBuilderBackend<E>>::Output\n\n where\n\n E: Environment,\n\n B: TopicsBuilderBackend<E>;\n\n}\n\n\n\n/// For each topic a hash is generated. This hash must be unique\n\n/// for a field and its value. The `prefix` is concatenated\n\n/// with the `value`. 
This result is then hashed.\n\n/// The `prefix` is typically set to the path a field has in\n\n/// an event struct plus the identifier of the event struct.\n\n///\n", "file_path": "ink/crates/env/src/topics.rs", "rank": 49, "score": 166923.2890384748 }, { "content": "/// The environmental types usable by contracts defined with ink!.\n\npub trait Environment {\n\n /// The maximum number of supported event topics provided by the runtime.\n\n ///\n\n /// The value must match the maximum number of supported event topics of the used runtime.\n\n const MAX_EVENT_TOPICS: usize;\n\n\n\n /// The address type.\n\n type AccountId: 'static + scale::Codec + Clone + PartialEq + Eq + Ord;\n\n\n\n /// The type of balances.\n\n type Balance: 'static\n\n + scale::Codec\n\n + Copy\n\n + Clone\n\n + PartialEq\n\n + Eq\n\n + AtLeast32BitUnsigned;\n\n\n\n /// The type of hash.\n\n type Hash: 'static\n", "file_path": "ink/crates/env/src/types.rs", "rank": 50, "score": 166913.0145999223 }, { "content": "/// The equivalent of `Zero` for hashes.\n\n///\n\n/// A hash that consists only of 0 bits is clear.\n\npub trait Clear {\n\n /// Returns `true` if the hash is clear.\n\n fn is_clear(&self) -> bool;\n\n\n\n /// Returns a clear hash.\n\n fn clear() -> Self;\n\n}\n\n\n\nimpl Clear for [u8; 32] {\n\n fn is_clear(&self) -> bool {\n\n self.as_ref().iter().all(|&byte| byte == 0x00)\n\n }\n\n\n\n fn clear() -> Self {\n\n [0x00; 32]\n\n }\n\n}\n\n\n\nimpl Clear for Hash {\n\n fn is_clear(&self) -> bool {\n", "file_path": "ink/crates/env/src/types.rs", "rank": 51, "score": 166907.45927316067 }, { "content": "/// Saturating arithmetic operations, returning maximum or minimum values instead of overflowing.\n\npub trait Saturating {\n\n /// Saturating addition. Compute `self + rhs`, saturating at the numeric bounds instead of\n\n /// overflowing.\n\n fn saturating_add(self, rhs: Self) -> Self;\n\n\n\n /// Saturating subtraction. 
Compute `self - rhs`, saturating at the numeric bounds instead of\n\n /// overflowing.\n\n fn saturating_sub(self, rhs: Self) -> Self;\n\n\n\n /// Saturating multiply. Compute `self * rhs`, saturating at the numeric bounds instead of\n\n /// overflowing.\n\n fn saturating_mul(self, rhs: Self) -> Self;\n\n\n\n /// Saturating exponentiation. Compute `self.pow(exp)`, saturating at the numeric bounds\n\n /// instead of overflowing.\n\n fn saturating_pow(self, exp: usize) -> Self;\n\n}\n\n\n\nimpl<T> Saturating for T\n\nwhere\n", "file_path": "ink/crates/env/src/arithmetic.rs", "rank": 52, "score": 166907.45927316067 }, { "content": "#[doc(hidden)]\n\npub trait Execute {\n\n /// Starts the smart contract execution.\n\n fn execute(self) -> Result<()>;\n\n}\n\n\n\n/// Yields `true` if the message accepts payments.\n\n#[derive(Copy, Clone)]\n\n#[doc(hidden)]\n\npub struct AcceptsPayments(pub bool);\n\n\n\nimpl From<AcceptsPayments> for bool {\n\n #[inline]\n\n fn from(accepts_payments: AcceptsPayments) -> Self {\n\n accepts_payments.0\n\n }\n\n}\n\n\n\n/// Yields `true` if the dynamic storage allocator is enabled for the given call.\n\n#[derive(Copy, Clone)]\n\n#[doc(hidden)]\n", "file_path": "ink/crates/lang/src/dispatcher.rs", "rank": 53, "score": 166907.45927316067 }, { "content": " /// Seals the implementation of `CryptoHash` and `HashOutput`.\n\n pub trait Sealed {}\n\n}\n\n\n\nimpl private::Sealed for Sha2x256 {}\n\nimpl private::Sealed for Keccak256 {}\n\nimpl private::Sealed for Blake2x256 {}\n\nimpl private::Sealed for Blake2x128 {}\n\n\n\nimpl HashOutput for Sha2x256 {\n\n type Type = [u8; 32];\n\n}\n\n\n\nimpl HashOutput for Keccak256 {\n\n type Type = [u8; 32];\n\n}\n\n\n\nimpl HashOutput for Blake2x256 {\n\n type Type = [u8; 32];\n\n}\n\n\n\nimpl HashOutput for Blake2x128 {\n\n type Type = [u8; 16];\n\n}\n", "file_path": "ink/crates/env/src/hash.rs", "rank": 54, "score": 166907.45927316067 }, { "content": " #[allow(non_camel_case_types)]\n\n pub trait 
__ink_RenameBool {\n\n type Type;\n\n }\n\n impl __ink_RenameBool for [(); true as usize] {\n\n type Type = __ink_CheckSatisfied;\n\n }\n\n impl __ink_RenameBool for [(); false as usize] {\n\n type Type = #event_ident;\n\n }\n\n\n\n #[allow(non_upper_case_globals)]\n\n const __ink_MAX_EVENT_TOPICS: usize = <\n\n <#storage_ident as ::ink_lang::ContractEnv>::Env as ::ink_env::Environment\n\n >::MAX_EVENT_TOPICS;\n\n\n\n fn __ink_ensure_max_event_topics<T>(_: T)\n\n where\n\n T: __ink_RenameBool,\n\n <T as __ink_RenameBool>::Type: CompliesWithTopicLimit,\n\n {}\n", "file_path": "ink/crates/lang/codegen/src/generator/events.rs", "rank": 55, "score": 166561.53521656984 }, { "content": "#[ink::trait_definition]\n\npub trait Constructor {\n\n #[ink(constructor)]\n\n fn constructor() -> Self;\n\n}\n\n\n\n#[ink::contract]\n\nmod noop {\n\n #[ink(storage)]\n\n pub struct Noop {}\n\n\n\n impl Constructor for Noop {\n\n #[ink(constructor, payable)]\n\n fn constructor() -> Self {\n\n Self {}\n\n }\n\n }\n\n\n\n impl Noop {\n\n #[ink(message)]\n\n pub fn noop(&self) {}\n\n }\n\n}\n\n\n", "file_path": "ink/crates/lang/macro/tests/ui/contract/fail/C-15-payable-trait-constructor.rs", "rank": 56, "score": 166172.0850487733 }, { "content": "/// Implemented by [`Set`] and [`Unset`] in order to unwrap their value.\n\n///\n\n/// This is useful in case the use-site does not know if it is working with\n\n/// a set or an unset value generically unwrap it using a closure for fallback.\n\npub trait Unwrap {\n\n /// The output type of the `unwrap_or_else` operation.\n\n type Output;\n\n\n\n /// Returns the set value or evaluates the given closure.\n\n fn unwrap_or_else<F>(self, f: F) -> Self::Output\n\n where\n\n F: FnOnce() -> Self::Output;\n\n}\n\n\n\nimpl<T> Unwrap for Unset<T> {\n\n type Output = T;\n\n\n\n #[inline]\n\n fn unwrap_or_else<F>(self, f: F) -> Self::Output\n\n where\n\n F: FnOnce() -> Self::Output,\n\n {\n\n f()\n\n }\n", "file_path": "ink/crates/env/src/call/common.rs", 
"rank": 57, "score": 164862.0667605698 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\n\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\npub use self::dao_base::DaoBase;\n\n\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\n\n\nmod dao_base {\n\n use alloc::string::String;\n\n use ink_storage::{\n\n traits::{\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n };\n\n /// Construct a structure to display the basic information of Dao as a whole\n\n #[derive(scale::Encode, scale::Decode, Clone, SpreadLayout, PackedLayout)]\n", "file_path": "dao_base/lib.rs", "rank": 58, "score": 51.63323590437581 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\n//use ink_prelude::vec::Vec;\n\npub use self::dao_vault::VaultManager;\n\n\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\nmod dao_vault {\n\n use alloc::string::String;\n\n use ink_storage::{\n\n collections::HashMap as StorageHashMap,\n\n traits::{PackedLayout, SpreadLayout},\n\n };\n\n use erc20::Erc20;\n\n\n\n /// store a transfer record\n\n #[derive(\n\n Debug, Clone, PartialEq, Eq, scale::Encode, scale::Decode, SpreadLayout, PackedLayout,Default\n", "file_path": "dao_vault/lib.rs", "rank": 59, "score": 49.597653687500426 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\npub use self::erc20::{\n\n Erc20\n\n};\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\nmod erc20 {\n\n use alloc::string::String;\n\n use ink_storage::{\n\n collections::HashMap as StorageHashMap,\n\n lazy::Lazy,\n\n traits::{\n\n PackedLayout,\n\n SpreadLayout,\n\n }\n\n };\n\n\n", "file_path": "erc20/lib.rs", "rank": 60, "score": 49.584048384306286 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\npub use 
self::template_manager::TemplateManager;\n\npub use self::template_manager::DAOTemplate;\n\n\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\nmod template_manager {\n\n use alloc::string::String;\n\n use ink_prelude::vec::Vec;\n\n use ink_prelude::collections::BTreeMap;\n\n use ink_storage::{\n\n traits::{\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n collections::HashMap as StorageHashMap,\n\n };\n", "file_path": "template_manager/lib.rs", "rank": 62, "score": 47.230179626802375 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\npub use self::dao_users::{\n\n DaoUsers\n\n};\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\nmod dao_users {\n\n use alloc::string::String;\n\n use ink_prelude::vec::Vec;\n\n use dao_setting::DaoSetting;\n\n use erc20::Erc20;\n\n use ink_prelude::collections::BTreeMap;\n\n use ink_storage::{\n\n collections::HashMap as StorageHashMap,\n\n lazy::Lazy,\n\n traits::{\n\n PackedLayout,\n", "file_path": "dao_users/lib.rs", "rank": 63, "score": 47.02791615267263 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\npub use self::dao_setting::{\n\n DaoSetting\n\n};\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\nmod dao_setting {\n\n use alloc::string::String;\n\n use ink_prelude::vec::Vec;\n\n use ink_prelude::collections::BTreeMap;\n\n use ink_storage::{\n\n collections::HashMap as StorageHashMap,\n\n traits::{PackedLayout, SpreadLayout},\n\n };\n\n\n\n /// Fee limit for joining Dao\n\n /// time_limit:How long is the total limit\n", "file_path": "dao_setting/lib.rs", "rank": 64, "score": 46.62716959123239 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\npub use self::dao_proposal::DaoProposal;\n\n\n\nuse ink_lang as 
ink;\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\nmod dao_proposal {\n\n use ink_env::call::{\n\n build_call,\n\n utils::ReturnType,\n\n ExecutionInput,\n\n };\n\n\n\n use erc20::Erc20;\n\n use alloc::string::String;\n\n use ink_prelude::vec::Vec;\n\n use ink_prelude::collections::BTreeMap;\n\n use ink_storage::{\n", "file_path": "dao_proposal/lib.rs", "rank": 65, "score": 46.6201142379531 }, { "content": " trivial_casts,\n\n trivial_numeric_casts,\n\n unused_extern_crates\n\n)]\n\n\n\n#[cfg(all(test, feature = \"std\", feature = \"ink-fuzz-tests\"))]\n\n#[macro_use(quickcheck)]\n\nextern crate quickcheck_macros;\n\n\n\npub mod alloc;\n\npub mod collections;\n\npub mod lazy;\n\nmod memory;\n\nmod pack;\n\npub mod traits;\n\n\n\n#[cfg(test)]\n\nmod hashmap_entry_api_tests;\n\n\n\n#[cfg(test)]\n", "file_path": "ink/crates/storage/src/lib.rs", "rank": 66, "score": 46.61753187752085 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\n\n\n#[allow(unused_imports)]\n\n#[ink::contract]\n\nmod dao_category {\n\n\n\n use alloc::string::String;\n\n use ink_prelude::vec::Vec;\n\n use ink_prelude::collections::BTreeMap;\n\n use ink_storage::{collections::HashMap as StorageHashMap, };\n\n\n\n /// the contract store the category of dao\n\n /// owner:the manager of the contract\n\n /// category_map:HashMap of the index and name\n\n #[ink(storage)]\n\n pub struct DaoCategory {\n\n owner: AccountId,\n", "file_path": "dao_category/lib.rs", "rank": 67, "score": 45.088763226176276 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![feature(const_fn_trait_bound)]\n\nextern crate alloc;\n\nuse ink_lang as ink;\n\n\n\npub use self::dao_manager::DAOManager;\n\n\n\n#[allow(unused_imports)]\n\n#[allow(dead_code)]\n\n#[ink::contract]\n\nmod dao_manager {\n\n use alloc::string::String;\n\n use template_manager::DAOTemplate;\n\n use dao_base::DaoBase;\n\n use 
dao_users::DaoUsers;\n\n use dao_setting::DaoSetting;\n\n use dao_proposal::DaoProposal;\n\n use erc20::Erc20;\n\n use dao_vault::VaultManager;\n\n use ink_prelude::vec::Vec;\n", "file_path": "dao_manager/lib.rs", "rank": 68, "score": 44.41792681597209 }, { "content": "//! The `ink_prelude` crate guarantees a stable interface between `std` and `no_std` mode.\n\n\n\n#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nextern crate alloc;\n\n\n\nuse cfg_if::cfg_if;\n\n\n\ncfg_if! {\n\n if #[cfg(feature = \"std\")] {\n\n pub use std::{\n\n borrow,\n\n boxed,\n\n format,\n\n string,\n\n vec,\n\n };\n\n\n\n /// Collection types.\n", "file_path": "ink/crates/prelude/src/lib.rs", "rank": 69, "score": 40.224338098369095 }, { "content": "\n\n#[cfg(all(\n\n test,\n\n feature = \"std\",\n\n feature = \"ink-fuzz-tests\",\n\n not(feature = \"wee-alloc\")\n\n))]\n\n#[macro_use(quickcheck)]\n\nextern crate quickcheck_macros;\n", "file_path": "ink/crates/allocator/src/lib.rs", "rank": 70, "score": 39.05113498338335 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::contract]\n\nmod delegator {\n\n use accumulator::AccumulatorRef;\n\n use ink_storage::{\n\n traits::{\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n };\n\n\n\n #[derive(Debug, scale::Encode, scale::Decode, Clone, SpreadLayout, PackedLayout)]\n\n #[cfg_attr(\n\n feature = \"std\",\n\n derive(scale_info::TypeInfo, ink_storage::traits::StorageLayout)\n\n )]\n\n pub struct AddrInstance {\n", "file_path": "ink/examples/delegator/lib.rs", "rank": 71, "score": 38.87969830033278 }, { "content": " },\n\n};\n\n\n\n/// The dynamic allocator.\n\n///\n\n/// Manages dynamic storage allocations in a very efficient and economical way.\n\n#[derive(Debug, Default, PartialEq, Eq)]\n\npub struct DynamicAllocator {\n\n allocations: BitStash,\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::traits::StorageLayout;\n\n use 
ink_metadata::layout::{\n\n FieldLayout,\n\n Layout,\n\n StructLayout,\n\n };\n\n\n", "file_path": "ink/crates/storage/src/alloc/allocator.rs", "rank": 72, "score": 36.203848162269516 }, { "content": "use crate::{\n\n collections::Stash as StorageStash,\n\n traits::{\n\n forward_clear_packed,\n\n forward_pull_packed,\n\n forward_push_packed,\n\n KeyPtr,\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n};\n\nuse ink_env::hash::{\n\n CryptoHash,\n\n HashOutput,\n\n};\n\nuse ink_primitives::Key;\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::{\n", "file_path": "ink/crates/storage/src/collections/hashmap/storage.rs", "rank": 73, "score": 32.37712133205875 }, { "content": "};\n\nuse crate::{\n\n lazy::LazyIndexMap,\n\n traits::{\n\n forward_clear_packed,\n\n forward_pull_packed,\n\n forward_push_packed,\n\n KeyPtr,\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n};\n\nuse ink_primitives::Key;\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::{\n\n collections::Vec as StorageVec,\n\n traits::StorageLayout,\n\n };\n", "file_path": "ink/crates/storage/src/collections/stash/storage.rs", "rank": 74, "score": 32.275580844795996 }, { "content": "mod iter;\n\nmod storage;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n#[cfg(all(test, feature = \"ink-fuzz-tests\"))]\n\nmod fuzz_tests;\n\n\n\npub use self::iter::{\n\n Iter,\n\n IterMut,\n\n};\n\nuse crate::{\n\n lazy::{\n\n Lazy,\n\n LazyIndexMap,\n\n },\n\n traits::PackedLayout,\n\n};\n", "file_path": "ink/crates/storage/src/collections/vec/mod.rs", "rank": 75, "score": 32.15702567219834 }, { "content": " use ink_prelude::collections::BTreeMap;\n\n use ink_storage::{\n\n collections::HashMap as StorageHashMap,\n\n traits::{PackedLayout, SpreadLayout},\n\n };\n\n\n\n const CONTRACT_INIT_BALANCE: u128 = 1000 * 1_000_000_000_000;\n\n\n\n\n\n /// DAO component instances\n\n /// base:the instance of base\n\n /// erc20:the instance of erc20\n\n /// dao_users:the instance of dao_users\n\n /// dao_setting:the 
instance of dao_setting\n\n /// vault:the instance of vault\n\n #[derive(Debug, scale::Encode, scale::Decode, Clone, SpreadLayout, PackedLayout)]\n\n #[cfg_attr(\n\n feature = \"std\",\n\n derive(scale_info::TypeInfo, ink_storage::traits::StorageLayout)\n\n )]\n", "file_path": "dao_manager/lib.rs", "rank": 76, "score": 31.860066555644647 }, { "content": " Bitvec as StorageBitvec,\n\n Vec as StorageVec,\n\n },\n\n traits::{\n\n forward_clear_packed,\n\n forward_pull_packed,\n\n forward_push_packed,\n\n KeyPtr,\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n};\n\nuse ink_primitives::Key;\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::traits::StorageLayout;\n\n use ink_metadata::layout::{\n\n FieldLayout,\n\n Layout,\n", "file_path": "ink/crates/storage/src/collections/bitstash/storage.rs", "rank": 77, "score": 31.766443882959788 }, { "content": "use crate::traits::{\n\n KeyPtr,\n\n PackedLayout,\n\n SpreadLayout,\n\n};\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::traits::StorageLayout;\n\n use ink_metadata::layout::{\n\n FieldLayout,\n\n Layout,\n\n StructLayout,\n\n };\n\n use scale_info::TypeInfo;\n\n\n\n impl<T> StorageLayout for BinaryHeap<T>\n\n where\n\n T: PackedLayout + Ord + TypeInfo + 'static,\n\n {\n", "file_path": "ink/crates/storage/src/collections/binary_heap/storage.rs", "rank": 78, "score": 31.60527206759867 }, { "content": " forward_push_packed,\n\n KeyPtr,\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n};\n\nuse ink_prelude::vec::Vec;\n\nuse ink_primitives::Key;\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::traits::StorageLayout;\n\n use ink_metadata::layout::{\n\n CellLayout,\n\n Layout,\n\n LayoutKey,\n\n };\n\n\n\n impl<T> StorageLayout for StorageBox<T>\n\n where\n", "file_path": "ink/crates/storage/src/alloc/boxed/storage.rs", "rank": 79, "score": 31.253197748522837 }, { "content": " match self {\n\n Item::Const(syn::ItemConst { attrs, .. })\n\n | Item::Enum(syn::ItemEnum { attrs, .. 
})\n\n | Item::ExternCrate(syn::ItemExternCrate { attrs, .. })\n\n | Item::Fn(syn::ItemFn { attrs, .. })\n\n | Item::ForeignMod(syn::ItemForeignMod { attrs, .. })\n\n | Item::Impl(syn::ItemImpl { attrs, .. })\n\n | Item::Macro(syn::ItemMacro { attrs, .. })\n\n | Item::Macro2(syn::ItemMacro2 { attrs, .. })\n\n | Item::Mod(syn::ItemMod { attrs, .. })\n\n | Item::Static(syn::ItemStatic { attrs, .. })\n\n | Item::Struct(syn::ItemStruct { attrs, .. })\n\n | Item::Trait(syn::ItemTrait { attrs, .. })\n\n | Item::TraitAlias(syn::ItemTraitAlias { attrs, .. })\n\n | Item::Type(syn::ItemType { attrs, .. })\n\n | Item::Union(syn::ItemUnion { attrs, .. })\n\n | Item::Use(syn::ItemUse { attrs, .. }) => attrs,\n\n _ => &[],\n\n }\n\n }\n", "file_path": "ink/crates/lang/ir/src/ir/attrs.rs", "rank": 80, "score": 31.001313087982815 }, { "content": " forward_clear_packed,\n\n forward_pull_packed,\n\n forward_push_packed,\n\n KeyPtr,\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n Pack,\n\n Vec as StorageVec,\n\n};\n\nuse ink_primitives::Key;\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::{\n\n lazy::Lazy,\n\n traits::StorageLayout,\n\n };\n\n use ink_metadata::layout::{\n\n FieldLayout,\n", "file_path": "ink/crates/storage/src/collections/bitvec/storage.rs", "rank": 81, "score": 30.839859217745804 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::contract]\n\nmod erc20 {\n\n #[cfg(not(feature = \"ink-as-dependency\"))]\n\n use ink_lang as ink;\n\n\n\n #[cfg(not(feature = \"ink-as-dependency\"))]\n\n use ink_lang::{\n\n EmitEvent,\n\n Env,\n\n };\n\n\n\n #[cfg(not(feature = \"ink-as-dependency\"))]\n\n use ink_storage::{\n\n collections::HashMap as StorageHashMap,\n\n lazy::Lazy,\n\n };\n", "file_path": "ink/examples/trait-erc20/lib.rs", "rank": 82, "score": 30.679840195748685 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![cfg_attr(not(feature = \"std\"), feature(alloc_error_handler, 
core_intrinsics))]\n\n\n\n// We use `wee_alloc` as the global allocator since it is optimized for binary file size\n\n// so that contracts compiled with it as allocator do not grow too much in size.\n\n#[cfg(not(feature = \"std\"))]\n\n#[cfg(feature = \"wee-alloc\")]\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\n#[cfg(not(feature = \"std\"))]\n\n#[cfg(not(feature = \"wee-alloc\"))]\n\n#[global_allocator]\n\nstatic mut ALLOC: bump::BumpAllocator = bump::BumpAllocator {};\n\n\n\n#[cfg(not(feature = \"wee-alloc\"))]\n\nmod bump;\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nmod handlers;\n", "file_path": "ink/crates/allocator/src/lib.rs", "rank": 83, "score": 30.396606915254385 }, { "content": "///\n\n/// Use instances of this type in order to have some shared state between\n\n/// contract messages and functions.\n\n/// Its usage is comparable to the Solidity's `memory` instances.\n\n/// Pulling an instance of this type from the contract storage will always\n\n/// yield a default constructed value.\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct Memory<T> {\n\n /// The inner value that will always be stored within contract memory.\n\n inner: T,\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::traits::StorageLayout;\n\n use ink_metadata::layout::{\n\n CellLayout,\n\n Layout,\n\n LayoutKey,\n\n };\n", "file_path": "ink/crates/storage/src/memory.rs", "rank": 84, "score": 30.334848115353118 }, { "content": " )]\n\n #[cfg_attr(\n\n feature = \"std\",\n\n derive(::scale_info::TypeInfo, ::ink_storage::traits::StorageLayout)\n\n )]\n\n pub struct ERC20Param {\n\n owner: AccountId,\n\n name: String,\n\n symbol: String,\n\n total_supply: u128,\n\n decimals: u8,\n\n }\n\n /// the union dao setting\n\n #[derive(\n\n Debug, Clone, PartialEq, Eq, scale::Encode, scale::Decode, SpreadLayout, PackedLayout, Default\n\n )]\n\n #[cfg_attr(\n\n feature = \"std\",\n\n 
derive(::scale_info::TypeInfo, ::ink_storage::traits::StorageLayout)\n\n )]\n", "file_path": "dao_manager/lib.rs", "rank": 85, "score": 30.067098160294826 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::trait_definition]\n", "file_path": "ink/examples/trait-flipper/lib.rs", "rank": 86, "score": 30.046957884128794 }, { "content": " KeyPtr,\n\n PackedLayout,\n\n SpreadLayout,\n\n },\n\n};\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::{\n\n lazy::Lazy,\n\n traits::StorageLayout,\n\n };\n\n use ink_metadata::layout::{\n\n FieldLayout,\n\n Layout,\n\n StructLayout,\n\n };\n\n use scale_info::TypeInfo;\n\n\n\n impl<T> StorageLayout for StorageVec<T>\n", "file_path": "ink/crates/storage/src/collections/vec/storage.rs", "rank": 87, "score": 30.007040562133206 }, { "content": " /// fee_limit:the number of fee\n\n /// token:the token of limit\n\n #[derive(\n\n Debug, Clone, PartialEq, Eq, scale::Encode, scale::Decode, SpreadLayout, PackedLayout, Default\n\n )]\n\n #[cfg_attr(\n\n feature = \"std\",\n\n derive(::scale_info::TypeInfo, ::ink_storage::traits::StorageLayout)\n\n )]\n\n pub struct FeeConditions {\n\n pub time_limit:u128,\n\n pub fee_limit:u128,\n\n pub token:AccountId\n\n }\n\n\n\n /// Other limit for joining Dao\n\n /// use_token:Whether to enable token restriction\n\n /// use_nft:Whether to enable nft restriction\n\n /// token:the token of limit\n\n /// token_balance_limit:the balance of limit\n", "file_path": "dao_setting/lib.rs", "rank": 88, "score": 29.709109229987813 }, { "content": " /// nft:the nft address\n\n /// nft_balance_limit:the balance of limit\n\n /// nft_time_limit:Remaining time limit of NFT\n\n #[derive(\n\n Debug, Clone, PartialEq, Eq, scale::Encode, scale::Decode, SpreadLayout, PackedLayout, Default\n\n )]\n\n #[cfg_attr(\n\n feature = \"std\",\n\n derive(::scale_info::TypeInfo, ::ink_storage::traits::StorageLayout)\n\n )]\n\n pub struct OtherConditions {\n\n pub 
use_token:bool,\n\n pub use_nft:bool,\n\n pub token:AccountId,\n\n pub token_balance_limit:u128,\n\n pub nft:AccountId,\n\n pub nft_balance_limit:u128,\n\n pub nft_time_limit:u128\n\n }\n\n ///creator:the creator's address\n", "file_path": "dao_setting/lib.rs", "rank": 89, "score": 29.6508759255027 }, { "content": "### Trait Definitions\n\n\n\nUse `#[ink::trait_definition]` to define your very own trait definitions that are then implementable by ink! smart contracts.\n\nSee e.g. the [`examples/trait-erc20`](https://github.com/paritytech/ink/blob/v3.0.0-rc5/examples/trait-erc20/lib.rs#L35-L37) contract on how to utilize it or [the documentation](https://paritytech.github.io/ink/ink_lang/attr.trait_definition.html) for details.\n\n\n\n### Off-chain Testing\n\n\n\nThe `#[ink::test]` procedural macro enables off-chain testing. See e.g. the [`examples/erc20`](https://github.com/paritytech/ink/blob/v3.0.0-rc5/examples/erc20/lib.rs#L248-L250) contract on how to utilize those or [the documentation](https://paritytech.github.io/ink/ink_lang/attr.test.html) for details.\n\n\n\n## Developer Documentation\n\n\n\nWe have [a very comprehensive documentation portal](https://paritytech.github.io/ink-docs),\n\nbut if you are looking for the crate level documentation itself, then these are\n\nthe relevant links:\n\n\n\n| Crate | Docs | Description |\n\n|:--|:--|:--|\n\n`ink_lang` | [![][j1]][j2] | Language features expose by ink!. See [here](https://paritytech.github.io/ink/ink_lang/attr.contract.html) for a detailed description of attributes which you can use in an `#[ink::contract]`. |\n\n`ink_storage` | [![][f1]][f2] | Data structures available in ink!. |\n\n`ink_env` | [![][g1]][g2] | Low-level interface for interacting with the smart contract Wasm executor. |\n\n`ink_prelude` | [![][i1]][i2] | Common API for no_std and std to access alloc crate types. 
|\n\n\n\n\n\n## Contributing\n\n\n\nVisit our [contribution guidelines](CONTRIBUTING.md) for more information.\n\n\n\nUse the scripts provided under `scripts/check-*` directory in order to run checks on either the workspace or all examples. Please do this before pushing work in a PR.\n\n\n\n## License\n\n\n\nThe entire code within this repository is licensed under the [Apache License 2.0](LICENSE).\n\n\n\nPlease [contact us](https://www.parity.io/contact/) if you have questions about the licensing of our products.\n", "file_path": "ink/README.md", "rank": 90, "score": 29.483074915816395 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::traits::StorageLayout;\n\n use ink_metadata::layout::Layout;\n\n\n\n impl<T> StorageLayout for LazyCell<T>\n\n where\n\n T: StorageLayout + SpreadLayout,\n\n {\n\n fn layout(key_ptr: &mut KeyPtr) -> Layout {\n\n <T as StorageLayout>::layout(key_ptr)\n\n }\n\n }\n\n};\n\n\n", "file_path": "ink/crates/storage/src/lazy/lazy_cell.rs", "rank": 91, "score": 29.305230267915036 }, { "content": "};\n\nuse crate::{\n\n alloc,\n\n traits::{\n\n KeyPtr,\n\n SpreadLayout,\n\n },\n\n};\n\nuse ink_env::{\n\n test,\n\n DefaultEnvironment,\n\n};\n\nuse ink_primitives::Key;\n\n\n", "file_path": "ink/crates/storage/src/alloc/tests.rs", "rank": 93, "score": 29.029738118028007 }, { "content": "\n\n let expected_limit = 3 * PAGE_SIZE;\n\n assert_eq!(inner.upper_limit, expected_limit);\n\n\n\n let expected_alloc_start = 2 * PAGE_SIZE + size_of::<u8>();\n\n assert_eq!(inner.next, expected_alloc_start);\n\n }\n\n}\n\n\n\n#[cfg(all(test, feature = \"ink-fuzz-tests\"))]\n\nmod fuzz_tests {\n\n use super::*;\n\n use quickcheck::{\n\n quickcheck,\n\n TestResult,\n\n };\n\n use std::mem::size_of;\n\n\n\n const FROM_SIZE_ALIGN_EXPECT: &str =\n\n \"The rounded value of `size` cannot be more than `usize::MAX` since we have\n", "file_path": "ink/crates/allocator/src/bump.rs", "rank": 94, "score": 
28.733389873284537 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nuse ink_lang as ink;\n\n\n\n#[ink::contract]\n\nmod dns {\n\n #[cfg(not(feature = \"ink-as-dependency\"))]\n\n use ink_storage::{\n\n collections::hashmap::Entry,\n\n collections::HashMap as StorageHashMap,\n\n lazy::Lazy,\n\n };\n\n\n\n /// Emitted whenever a new name is being registered.\n\n #[ink(event)]\n\n pub struct Register {\n\n #[ink(topic)]\n\n name: Hash,\n\n #[ink(topic)]\n\n from: AccountId,\n", "file_path": "ink/examples/dns/lib.rs", "rank": 95, "score": 28.513074391389075 }, { "content": "/// - on-chain mode: `no_std` and WebAssembly as target\n\n/// - off-chain mode: `std`\n\n///\n\n/// We generally use the on-chain mode for actual smart contract deployment\n\n/// whereas we use the off-chain mode for smart contract testing using the\n\n/// off-chain environment provided by the `ink_env` crate.\n\n///\n\n/// # Usage\n\n///\n\n/// ## Header Arguments\n\n///\n\n/// The `#[ink::contract]` macro can be provided with some additional comma-separated\n\n/// header arguments:\n\n///\n\n/// - `dynamic_storage_allocator: bool`\n\n///\n\n/// Tells the ink! code generator to allow usage of ink!'s built-in dynamic\n\n/// storage allocator.\n\n/// - `true`: Use the dynamic storage allocator provided by ink!.\n\n/// - `false`: Do NOT use the dynamic storage allocator provided by ink!.\n", "file_path": "ink/crates/lang/macro/src/lib.rs", "rank": 96, "score": 28.506534139269228 }, { "content": "//! By introducing `ink_primitives` we have a way to share utility components between `ink_env` or `ink_storage` and\n\n//! other parts of the framework, like `ink_lang`.\n\n\n\n#![cfg_attr(not(feature = \"std\"), no_std)]\n\n\n\nmod key;\n\nmod key_ptr;\n\n\n\npub use self::{\n\n key::Key,\n\n key_ptr::KeyPtr,\n\n};\n", "file_path": "ink/crates/primitives/src/lib.rs", "rank": 97, "score": 28.298041889490793 }, { "content": "//! 
emulator for simple off-chain testing.\n\n\n\n#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![deny(\n\n missing_docs,\n\n bad_style,\n\n bare_trait_objects,\n\n const_err,\n\n improper_ctypes,\n\n non_shorthand_field_patterns,\n\n no_mangle_generic_items,\n\n overflowing_literals,\n\n path_statements,\n\n patterns_in_fns_without_body,\n\n private_in_public,\n\n unconditional_recursion,\n\n unused_allocation,\n\n unused_comparisons,\n\n unused_parens,\n\n while_true,\n\n trivial_casts,\n\n trivial_numeric_casts,\n\n unused_extern_crates\n\n)]\n\n\n\n#[cfg(all(not(feature = \"std\"), target_arch = \"wasm32\"))]\n\n#[allow(unused_variables)]\n\n#[panic_handler]\n", "file_path": "ink/crates/env/src/lib.rs", "rank": 98, "score": 28.06067836760064 }, { "content": "///\n\n/// Use this if the storage field does not need to be loaded in some or most cases.\n\n#[derive(Debug)]\n\npub struct Lazy<T>\n\nwhere\n\n T: SpreadLayout,\n\n{\n\n cell: LazyCell<T>,\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nconst _: () = {\n\n use crate::traits::StorageLayout;\n\n use ink_metadata::layout::Layout;\n\n\n\n impl<T> StorageLayout for Lazy<T>\n\n where\n\n T: StorageLayout + SpreadLayout,\n\n {\n\n fn layout(key_ptr: &mut KeyPtr) -> Layout {\n", "file_path": "ink/crates/storage/src/lazy/mod.rs", "rank": 99, "score": 28.047440908109632 } ]
Rust
src/wallet/state/log.rs
SebastienGllmt/cardano-cli
2470fe2dcb036226ade6f8c80a56d0610a623767
use storage_units::{append, utils::{serialize, lock::{self, Lock}}}; use std::{path::{PathBuf}, fmt, result, io::{self, Read, Write}, error}; use cardano::{block::{BlockDate, HeaderHash, types::EpochSlotId}}; use super::{ptr::{StatePtr}, utxo::{UTxO}}; use serde; use serde_yaml; #[derive(Debug)] pub enum Error { LogNotFound, IoError(io::Error), LogFormatError(String), LockError(lock::Error), AppendError(append::Error), UnsupportedLogFormat(Vec<u8>) } impl From<io::Error> for Error { fn from(e: io::Error) -> Self { Error::IoError(e) } } impl From<lock::Error> for Error { fn from(e: lock::Error) -> Self { Error::LockError(e) } } impl From<append::Error> for Error { fn from(e: append::Error) -> Self { match e { append::Error::NotFound => Error::LogNotFound, _ => Error::AppendError(e) } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::LogNotFound => write!(f, "Log file not found"), Error::IoError(_) => write!(f, "I/O Error"), Error::LogFormatError(err) => write!(f, "Log format error: `{}`", err), Error::LockError(_) => write!(f, "Log's Lock file error"), Error::AppendError(_) => write!(f, "Error when appending data to the log file"), Error::UnsupportedLogFormat(_) => { write!(f, "Unsupported Log format (tried to deserialize log of unknown encoding or log is corrupted)") } } } } impl error::Error for Error { fn cause(&self) -> Option<& error::Error> { match self { Error::LogNotFound => None, Error::IoError(ref err) => Some(err), Error::LogFormatError(_) => None, Error::LockError(ref err) => Some(err), Error::AppendError(ref err) => Some(err), Error::UnsupportedLogFormat(_) => None, } } } pub type Result<T> = result::Result<T, Error>; const MAGIC : &'static [u8] = b"EVT1"; #[derive(Debug, Serialize, Deserialize)] pub enum Log<A> { Checkpoint(StatePtr), ReceivedFund(StatePtr, UTxO<A>), SpentFund(StatePtr, UTxO<A>) } impl<A: serde::Serialize> Log<A> { fn serialise(&self) -> Result<Vec<u8>> { let mut writer = 
Vec::with_capacity(64); let ptr = self.ptr(); let date = ptr.latest_block_date(); writer.write_all(b"EVT1")?; writer.write_all(ptr.latest_known_hash.as_ref())?; match date { BlockDate::Genesis(i) => { serialize::utils::write_u64(&mut writer, i as u64)?; serialize::utils::write_u64(&mut writer, u64::max_value())?; }, BlockDate::Normal(i) => { serialize::utils::write_u64(&mut writer, i.epoch as u64)?; serialize::utils::write_u64(&mut writer, i.slotid as u64)?; }, } match self { Log::Checkpoint(_) => { serialize::utils::write_u32(&mut writer, 1)?; serialize::utils::write_u64(&mut writer, 0)?; }, Log::ReceivedFund(_, utxo) => { serialize::utils::write_u32(&mut writer, 2)?; serialize::utils::write_u64(&mut writer, 0)?; serde_yaml::to_writer(&mut writer, utxo).map_err(|e| { Error::LogFormatError(format!("log format error: {:?}", e)) })?; }, Log::SpentFund(_, utxo) => { serialize::utils::write_u32(&mut writer, 3)?; serialize::utils::write_u64(&mut writer, 0)?; serde_yaml::to_writer(&mut writer, utxo).map_err(|e| { Error::LogFormatError(format!("log format error: {:?}", e)) })?; }, } Ok(writer) } } impl<A> Log<A> where for<'de> A: serde::Deserialize<'de> { fn deserisalise(bytes: &[u8]) -> Result<Self> { let mut reader = bytes; { let mut magic = [0u8; 4]; reader.read_exact(&mut magic)?; if magic != MAGIC { return Err(Error::UnsupportedLogFormat(magic.iter().cloned().collect())); } } let ptr = { let mut hash = [0;32]; reader.read_exact(&mut hash)?; let gen = serialize::utils::read_u64(&mut reader)?; let slot = serialize::utils::read_u64(&mut reader)?; let hh = HeaderHash::from(hash); let bd = if slot == 0xFFFFFFFFFFFFFFFF { BlockDate::Genesis(gen as u64) } else { BlockDate::Normal(EpochSlotId { epoch: gen as u64, slotid: slot as u16 }) }; StatePtr::new(bd, hh) }; let t = { let t = serialize::utils::read_u32(&mut reader)?; let b = serialize::utils::read_u64(&mut reader)?; debug_assert!(b == 0u64); t }; match t { 1 => Ok(Log::Checkpoint(ptr)), 2 => { let utxo = 
serde_yaml::from_slice(reader).map_err(|e| Error::LogFormatError(format!("log format error: {:?}", e)) )?; Ok(Log::ReceivedFund(ptr, utxo)) }, 3 => { let utxo = serde_yaml::from_slice(reader).map_err(|e| Error::LogFormatError(format!("log format error: {:?}", e)) )?; Ok(Log::SpentFund(ptr, utxo)) }, _ => { panic!("cannot parse log event of type: `{}'", t) } } } } impl<A> Log<A> { pub fn ptr<'a>(&'a self) -> &'a StatePtr { match self { Log::Checkpoint(ptr) => ptr, Log::ReceivedFund(ptr, _) => ptr, Log::SpentFund(ptr, _) => ptr, } } pub fn map<F, U>(self, f: F) -> Log<U> where F: FnOnce(A) -> U { match self { Log::Checkpoint(ptr) => Log::Checkpoint(ptr), Log::ReceivedFund(ptr, utxo) => Log::ReceivedFund(ptr, utxo.map(f)), Log::SpentFund(ptr, utxo) => Log::SpentFund(ptr, utxo.map(f)), } } } impl<A: fmt::Display> fmt::Display for Log<A> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Log::Checkpoint(ptr) => write!(f, "Checkpoint at: {}", ptr), Log::ReceivedFund(ptr, utxo) => write!(f, "Received funds at: {} {}", ptr, utxo), Log::SpentFund(ptr, utxo) => write!(f, "Spent funds at: {} {}", ptr, utxo), } } } const WALLET_LOG_FILE : &'static str = "LOG"; pub struct LogLock(lock::Lock); impl LogLock { pub fn acquire_wallet_log_lock(wallet_path: PathBuf) -> Result<Self> { Ok(LogLock(Lock::lock(wallet_path.join(WALLET_LOG_FILE))?)) } pub fn delete_wallet_log_lock(self, wallet_path: PathBuf) -> ::std::io::Result<()> { let file = wallet_path.join(WALLET_LOG_FILE); ::std::fs::remove_file(file) } } pub struct LogReader(append::Reader); impl LogReader { pub fn open(locked: LogLock) -> Result<Self> { Ok(LogReader(append::Reader::open(locked.0)?)) } pub fn release_lock(self) -> LogLock { LogLock(self.0.close()) } pub fn into_iter<A>(self) -> LogIterator<A> where for<'de> A: serde::Deserialize<'de> { LogIterator {reader: self, _log_type: ::std::marker::PhantomData } } pub fn next<A>(&mut self) -> Result<Option<Log<A>>> where for<'de> A: serde::Deserialize<'de> { 
match self.0.next()? { None => Ok(None), Some(bytes) => { let log = Log::deserisalise(&bytes)?; Ok(Some(log)) } } } } pub struct LogIterator<A> { reader: LogReader, _log_type: ::std::marker::PhantomData<A> } impl<A> Iterator for LogIterator<A> where for<'de> A: serde::Deserialize<'de> { type Item = Result<Log<A>>; fn next(&mut self) -> Option<Self::Item> { match self.reader.next() { Err(err) => Some(Err(err)), Ok(None) => None, Ok(Some(log)) => Some(Ok(log)) } } } pub struct LogWriter(append::Writer); impl LogWriter { pub fn open(locked: LogLock) -> Result<Self> { Ok(LogWriter(append::Writer::open(locked.0)?)) } pub fn release_lock(self) -> LogLock { LogLock(self.0.close()) } pub fn append<A: serde::Serialize+fmt::Debug>(&mut self, log: &Log<A>) -> Result<()> { Ok(self.0.append_bytes(&log.serialise()?)?) } }
use storage_units::{append, utils::{serialize, lock::{self, Lock}}}; use std::{path::{PathBuf}, fmt, result, io::{self, Read, Write}, error}; use cardano::{block::{BlockDate, HeaderHash, types::EpochSlotId}}; use super::{ptr::{StatePtr}, utxo::{UTxO}}; use serde; use serde_yaml; #[derive(Debug)] pub enum Error { LogNotFound, IoError(io::Error), LogFormatError(String), LockError(lock::Error), AppendError(append::Error), UnsupportedLogFormat(Vec<u8>) } impl From<io::Error> for Error { fn from(e: io::Error) -> Self { Error::IoError(e) } } impl From<lock::Error> for Error { fn from(e: lock::Error) -> Self { Error::LockError(e) } } impl From<append::Error> for Error { fn from(e: append::Error) -> Self { match e { append::Error::NotFound => Error::LogNotFound, _ => Error::AppendError(e) } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Error::LogNotFound => write!(f, "Log file not found"), Error::IoError(_) => write!(f, "I/O Error"), Error::LogFormatError(err) => write!(f, "Log format error: `{}`", err), Error::LockError(_) => write!(f, "Log's Lock file error"), Error::AppendError(_) => write!(f, "Error when appending data to the log file"), Error::UnsupportedLogFormat(_) => { write!(f, "Unsupported Log format (tried to deserialize log of unknown encoding or log is corrupted)") } } } } impl error::Error for Error { fn cause(&self) -> Option<& error::Error> { match self { Error::LogNotFound => None, Error::IoError(ref err) => Some(err), Error::LogFormatError(_) => None, Error::LockError(ref err) => Some(err), Error::AppendError(ref err) => Some(err), Error::UnsupportedLogFormat(_) => None, } } } pub type Result<T> = result::Result<T, Error>; const MAGIC : &'static [u8] = b"EVT1"; #[derive(Debug, Serialize, Deserialize)] pub enum Log<A> { Checkpoint(StatePtr), ReceivedFund(StatePtr, UTxO<A>), SpentFund(StatePtr, UTxO<A>) } impl<A: serde::Serialize> Log<A> { fn serialise(&self) -> Result<Vec<u8>> { let mut writer = 
Vec::with_capacity(64); let ptr = self.ptr(); let date = ptr.latest_block_date(); writer.write_all(b"EVT1")?; writer.write_all(ptr.latest_known_hash.as_ref())?; match date { BlockDate::Genesis(i) => { serialize::utils::write_u64(&mut writer, i as u64)?; serialize::utils::write_u64(&mut writer, u64::max_value())?; }, BlockDate::Normal(i) => { serialize::utils::write_u64(&mut writer, i.epoch as u64)?; serialize::utils::write_u64(&mut writer, i.slotid as u64)?; }, } match self { Log::Checkpoint(_) => { serialize::utils::write_u32(&mut writer, 1)?; serialize::utils::write_u64(&mut writer, 0)?; }, Log::ReceivedFund(_, utxo) => { serialize::utils::write_u32(&mut writer, 2)?; serialize::utils::write_u64(&mut writer, 0)?; serde_yaml::to_writer(&mut writer, utxo).map_err(|e| {
:utils::read_u32(&mut reader)?; let b = serialize::utils::read_u64(&mut reader)?; debug_assert!(b == 0u64); t }; match t { 1 => Ok(Log::Checkpoint(ptr)), 2 => { let utxo = serde_yaml::from_slice(reader).map_err(|e| Error::LogFormatError(format!("log format error: {:?}", e)) )?; Ok(Log::ReceivedFund(ptr, utxo)) }, 3 => { let utxo = serde_yaml::from_slice(reader).map_err(|e| Error::LogFormatError(format!("log format error: {:?}", e)) )?; Ok(Log::SpentFund(ptr, utxo)) }, _ => { panic!("cannot parse log event of type: `{}'", t) } } } } impl<A> Log<A> { pub fn ptr<'a>(&'a self) -> &'a StatePtr { match self { Log::Checkpoint(ptr) => ptr, Log::ReceivedFund(ptr, _) => ptr, Log::SpentFund(ptr, _) => ptr, } } pub fn map<F, U>(self, f: F) -> Log<U> where F: FnOnce(A) -> U { match self { Log::Checkpoint(ptr) => Log::Checkpoint(ptr), Log::ReceivedFund(ptr, utxo) => Log::ReceivedFund(ptr, utxo.map(f)), Log::SpentFund(ptr, utxo) => Log::SpentFund(ptr, utxo.map(f)), } } } impl<A: fmt::Display> fmt::Display for Log<A> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Log::Checkpoint(ptr) => write!(f, "Checkpoint at: {}", ptr), Log::ReceivedFund(ptr, utxo) => write!(f, "Received funds at: {} {}", ptr, utxo), Log::SpentFund(ptr, utxo) => write!(f, "Spent funds at: {} {}", ptr, utxo), } } } const WALLET_LOG_FILE : &'static str = "LOG"; pub struct LogLock(lock::Lock); impl LogLock { pub fn acquire_wallet_log_lock(wallet_path: PathBuf) -> Result<Self> { Ok(LogLock(Lock::lock(wallet_path.join(WALLET_LOG_FILE))?)) } pub fn delete_wallet_log_lock(self, wallet_path: PathBuf) -> ::std::io::Result<()> { let file = wallet_path.join(WALLET_LOG_FILE); ::std::fs::remove_file(file) } } pub struct LogReader(append::Reader); impl LogReader { pub fn open(locked: LogLock) -> Result<Self> { Ok(LogReader(append::Reader::open(locked.0)?)) } pub fn release_lock(self) -> LogLock { LogLock(self.0.close()) } pub fn into_iter<A>(self) -> LogIterator<A> where for<'de> A: 
serde::Deserialize<'de> { LogIterator {reader: self, _log_type: ::std::marker::PhantomData } } pub fn next<A>(&mut self) -> Result<Option<Log<A>>> where for<'de> A: serde::Deserialize<'de> { match self.0.next()? { None => Ok(None), Some(bytes) => { let log = Log::deserisalise(&bytes)?; Ok(Some(log)) } } } } pub struct LogIterator<A> { reader: LogReader, _log_type: ::std::marker::PhantomData<A> } impl<A> Iterator for LogIterator<A> where for<'de> A: serde::Deserialize<'de> { type Item = Result<Log<A>>; fn next(&mut self) -> Option<Self::Item> { match self.reader.next() { Err(err) => Some(Err(err)), Ok(None) => None, Ok(Some(log)) => Some(Ok(log)) } } } pub struct LogWriter(append::Writer); impl LogWriter { pub fn open(locked: LogLock) -> Result<Self> { Ok(LogWriter(append::Writer::open(locked.0)?)) } pub fn release_lock(self) -> LogLock { LogLock(self.0.close()) } pub fn append<A: serde::Serialize+fmt::Debug>(&mut self, log: &Log<A>) -> Result<()> { Ok(self.0.append_bytes(&log.serialise()?)?) } }
Error::LogFormatError(format!("log format error: {:?}", e)) })?; }, Log::SpentFund(_, utxo) => { serialize::utils::write_u32(&mut writer, 3)?; serialize::utils::write_u64(&mut writer, 0)?; serde_yaml::to_writer(&mut writer, utxo).map_err(|e| { Error::LogFormatError(format!("log format error: {:?}", e)) })?; }, } Ok(writer) } } impl<A> Log<A> where for<'de> A: serde::Deserialize<'de> { fn deserisalise(bytes: &[u8]) -> Result<Self> { let mut reader = bytes; { let mut magic = [0u8; 4]; reader.read_exact(&mut magic)?; if magic != MAGIC { return Err(Error::UnsupportedLogFormat(magic.iter().cloned().collect())); } } let ptr = { let mut hash = [0;32]; reader.read_exact(&mut hash)?; let gen = serialize::utils::read_u64(&mut reader)?; let slot = serialize::utils::read_u64(&mut reader)?; let hh = HeaderHash::from(hash); let bd = if slot == 0xFFFFFFFFFFFFFFFF { BlockDate::Genesis(gen as u64) } else { BlockDate::Normal(EpochSlotId { epoch: gen as u64, slotid: slot as u16 }) }; StatePtr::new(bd, hh) }; let t = { let t = serialize:
random
[ { "content": "pub fn decrypt(password: &Password, data: &[u8]) -> Option<Vec<u8>> {\n\n let mut reader = data;\n\n let mut salt = [0;SALT_SIZE];\n\n let mut nonce = [0;NONCE_SIZE];\n\n let mut key = [0;KEY_SIZE];\n\n let len = data.len() - TAG_SIZE - SALT_SIZE - NONCE_SIZE;\n\n let mut bytes : Vec<u8> = repeat(0).take(len).collect();\n\n\n\n reader.read_exact(&mut salt[..]).unwrap();\n\n reader.read_exact(&mut nonce[..]).unwrap();\n\n\n\n password_to_key(password, salt, &mut key);\n\n let mut ctx = ChaCha20Poly1305::new(&key[..], &nonce[..], &[]);\n\n if ctx.decrypt(&reader[0..len], &mut bytes[..], &reader[len..]) {\n\n Some(bytes)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/utils/password_encrypted.rs", "rank": 0, "score": 234648.48782071937 }, { "content": "pub fn display_utxo<L>(term: &mut Term, ptr: StatePtr, utxo: UTxO<L>, debit: bool) {\n\n let ptr = format!(\"{}\", style!(ptr.latest_block_date()));\n\n let tid = format!(\"{}\", style!(utxo.transaction_id));\n\n let tii = format!(\"{:03}\", utxo.index_in_transaction);\n\n const WIDTH : usize = 14;\n\n let credit = if debit {\n\n format!(\"{:>width$}\", \" \", width = WIDTH)\n\n } else {\n\n format!(\"{:>width$}\", format!(\"{}\", utxo.credited_value), width = WIDTH)\n\n };\n\n let debit = if debit {\n\n format!(\"{:>width$}\", format!(\"{}\", utxo.credited_value), width = WIDTH)\n\n } else {\n\n format!(\"{:>width$}\", \" \", width = WIDTH)\n\n };\n\n\n\n writeln!(term, \"{:9}|{}.{}|{}|{}\",\n\n ::console::pad_str(&ptr, 9, ::console::Alignment::Left, None),\n\n tid,\n\n style!(tii).yellow(),\n\n style!(credit).green(),\n\n style!(debit).red()\n\n ).unwrap()\n\n}\n\n\n", "file_path": "src/wallet/utils.rs", "rank": 1, "score": 205552.54647213555 }, { "content": "pub fn dump_utxo<L>(term: &mut Term, ptr: StatePtr, utxo: UTxO<L>, debit: bool) {\n\n let title = if debit {\n\n style!(\"debit\").red()\n\n } else {\n\n style!(\"credit\").green()\n\n };\n\n let amount = if debit {\n\n 
style!(format!(\"{}\", utxo.credited_value)).red()\n\n } else {\n\n style!(format!(\"{}\", utxo.credited_value)).green()\n\n };\n\n\n\n writeln!(term, \"{} {}.{}\",\n\n title,\n\n style!(utxo.transaction_id),\n\n style!(utxo.index_in_transaction).yellow(),\n\n ).unwrap();\n\n writeln!(term, \"Date {}\", style!(ptr.latest_block_date())).unwrap();\n\n writeln!(term, \"Block {}\", style!(ptr.latest_known_hash)).unwrap();\n\n writeln!(term, \"Value {}\", amount).unwrap();\n\n writeln!(term, \"\").unwrap()\n\n}\n\n\n\n\n", "file_path": "src/wallet/utils.rs", "rank": 2, "score": 205552.54647213555 }, { "content": "pub fn log( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n , pretty: bool\n\n )\n\n{\n\n // load the wallet\n\n let wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n let mut state = create_wallet_state_from_logs(&mut term, &wallet, root_dir, lookup::accum::Accum::default());\n\n\n\n display_wallet_state_logs(&mut term, &wallet, &mut state, pretty);\n\n}\n\n\n", "file_path": "src/wallet/commands.rs", "rank": 3, "score": 202482.90506376547 }, { "content": "pub fn log( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , from: Option<HeaderHash>\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n\n\n let from = if let Some(hash) = from {\n\n if storage::block_location(&blockchain.storage, &hash).is_none() {\n\n term.error(&format!(\"block hash `{}' is not present in the local blockchain\\n\", hash))?;\n\n ::std::process::exit(1);\n\n }\n\n\n\n hash\n\n } else {\n\n blockchain.load_tip().0.hash\n\n };\n\n\n\n for block in storage::block::iter::ReverseIter::from(&blockchain.storage, from).unwrap() {\n\n use utils::pretty::Pretty;\n\n\n\n block.pretty(term, 0)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 4, "score": 202482.90506376547 }, { "content": "pub fn parse_genesis_data(json: &str) -> config::GenesisData { // FIXME: use Result\n\n\n\n let 
data: RawGenesisData = serde_json::from_str(&json).unwrap();\n\n\n\n let parse_fee_constant = |s: &str| {\n\n let n = s.parse::<u64>().unwrap();\n\n assert!(n % 1000000 == 0);\n\n fee::Milli(n / 1000000)\n\n };\n\n\n\n let mut avvm_distr = BTreeMap::new();\n\n for (avvm, balance) in &data.avvmDistr {\n\n avvm_distr.insert(\n\n redeem::PublicKey::from_slice(\n\n &base64::decode_config(avvm, base64::URL_SAFE).unwrap()).unwrap(),\n\n coin::Coin::new(balance.parse::<u64>().unwrap()).unwrap());\n\n }\n\n\n\n config::GenesisData {\n\n genesis_prev: block::HeaderHash::new(canonicalize_json(json).as_bytes()),\n\n epoch_stability_depth: data.protocolConsts.k,\n\n protocol_magic: config::ProtocolMagic::from(data.protocolConsts.protocolMagic),\n\n fee_policy: fee::LinearFee::new(\n\n parse_fee_constant(&data.blockVersionData.txFeePolicy.summand),\n\n parse_fee_constant(&data.blockVersionData.txFeePolicy.multiplier)),\n\n avvm_distr,\n\n non_avvm_balances: BTreeMap::new(), // FIXME\n\n }\n\n}\n\n\n", "file_path": "src/blockchain/parse_genesis_data.rs", "rank": 5, "score": 198483.24796257447 }, { "content": "pub fn encrypt(password: &Password, data: &[u8]) -> Vec<u8> {\n\n let salt = generate_salt();\n\n let nonce = generate_nonce();\n\n let mut key = [0;KEY_SIZE];\n\n let mut tag = [0;TAG_SIZE];\n\n let len = data.len();\n\n\n\n let mut bytes = Vec::with_capacity(SALT_SIZE + NONCE_SIZE + len + TAG_SIZE);\n\n let mut encrypted : Vec<u8> = repeat(0).take(data.len()).collect();\n\n\n\n // here we can safely unwrap, `Vec::with_capacity` should have provided\n\n // enough pre-allocated memory. 
If not, then there is a memory issue,\n\n // and there is nothing we can do.\n\n bytes.write_all(&salt[..]).unwrap();\n\n bytes.write_all(&nonce[..]).unwrap();\n\n\n\n password_to_key(password, salt, &mut key);\n\n let mut ctx = ChaCha20Poly1305::new(&key[..], &nonce[..], &[]);\n\n\n\n ctx.encrypt(data, &mut encrypted[0..len], &mut tag);\n\n encrypted.extend_from_slice(&tag[..]);\n\n\n\n bytes.append(&mut encrypted);\n\n bytes\n\n}\n\n\n", "file_path": "src/utils/password_encrypted.rs", "rank": 6, "score": 196283.5538574924 }, { "content": "pub fn lock_wallet_log(wallet: &Wallet) -> log::LogLock {\n\n match wallet.log() {\n\n Err(Error::WalletLogAlreadyLocked(pid)) => {\n\n error!(\"Wallet's LOG already locked by another process or thread ({})\\n\", pid);\n\n ::std::process::exit(1);\n\n },\n\n Err(err) => {\n\n error!(\"{}\", IMPOSSIBLE_HAPPENED);\n\n panic!(\"`lock_wallet_log' has failed with an unexpected error {:#?}\", err);\n\n },\n\n Ok(lock) => { lock }\n\n }\n\n}\n\n\n", "file_path": "src/wallet/utils.rs", "rank": 7, "score": 189077.0738530214 }, { "content": "pub fn get_genesis_data(genesis_prev: &HeaderHash) -> Result<&str, HeaderHash> {\n\n if genesis_prev == &HeaderHash::from_str(\"5f20df933584822601f9e3f8c024eb5eb252fe8cefb24d1317dc3d432e940ebb\").unwrap() {\n\n Ok(include_str!(\"../../genesis/5f20df933584822601f9e3f8c024eb5eb252fe8cefb24d1317dc3d432e940ebb.json\"))\n\n }\n\n else if genesis_prev == &HeaderHash::from_str(\"b7f76950bc4866423538ab7764fc1c7020b24a5f717a5bee3109ff2796567214\").unwrap() {\n\n Ok(include_str!(\"../../genesis/b7f76950bc4866423538ab7764fc1c7020b24a5f717a5bee3109ff2796567214.json\"))\n\n }\n\n else if genesis_prev == &HeaderHash::from_str(\"c6a004d3d178f600cd8caa10abbebe1549bef878f0665aea2903472d5abf7323\").unwrap() {\n\n Ok(include_str!(\"../../genesis/c6a004d3d178f600cd8caa10abbebe1549bef878f0665aea2903472d5abf7323.json\"))\n\n }\n\n else {\n\n Err(genesis_prev.clone())\n\n }\n\n}\n", "file_path": 
"src/blockchain/genesis_data.rs", "rank": 8, "score": 188648.61708592204 }, { "content": "fn pretty_object<P: Pretty, W: Write>(w: &mut W, indent: usize, k: &'static str, v: P) -> Result<()> {\n\n writeln!(w, \"{:width$}{}:\", \"\", k, width = indent)?;\n\n v.pretty(w, indent + DISPLAY_INDENT_SIZE)?;\n\n writeln!(w, \"\")?;\n\n Ok(())\n\n}\n\n\n\nimpl<'a> Pretty for &'a str {\n\n fn pretty<W>(self, f: &mut W, _: usize) -> Result<()>\n\n where W: Write\n\n {\n\n write!(f, \"{}\", self)\n\n }\n\n}\n\n\n\nimpl<D: ::std::fmt::Display> Pretty for StyledObject<D> {\n\n fn pretty<W>(self, f: &mut W, _: usize) -> Result<()>\n\n where W: Write\n\n {\n\n write!(f, \"{}\", self)\n", "file_path": "src/utils/pretty.rs", "rank": 9, "score": 178987.48279052784 }, { "content": "fn pretty_attribute<P: Pretty, W: Write>(w: &mut W, indent: usize, k: &'static str, v: P) -> Result<()> {\n\n write!(w, \"{:width$}{}: \", \"\", k, width = indent)?;\n\n v.pretty(w, indent + DISPLAY_INDENT_SIZE)?;\n\n writeln!(w, \"\")?;\n\n Ok(())\n\n}\n", "file_path": "src/utils/pretty.rs", "rank": 10, "score": 178987.48279052784 }, { "content": "pub fn utxos( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n )\n\n{\n\n // load the wallet\n\n let wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n let state = create_wallet_state_from_logs(&mut term, &wallet, root_dir, lookup::accum::Accum::default());\n\n\n\n display_wallet_state_utxos(&mut term, state);\n\n}\n\n\n", "file_path": "src/wallet/commands.rs", "rank": 11, "score": 177909.92007582448 }, { "content": "type Result<T> = ::std::result::Result<T, Error>;\n\n\n\n\n\n/// describe a transaction in its most reduce representation\n\n///\n\n/// Transaction are not meant to be edited from this representation\n\n/// as this is a read only object.\n\n///\n\n/// There is 2 way to construct a transaction:\n\n///\n\n/// 1. by creating an empty transaction and updating it with operations;\n\n/// 2. 
by collecting it from an iterator over `Operation` (see `FromIterator` trait);\n\n///\n\n/// Keeping private the transaction will allow us to control the state of the transaction\n\n/// and to guarantee some levels of integrity (preventing errors).\n\n///\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Transaction {\n\n pub inputs: Vec<Input>,\n\n pub outputs: Vec<Output>,\n", "file_path": "src/transaction/core/transaction.rs", "rank": 12, "score": 176135.90134488407 }, { "content": "fn global_verbose_option<'a>(matches: &ArgMatches<'a>) -> u64 {\n\n matches.occurrences_of(\"VERBOSITY\")\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 163589.6728970566 }, { "content": "/// convenient function to decrypt a HDWallet XPrv with a password\n\n///\n\n/// # Errors\n\n///\n\n/// This function may fail if:\n\n///\n\n/// * the password in invalid;\n\n/// * the encrypted value did not represent a HDWallet XPrv\n\n///\n\npub fn decrypt_primary_key(password: &Password, encrypted_key: &[u8]) -> Result<hdwallet::XPrv> {\n\n let xprv_vec = match password_encrypted::decrypt(password, encrypted_key) {\n\n None => return Err(Error::CannotRetrievePrivateKeyInvalidPassword),\n\n Some(bytes) => bytes\n\n };\n\n\n\n if xprv_vec.len() != hdwallet::XPRV_SIZE {\n\n return Err(\n\n Error::CannotRetrievePrivateKey(\n\n hdwallet::Error::InvalidXPrvSize(xprv_vec.len())\n\n )\n\n )\n\n }\n\n\n\n let mut xprv_bytes = [0;hdwallet::XPRV_SIZE];\n\n xprv_bytes.copy_from_slice(&xprv_vec[..]);\n\n\n\n Ok(hdwallet::XPrv::from_bytes_verified(xprv_bytes)?)\n\n}\n", "file_path": "src/wallet/config.rs", "rank": 14, "score": 161746.15238636578 }, { "content": "pub fn display_wallet_state_utxos<LS>( term: &mut Term\n\n , state: state::State<LS>\n\n )\n\n where LS: lookup::AddressLookup\n\n{\n\n for (_, utxo) in state.utxos {\n\n writeln!(term, \"{}.{} {}\",\n\n style!(utxo.transaction_id),\n\n style!(utxo.index_in_transaction).yellow(),\n\n 
style!(utxo.credited_value).green()\n\n ).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/wallet/utils.rs", "rank": 15, "score": 159641.75880247817 }, { "content": "pub fn update_wallet_state_with_utxos<LS>( term: &mut Term\n\n , wallet: &Wallet\n\n , blockchain: &Blockchain\n\n , state: &mut state::State<LS>\n\n )\n\n where LS: lookup::AddressLookup\n\n{\n\n let blockchain_tip = blockchain.load_tip().0;\n\n\n\n let from_ptr = state.ptr().clone();\n\n let from = from_ptr.latest_known_hash;\n\n let from_date = from_ptr.latest_addr.unwrap_or(BlockDate::Genesis(0));\n\n let num_blocks = blockchain_tip.date - from_date;\n\n\n\n term.info(&format!(\"syncing wallet from {} to {}\\n\", from_date, blockchain_tip.date)).unwrap();\n\n\n\n let progress = term.progress_bar(num_blocks as u64);\n\n progress.set_message(\"loading transactions... \");\n\n\n\n let mut last_block_date = from_date;\n", "file_path": "src/wallet/utils.rs", "rank": 16, "score": 159641.7588024782 }, { "content": "pub fn display_wallet_state_logs<LS>( term: &mut Term\n\n , wallet: &Wallet\n\n , _state: &mut state::State<LS>\n\n , pretty: bool\n\n )\n\n where LS: lookup::AddressLookup\n\n{\n\n let log_lock = lock_wallet_log(&wallet);\n\n let reader = log::LogReader::open(log_lock).unwrap();\n\n let reader : log::LogIterator<lookup::Address> = reader.into_iter();\n\n let reader = reader.filter_map(|r| {\n\n match r {\n\n Err(err) => {\n\n panic!(\"{:?}\", err)\n\n },\n\n Ok(v) => Some(v)\n\n }\n\n });\n\n\n\n for log in reader {\n", "file_path": "src/wallet/utils.rs", "rank": 17, "score": 159553.7825289335 }, { "content": "fn blockchain_argument_opt_headhash_match<'a>(term: &mut term::Term, matches: &ArgMatches<'a>, name: &str) -> Option<cardano::block::HeaderHash> {\n\n if matches.is_present(name) {\n\n Some(blockchain_argument_headhash_match(term, matches, name))\n\n } else { None }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 150825.31280314963 }, { "content": "pub fn destroy( term: &mut 
Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n\n\n writeln!(term, \"You are about to destroy the local blockchain {}.\n\nThis means that all the blocks downloaded will be deleted and that the attached\n\nwallets won't be able to interact with this blockchain.\",\n\n ::console::style(&blockchain.name).bold().red(),\n\n )?;\n\n\n\n let confirmation = ::dialoguer::Confirmation::new(\"Are you sure?\")\n\n .use_line_input(true)\n\n .clear(false)\n\n .default(false)\n\n .interact()?;\n\n if confirmation {\n\n unsafe { blockchain.destroy() }?;\n\n term.success(\"blockchain successfully destroyed\\n\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 19, "score": 147865.47884782002 }, { "content": "pub fn detach( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n )\n\n{\n\n // load the wallet\n\n let mut wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n let blockchainname = wallet.config.attached_blockchain().unwrap();\n\n\n\n // 1. get the wallet's blockchain\n\n let _ = load_attached_blockchain(\n\n &mut term,\n\n root_dir,\n\n blockchainname\n\n );\n\n\n\n // 2. 
delete the wallet log\n\n wallet.delete_log().unwrap_or_else(|e| term.fail_with(e));\n\n\n\n wallet.config.attached_blockchain = None;\n\n\n\n wallet.save();\n\n\n\n term.success(\"Wallet successfully attached to blockchain.\\n\").unwrap()\n\n}\n\n\n", "file_path": "src/wallet/commands.rs", "rank": 20, "score": 147865.47884782002 }, { "content": "pub fn forward( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , to: Option<HeaderHash>\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n\n\n let hash = if let Some(hash) = to {\n\n if storage::block_location(&blockchain.storage, &hash).is_none() {\n\n return Err(Error::ForwardHashDoesNotExist(hash))\n\n }\n\n\n\n hash\n\n } else {\n\n let initial_tip = blockchain.load_tip().0;\n\n\n\n let tip = blockchain.peers().map(|np| {\n\n peer::Peer::prepare(&blockchain, np.name().to_owned()).load_local_tip().0\n", "file_path": "src/blockchain/commands.rs", "rank": 21, "score": 147865.47884782002 }, { "content": "pub fn list( term: &mut Term\n\n , root_dir: PathBuf\n\n )\n\n -> Result<(), Error>\n\n{\n\n let transactions_dir = core::config::transaction_directory(root_dir.clone());\n\n\n\n for entry in ::std::fs::read_dir(transactions_dir)? {\n\n let entry = entry?;\n\n if entry.file_type()?.is_dir() {\n\n term.warn(&format!(\"unexpected directory in transaction directory: {:?}\", entry.path()))?;\n\n continue;\n\n }\n\n let name = entry.file_name().into_string().unwrap_or_else(|err| {\n\n panic!(\"invalid utf8... 
{:?}\", err)\n\n });\n\n\n\n let staging = load_staging(root_dir.clone(), name.as_str())?;\n\n\n\n writeln!(term, \"{}\", style!(staging.id())).unwrap();\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 22, "score": 147865.47884782002 }, { "content": "/// function to create a new empty transaction\n\npub fn new( term: &mut Term\n\n , root_dir: PathBuf\n\n , blockchain: BlockchainName\n\n )\n\n -> Result<(), Error>\n\n{\n\n let blockchain = Blockchain::load(root_dir.clone(), blockchain);\n\n\n\n let staging = StagingTransaction::new(root_dir, blockchain.config.protocol_magic)\n\n .map_err(Error::CannotCreateNewTransaction)?;\n\n\n\n writeln!(term, \"{}\", style!(staging.id()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 23, "score": 147865.47884782002 }, { "content": "pub fn list( term: &mut Term\n\n , root_dir: PathBuf\n\n , detailed: bool\n\n )\n\n -> Result<()>\n\n{\n\n let blockchains_dir = super::config::blockchains_directory(&root_dir);\n\n let dir_reader = match ::std::fs::read_dir(blockchains_dir) {\n\n Err(err) => {\n\n use std::io::ErrorKind;\n\n return match err.kind() {\n\n ErrorKind::NotFound => Err(Error::ListNoBlockchains),\n\n ErrorKind::PermissionDenied => Err(Error::ListPermissionsDenied),\n\n _ => Err(Error::IoError(err)),\n\n }\n\n },\n\n Ok(dr) => dr\n\n };\n\n for entry in dir_reader {\n\n let entry = entry.unwrap();\n", "file_path": "src/blockchain/commands.rs", "rank": 24, "score": 147865.47884782002 }, { "content": "pub fn address( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n , account: u32\n\n , is_internal: bool\n\n , index: u32\n\n )\n\n{\n\n // load the wallet\n\n let wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n let addr = match wallet.config.hdwallet_model {\n\n HDWalletModel::BIP44 => {\n\n let mut lookup_struct = load_bip44_lookup_structure(&mut term, &wallet);\n\n let account = match ::cardano::bip::bip44::Account::new(account) 
{\n\n Err(err) => term.fail_with(err),\n\n Ok(account) => account\n\n };\n\n let change = if is_internal {\n\n account.internal().unwrap_or_else(|e| term.fail_with(e))\n", "file_path": "src/wallet/commands.rs", "rank": 25, "score": 147865.47884782002 }, { "content": "/// function to create and initialize a given new blockchain\n\n///\n\n/// It will mainly create the subdirectories needed for the storage\n\n/// of blocks, epochs and tags.\n\n///\n\n/// If the given blockchain configuration provides some preset peers\n\n/// each peer will be initialized with an associated tag pointing to\n\n/// the genesis hash of the blockchain (given in the same configuration\n\n/// structure `Config`).\n\n///\n\npub fn new( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , config: Config\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::new(root_dir, name, config)?;\n\n blockchain.save();\n\n\n\n term.success(&format!(\"local blockchain `{}' created.\\n\", blockchain.name))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 26, "score": 147865.47884782002 }, { "content": "pub fn status( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n )\n\n{\n\n // load the wallet\n\n let wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n if let Some(ref blk_name) = &wallet.config.attached_blockchain {\n\n term.simply(\"Wallet \").unwrap();\n\n term.warn(&format!(\"{}\", &wallet.name)).unwrap();\n\n term.simply(\" on blockchain \").unwrap();\n\n term.info(blk_name).unwrap();\n\n term.simply(\"\\n\").unwrap();\n\n } else {\n\n term.info(&format!(\"Wallet {} status\\n\", &wallet.name)).unwrap();\n\n term.warn(\"wallet not attached to a blockchain\").unwrap();\n\n return;\n\n }\n\n\n", "file_path": "src/wallet/commands.rs", "rank": 27, "score": 147865.47884782002 }, { "content": "pub fn status( term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n )\n\n -> Result<(), Error>\n\n{\n\n let staging = 
load_staging(root_dir, id_str)?;\n\n\n\n let trans = staging.transaction();\n\n let inputs = trans.inputs();\n\n let input_total = sum_coins(inputs.into_iter().map(|x| x.expected_value))\n\n .map_err(Error::CannotReportStatusInvalidInputTotal)?;\n\n let (builder, changes) = staging.transaction().mk_txbuilder()\n\n .map_err(Error::CannotReportStatusInvalidTxBuilder)?;\n\n let tx = builder.make_tx()\n\n .unwrap_or_else(|_| Tx::new());\n\n let output_total = tx.get_output_total()\n\n .map_err(Error::CannotReportStatusInvalidOutputTotal)?;\n\n let difference = {\n\n let i : u64 = input_total.into();\n", "file_path": "src/transaction/commands.rs", "rank": 28, "score": 147865.47884782002 }, { "content": "pub fn attach( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n , blockchain_name: BlockchainName\n\n )\n\n{\n\n // load the wallet\n\n let mut wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n // 1. is the wallet already attached\n\n if let Some(ref bn) = wallet.config.attached_blockchain {\n\n term.error(&format!(\"Wallet already attached to blockchain `{}'\\n\", bn)).unwrap();\n\n ::std::process::exit(1);\n\n }\n\n\n\n // 2. 
check the blockchain exists\n\n let blockchain_dir = blockchain::config::directory(root_dir.clone(), &blockchain_name);\n\n if let Err(err) = ::std::fs::read_dir(blockchain_dir) {\n\n term.error(&format!(\"Blockchain `{}' does not exists or you do not have user permissions\\n\", blockchain_name)).unwrap();\n\n term.error(&format!(\" |-> {}\\n\", err)).unwrap();\n", "file_path": "src/wallet/commands.rs", "rank": 29, "score": 147865.47884782002 }, { "content": "pub fn sign( term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut signatures = Vec::new();\n\n\n\n let mut wallets = BTreeMap::new();\n\n for (name, wallet) in Wallets::load(root_dir.clone()).unwrap() {\n\n let state = wallet::utils::create_wallet_state_from_logs(term, &wallet, root_dir.clone(), wallet::state::lookup::accum::Accum::default());\n\n wallets.insert(name, (wallet, state));\n\n }\n\n\n\n let mut staging = load_staging(root_dir.clone(), id_str)?;\n\n let (finalized, changes) = staging.transaction().mk_finalized()\n\n .map_err(Error::CannotSignTransactionNotFinalized)?;\n\n let tx = staging.transaction().mk_txbuilder()\n\n .map_err(Error::CannotSignTransactionNotFinalized)?\n\n .0.make_tx()\n", "file_path": "src/transaction/commands.rs", "rank": 30, "score": 147865.47884782002 }, { "content": "pub fn status( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n\n\n writeln!(term, \"{}\", style!(\"Blockchain\").cyan().bold())?;\n\n {\n\n let (tip, _is_genesis) = blockchain.load_tip();\n\n let tag_path = blockchain.dir.join(\"tag\").join(super::LOCAL_BLOCKCHAIN_TIP_TAG);\n\n let metadata = ::std::fs::metadata(tag_path)?;\n\n let fetched_date = metadata.modified()?.into();\n\n // get the difference between now and the last fetch, only keep up to the seconds\n\n let fetched_since = time::Duration::since(fetched_date);\n\n\n\n writeln!(term, \" * last 
forward: {} ({} ago)\", style!(fetched_date).green(), style!(fetched_since).green())?;\n\n writeln!(term, \" * local tip hash: {}\", style!(tip.hash))?;\n\n writeln!(term, \" * local tip date: {}\", style!(tip.date))?;\n", "file_path": "src/blockchain/commands.rs", "rank": 31, "score": 147865.47884782002 }, { "content": "pub fn pull( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir.clone(), name.clone());\n\n\n\n for np in blockchain.peers() {\n\n if ! np.is_native() { continue; }\n\n term.info(&format!(\"fetching blocks from peer: {}\\n\", np.name()))?;\n\n\n\n let peer = peer::Peer::prepare(&blockchain, np.name().to_owned());\n\n\n\n peer.connect(term).unwrap().sync(term);\n\n }\n\n\n\n forward(term, root_dir, name, None)\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 32, "score": 147865.47884782002 }, { "content": "pub fn sync( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n )\n\n\n\n{\n\n // 0. load the wallet\n\n let wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n let blockchainname = wallet.config.attached_blockchain().unwrap();\n\n\n\n // 1. 
get the wallet's blockchain\n\n let blockchain = load_attached_blockchain(&mut term, root_dir.clone(), blockchainname);\n\n\n\n match wallet.config.hdwallet_model {\n\n HDWalletModel::BIP44 => {\n\n let mut lookup_struct = load_bip44_lookup_structure(&mut term, &wallet);\n\n lookup_struct.prepare_next_account().unwrap_or_else(|e| term.fail_with(e));\n\n let mut state = create_wallet_state_from_logs(&mut term, &wallet, root_dir.clone(), lookup_struct);\n\n\n", "file_path": "src/wallet/commands.rs", "rank": 33, "score": 147865.47884782002 }, { "content": "pub fn finalize( _term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut staging = load_staging(root_dir, id_str)?;\n\n\n\n staging.finalize().map_err(Error::CannotFinalize)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 34, "score": 147865.47884782002 }, { "content": "pub fn export( term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , export_file: Option<&str>\n\n )\n\n -> Result<(), Error>\n\n{\n\n let staging = load_staging(root_dir, id_str)?;\n\n\n\n let export = staging.export();\n\n\n\n if let Some(export_file) = export_file {\n\n let mut file = ::std::fs::OpenOptions::new().create(true).write(true).open(export_file)\n\n .map_err(Error::CannotExportToFileCannotOpenOutFile)?;\n\n ::serde_yaml::to_writer(&mut file, &export)\n\n .map_err(Error::CannotExportToFile)\n\n } else {\n\n ::serde_yaml::to_writer(term, &export)\n\n .map_err(Error::CannotExportToStdout)\n\n }\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 35, "score": 147865.47884782002 }, { "content": "pub fn destroy( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n )\n\n{\n\n // load the wallet\n\n let wallet = Wallet::load(root_dir.clone(), name);\n\n\n\n writeln!(term, \"You are about to destroy your wallet {}.\n\nThis means that all the data associated to this wallet will be deleted on this device.\n\nThe only way you will be able to 
reuse the wallet, recover the funds and create\n\nnew transactions will be by recovering the wallet with the mnemonic words.\",\n\n ::console::style(&wallet.name).bold().red(),\n\n ).unwrap();\n\n\n\n let confirmation = ::dialoguer::Confirmation::new(\"Are you sure?\")\n\n .use_line_input(true)\n\n .clear(false)\n\n .default(false)\n\n .interact().unwrap();\n\n if ! confirmation { ::std::process::exit(0); }\n\n\n\n unsafe { wallet.destroy() }.unwrap_or_else(|e| term.fail_with(e));\n\n\n\n term.success(\"Wallet successfully destroyed.\\n\").unwrap()\n\n}\n\n\n\n\n", "file_path": "src/wallet/commands.rs", "rank": 36, "score": 147865.47884782002 }, { "content": "pub fn destroy( _term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n )\n\n -> Result<(), Error>\n\n{\n\n let staging = load_staging(root_dir, id_str)?;\n\n\n\n staging.destroy().map_err(Error::CannotDestroyTransaction)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 37, "score": 147865.47884782002 }, { "content": "pub fn import( term: &mut Term\n\n , root_dir: PathBuf\n\n , import_file: Option<&str>\n\n )\n\n -> Result<(), Error>\n\n{\n\n let import = if let Some(import_file) = import_file {\n\n let mut file = ::std::fs::OpenOptions::new().read(true).open(import_file)\n\n .map_err(Error::CannotImportFromFileCannotOpenInputFile)?;\n\n ::serde_yaml::from_reader(&mut file)\n\n .map_err(Error::CannotImportFromFile)?\n\n } else {\n\n let mut stdin = ::std::io::stdin();\n\n ::serde_yaml::from_reader(&mut stdin)\n\n .map_err(Error::CannotImportFromStdin)?\n\n };\n\n\n\n let staging = StagingTransaction::import(root_dir, import)\n\n .map_err(Error::CannotImportStaging)?;\n\n writeln!(term, \"Staging transaction `{}' successfully imported\", style!(staging.id()))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 38, "score": 147865.47884782002 }, { "content": "pub fn cat( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , hash: HeaderHash\n\n 
, no_parse: bool\n\n , debug: bool\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir.clone(), name.clone());\n\n let rblk = get_block(&blockchain, &hash)?;\n\n\n\n if no_parse {\n\n ::std::io::stdout().write(rblk.as_ref())?;\n\n ::std::io::stdout().flush()?;\n\n } else {\n\n use utils::pretty::Pretty;\n\n\n\n let blk = rblk.decode().map_err(Error::CatMalformedBlock)?;\n\n if debug {\n\n writeln!(term, \"{:#?}\", blk)?;\n\n } else {\n\n blk.pretty(term, 0)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 39, "score": 147865.47884782002 }, { "content": "/// function to create a new empty transaction\n\npub fn send( term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , blockchain: BlockchainName\n\n )\n\n -> Result<(), Error>\n\n{\n\n let blockchain = Blockchain::load(root_dir.clone(), blockchain);\n\n let staging = load_staging(root_dir.clone(), id_str)?;\n\n\n\n let (finalized, changes) = staging.transaction().mk_finalized()\n\n .map_err(Error::CannotSendTransactionNotFinalized)?;\n\n let txaux = finalized.make_txaux()\n\n .map_err(Error::CannotSendTransactionInvalidTxAux)?;\n\n\n\n writeln!(term, \"sending transaction {}\", style!(txaux.tx.id()))?;\n\n\n\n let mut sent = false;\n\n for np in blockchain.peers() {\n\n if ! 
np.is_native() { continue; }\n", "file_path": "src/transaction/commands.rs", "rank": 40, "score": 147865.47884782002 }, { "content": "pub fn list( mut term: Term\n\n , root_dir: PathBuf\n\n , detailed: bool\n\n )\n\n{\n\n let wallets = Wallets::load(root_dir.clone()).unwrap_or_else(|e| term.fail_with(e));\n\n for (_, wallet) in wallets {\n\n let detail = if detailed {\n\n if let Some(blk_name) = &wallet.config.attached_blockchain {\n\n let state = create_wallet_state_from_logs(&mut term, &wallet, root_dir.clone(), lookup::accum::Accum::default());\n\n\n\n let total = state.total().unwrap_or_else(|e| term.fail_with(e));\n\n\n\n format!(\"\\t{}\\t{}@{}\",\n\n style!(total).green().bold(),\n\n style!(blk_name).underlined().white(),\n\n style!(state.ptr.latest_block_date())\n\n )\n\n } else {\n\n String::new()\n\n }\n\n } else {\n\n String::new()\n\n };\n\n\n\n writeln!(term, \"{}{}\", style!(wallet.name).cyan().italic(), detail).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/wallet/commands.rs", "rank": 41, "score": 147865.47884782002 }, { "content": "pub fn load_attached_blockchain(term: &mut Term, root_dir: PathBuf, name: Option<BlockchainName>) -> Blockchain {\n\n match name {\n\n None => {\n\n term.error(\"Wallet is not attached to any blockchain\\n\").unwrap();\n\n ::std::process::exit(1);\n\n },\n\n Some(blockchain) => {\n\n Blockchain::load(root_dir, blockchain)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/wallet/utils.rs", "rank": 42, "score": 146217.9311776167 }, { "content": "pub fn command_address( mut term: Term\n\n , address: String\n\n )\n\n{\n\n let bytes = match base58::decode(&address) {\n\n Err(err) => {\n\n term.error(&format!(\"Invalid Address, should be encoded in base58\\n\")).unwrap();\n\n term.error(&format!(\"{}\\n\", err)).unwrap();\n\n ::std::process::exit(1)\n\n },\n\n Ok(bytes) => bytes,\n\n };\n\n\n\n let address = match ExtendedAddr::try_from_slice(&bytes) {\n\n Err(err) => {\n\n term.error(&format!(\"Invalid Address\\n\")).unwrap();\n\n 
term.error(&format!(\"{:?}\\n\", err)).unwrap();\n\n ::std::process::exit(2)\n\n },\n\n Ok(address) => address,\n", "file_path": "src/debug/mod.rs", "rank": 43, "score": 144841.66784627698 }, { "content": "pub fn add_output( _term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , output: Option<(ExtendedAddr, Coin)>\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut staging = load_staging(root_dir, id_str)?;\n\n\n\n let output = if let Some(output) = output {\n\n core::Output {\n\n address: output.0,\n\n amount: output.1\n\n }\n\n } else {\n\n // TODO, implement interactive mode\n\n unimplemented!()\n\n };\n\n\n\n staging.add_output(output).map_err(Error::CannotAddOutput)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 44, "score": 144841.66784627698 }, { "content": "pub fn add_input( term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , input: Option<(TxId, u32, Option<Coin>)>\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut staging = load_staging(root_dir.clone(), id_str)?;\n\n\n\n let input = if let Some(input) = input {\n\n match input.2 {\n\n None => {\n\n find_input_in_all_utxos(term, root_dir.clone(), input.0, input.1)?\n\n },\n\n Some(v) => {\n\n core::Input {\n\n transaction_id: input.0,\n\n index_in_transaction: input.1,\n\n expected_value: v,\n\n }\n\n },\n\n }\n\n } else {\n\n // TODO, implement interactive mode\n\n unimplemented!()\n\n };\n\n\n\n staging.add_input(input).map_err(Error::CannotAddInput)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 45, "score": 144841.66784627698 }, { "content": "pub fn add_change( _term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , change: ExtendedAddr\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut staging = load_staging(root_dir, id_str)?;\n\n\n\n staging.add_change(change.into()).map_err(Error::CannotAddChange)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 46, "score": 144841.66784627698 }, { "content": "pub fn remove_input( _term: 
&mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , input: Option<(TxId, u32)>\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut staging = load_staging(root_dir, id_str)?;\n\n\n\n let txin = if let Some(input) = input {\n\n TxoPointer {\n\n id: input.0,\n\n index: input.1\n\n }\n\n } else {\n\n // TODO, implement interactive mode\n\n unimplemented!()\n\n };\n\n\n\n staging.remove_input(txin).map_err(Error::CannotRemoveInput)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 47, "score": 144841.66784627698 }, { "content": "/// function to add a remote to the given blockchain\n\n///\n\n/// It will create the appropriate tag referring to the blockchain\n\n/// genesis hash. This is because when add a new peer we don't assume\n\n/// anything more than the genesis block.\n\n///\n\npub fn remote_add( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , remote_alias: String\n\n , remote_endpoint: String\n\n )\n\n -> Result<()>\n\n{\n\n let mut blockchain = Blockchain::load(root_dir, name);\n\n blockchain.add_peer(remote_alias.clone(), remote_endpoint);\n\n blockchain.save();\n\n\n\n term.success(&format!(\"remote `{}' node added to blockchain `{}'\\n\", remote_alias, blockchain.name))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 48, "score": 144841.66784627698 }, { "content": "pub fn verify_block( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , hash: HeaderHash\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n let rblk = get_block(&blockchain, &hash)?;\n\n match rblk.decode() {\n\n Ok(blk) => {\n\n match cardano::block::verify_block(blockchain.config.protocol_magic, &hash, &blk) {\n\n Ok(()) => {\n\n Ok(writeln!(term, \"{}\", style!(\"Block is valid\").green())?)\n\n }\n\n Err(err) => {\n\n Err(Error::VerifyInvalidBlock(err))\n\n }\n\n }\n\n },\n\n Err(err) => {\n\n Err(Error::VerifyMalformedBlock(err))\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "src/blockchain/commands.rs", "rank": 49, "score": 144841.66784627698 }, { "content": "pub fn remove_change( _term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , change: ExtendedAddr\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut staging = load_staging(root_dir, id_str)?;\n\n\n\n staging.remove_change(change).map_err(Error::CannotRemoveChange)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 50, "score": 144841.66784627698 }, { "content": "pub fn remove_output( _term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , address: Option<ExtendedAddr>\n\n )\n\n -> Result<(), Error>\n\n{\n\n let mut staging = load_staging(root_dir, id_str)?;\n\n\n\n if let Some(addr) = address {\n\n staging.remove_outputs_for(&addr).map_err(Error::CannotRemoveOutput)\n\n } else {\n\n // TODO, implement interactive mode\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 51, "score": 144841.66784627698 }, { "content": "pub fn verify_chain( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , stop_on_error: bool\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n\n\n let tip = blockchain.load_tip().0;\n\n let num_blocks = tip.date.slot_number();\n\n\n\n let progress = term.progress_bar(num_blocks as u64);\n\n progress.set_message(\"verifying blocks... 
\");\n\n\n\n let genesis_data = {\n\n let genesis_data = genesis_data::get_genesis_data(&blockchain.config.genesis_prev)\n\n .map_err(Error::VerifyChainGenesisHashNotFound)?;\n\n\n\n parse_genesis_data::parse_genesis_data(genesis_data)\n", "file_path": "src/blockchain/commands.rs", "rank": 52, "score": 144841.66784627698 }, { "content": "pub fn remote_fetch( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , peers: Vec<String>\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n\n\n for np in blockchain.peers() {\n\n if peers.is_empty() || peers.contains(&np.name().to_owned()) {\n\n term.info(&format!(\"fetching blocks from peer: {}\\n\", np.name()))?;\n\n\n\n let peer = peer::Peer::prepare(&blockchain, np.name().to_owned());\n\n\n\n peer.connect(term).unwrap().sync(term);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(PartialEq, Eq, PartialOrd, Ord)]\n\npub enum RemoteDetail {\n\n Short,\n\n Local,\n\n Remote\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 53, "score": 144841.66784627698 }, { "content": "pub fn input_select( term: &mut Term\n\n , root_dir: PathBuf\n\n , id_str: &str\n\n , wallets: Vec<WalletName>\n\n )\n\n -> Result<(), Error>\n\n{\n\n use ::cardano::{fee::{self}, input_selection::{SelectionAlgorithm, SelectionPolicy}, txutils};\n\n\n\n let alg = fee::LinearFee::default();\n\n let selection_policy = SelectionPolicy::default();\n\n\n\n let mut staging = load_staging(root_dir.clone(), id_str)?;\n\n\n\n if ! 
staging.transaction().has_change() {\n\n return Err(Error::CannotInputSelectNoChangeOption);\n\n }\n\n\n\n let change_address = staging.transaction().changes()[0].address.clone();\n\n let output_policy = txutils::OutputPolicy::One(change_address.clone());\n", "file_path": "src/transaction/commands.rs", "rank": 54, "score": 144841.66784627698 }, { "content": "/// remove the given peer from the blockchain\n\n///\n\n/// it will also delete all the metadata associated to this peer\n\n/// such as the tag pointing to the remote's tip.\n\n///\n\npub fn remote_rm( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , remote_alias: String\n\n )\n\n -> Result<()>\n\n{\n\n let mut blockchain = Blockchain::load(root_dir, name);\n\n blockchain.remove_peer(remote_alias.clone());\n\n blockchain.save();\n\n\n\n term.success(&format!(\"remote `{}' node removed from blockchain `{}'\\n\", remote_alias, blockchain.name))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 55, "score": 144841.66784627698 }, { "content": "pub fn remote_ls( term: &mut Term\n\n , root_dir: PathBuf\n\n , name: BlockchainName\n\n , detailed: RemoteDetail\n\n )\n\n -> Result<()>\n\n{\n\n let blockchain = Blockchain::load(root_dir, name);\n\n\n\n for np in blockchain.peers() {\n\n let peer = peer::Peer::prepare(&blockchain, np.name().to_owned());\n\n let (tip, _is_genesis) = peer.load_local_tip();\n\n\n\n writeln!(term, \"{} ({})\", style!(&peer.name), style!(&peer.config))?;\n\n\n\n if detailed >= RemoteDetail::Local {\n\n let tag_path = blockchain.dir.join(\"tag\").join(&peer.tag);\n\n let metadata = ::std::fs::metadata(tag_path).unwrap();\n\n let fetched_date = metadata.modified()?.into();\n\n // get the difference between now and the last fetch, only keep up to the seconds\n", "file_path": "src/blockchain/commands.rs", "rank": 56, "score": 144841.66784627698 }, { "content": "pub fn input_mnemonic_phrase<D>(term: &mut Term, dic: &D, size: bip39::Type) -> 
PromptedMnemonics\n\n where D: Language\n\n{\n\n let count = size.mnemonic_count();\n\n\n\n loop {\n\n let string = Input::new(&format!(\"Please enter all your {} mnemonics\", style(count).bold().red()))\n\n .clear(true)\n\n .interact_on(&term.term)\n\n .unwrap();\n\n\n\n match validate_mnemonics(dic, size, string) {\n\n Ok(res) => { return res; },\n\n Err(prompt) => {\n\n while ! Confirmation::new(&prompt).clear(true).default(true).show_default(true).interact_on(&term.term).unwrap() {}\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/utils/prompt/mnemonics.rs", "rank": 57, "score": 143460.14298737556 }, { "content": "pub fn interactive_input_words<D>(term: &mut Term, dic: &D, size: bip39::Type) -> PromptedMnemonics\n\n where D: Language\n\n{\n\n let count = size.mnemonic_count();\n\n\n\n loop {\n\n let mut string = String::new();\n\n for idx in 1..=count {\n\n let result = interactive_input_word(term, dic, idx, count);\n\n if idx == 1 {\n\n string = result;\n\n } else {\n\n string.push_str(dic.separator());\n\n string.push_str(&result);\n\n }\n\n }\n\n\n\n match validate_mnemonics(dic, size, string) {\n\n Ok(res) => { return res; },\n\n Err(prompt) => {\n\n while ! Confirmation::new(&prompt).clear(true).default(true).show_default(true).interact_on(&term.term).unwrap() {}\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils/prompt/mnemonics.rs", "rank": 58, "score": 143460.14298737556 }, { "content": "pub fn recover<D>( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n , wallet_scheme: HDWalletModel\n\n , derivation_scheme: DerivationScheme\n\n , mnemonic_size: bip39::Type\n\n , interactive: bool\n\n , daedalus_seed: bool\n\n , language: D\n\n )\n\n where D: bip39::dictionary::Language\n\n{\n\n let config = Config {\n\n attached_blockchain: None,\n\n derivation_scheme: derivation_scheme,\n\n hdwallet_model: wallet_scheme\n\n };\n\n\n\n // 1. 
generate the mnemonics\n\n term.info(\"enter your mnemonics\\n\").unwrap();\n", "file_path": "src/wallet/commands.rs", "rank": 59, "score": 141307.12821103714 }, { "content": "/// function to create a new wallet\n\n///\n\npub fn new<D>( mut term: Term\n\n , root_dir: PathBuf\n\n , name: WalletName\n\n , wallet_scheme: HDWalletModel\n\n , derivation_scheme: DerivationScheme\n\n , mnemonic_size: bip39::Type\n\n , languages: Vec<D>\n\n )\n\n where D: bip39::dictionary::Language\n\n{\n\n let config = Config {\n\n attached_blockchain: None,\n\n derivation_scheme: derivation_scheme,\n\n hdwallet_model: wallet_scheme\n\n };\n\n\n\n // 1. generate the mnemonics\n\n\n\n let entropy = bip39::Entropy::generate(mnemonic_size, random);\n\n // 2. perform the seed generation from the entropy\n", "file_path": "src/wallet/commands.rs", "rank": 60, "score": 141307.12821103714 }, { "content": "fn transaction_argument_input_match<'a>(matches: &ArgMatches<'a>) -> Option<(cardano::tx::TxId, u32, Option<cardano::coin::Coin>)> {\n\n let (txid, index) = transaction_argument_txin_match(&matches)?;\n\n let coin = value_t!(matches, \"UTXO_AMOUNT\", cardano::coin::Coin).ok();\n\n\n\n Some((txid, index, coin))\n\n}\n", "file_path": "src/main.rs", "rank": 61, "score": 124624.61285378775 }, { "content": "/// helper function to load a staging file\n\nfn load_staging(root_dir: PathBuf, id_str: &str) -> Result<StagingTransaction, Error> {\n\n let id = id_str.parse::<StagingId>().map_err(Error::InvalidStagingId)?;\n\n\n\n StagingTransaction::read_from_file(root_dir, id).map_err(Error::CannotLoadStagingTransaction)\n\n}\n\n\n\n// ----------------------------------- helpers ---------------------------------\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 62, "score": 124232.3433690017 }, { "content": "pub fn wallet_sign_tx(term: &mut Term, wallet: &Wallet, protocol_magic: ProtocolMagic, txid: &TxId, address: &lookup::Address) -> TxInWitness\n\n{\n\n match wallet.config.hdwallet_model {\n\n 
HDWalletModel::BIP44 => {\n\n let wallet = load_bip44_lookup_structure(term, wallet);\n\n if let lookup::Address::Bip44(addressing) = address {\n\n let xprv = wallet.get_private_key(addressing);\n\n TxInWitness::new(protocol_magic, &*xprv, txid)\n\n } else {\n\n panic!()\n\n }\n\n },\n\n HDWalletModel::RandomIndex2Levels => {\n\n let wallet = load_randomindex_lookup_structure(term, wallet);\n\n if let lookup::Address::RIndex(addressing) = address {\n\n let xprv = wallet.get_private_key(addressing);\n\n TxInWitness::new(protocol_magic, &xprv, txid)\n\n } else {\n\n panic!()\n\n }\n\n },\n\n }\n\n}\n\n\n\nconst IMPOSSIBLE_HAPPENED : &'static str = \"The impossible happened\n\nThe process will panic with an error message, this is because something\n\nunexpected happened. Please report the error message with the panic\n\nerror message to: https://github.com/input-output-hk/rust-cardano/issues\n\n\";\n", "file_path": "src/wallet/utils.rs", "rank": 63, "score": 122004.51610913636 }, { "content": "fn wallet_argument_mnemonic_size_match<'a>(matches: &ArgMatches<'a>) -> cardano::bip::bip39::Type {\n\n match matches.value_of(\"MNEMONIC_SIZE\") {\n\n Some(\"12\") => cardano::bip::bip39::Type::Type12Words,\n\n Some(\"15\") => cardano::bip::bip39::Type::Type15Words,\n\n Some(\"18\") => cardano::bip::bip39::Type::Type18Words,\n\n Some(\"21\") => cardano::bip::bip39::Type::Type21Words,\n\n Some(\"24\") => cardano::bip::bip39::Type::Type24Words,\n\n _ => unreachable!() // default is \"24\"\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 64, "score": 121774.57303851913 }, { "content": "/// convenient function to encrypt a HDWallet XPrv with a password\n\n///\n\npub fn encrypt_primary_key(password: &Password, xprv: &hdwallet::XPrv) -> Vec<u8> {\n\n password_encrypted::encrypt(password, xprv.as_ref())\n\n}\n\n\n", "file_path": "src/wallet/config.rs", "rank": 65, "score": 120766.1860643335 }, { "content": "fn blockchain_argument_headhash_match<'a>(term: &mut term::Term, matches: 
&ArgMatches<'a>, name: &str) -> cardano::block::HeaderHash {\n\n match value_t!(matches, name, cardano::block::HeaderHash) {\n\n Ok(hh) => hh,\n\n Err(err) => { term.fail_with(err) }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 66, "score": 119839.25750906383 }, { "content": "type Result<T> = bip44::bip44::Result<T>;\n\n\n\npub struct SequentialBip44Lookup {\n\n // cryptographic wallet\n\n //\n\n // downside of needed the bip44's wallet is that we need to decrypt the\n\n // wallet private key with the password. This is needed because we might need\n\n // to create new addresses and they need hard derivation (which cannot be\n\n // done through the public key).\n\n //\n\n wallet: bip44::Wallet,\n\n // all the known expected addresses, that includes\n\n // all different accounts, and also the next not yet live\n\n // account's addresses\n\n expected: BTreeMap<ExtendedAddr, bip44::Addressing>,\n\n\n\n // accounts threshold index for internal and external addresses\n\n accounts: Vec<[bip44::Index;2]>,\n\n\n\n // gap limit\n", "file_path": "src/wallet/state/lookup/sequentialindex.rs", "rank": 67, "score": 119354.82210227552 }, { "content": "fn pretty_iterator<I, D, W>(w: &mut W, indent: usize, iter: I) -> Result<()>\n\n where I: IntoIterator<Item = D>\n\n , D: Pretty\n\n , W: Write\n\n{\n\n for e in iter {\n\n write!(w, \"{:width$}\", \"\", width = indent)?;\n\n e.pretty(w, indent + DISPLAY_INDENT_SIZE)?;\n\n writeln!(w, \"\")?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl<D: Pretty> Pretty for Vec<D> {\n\n fn pretty<W>(self, f: &mut W, indent: usize) -> Result<()>\n\n where W: Write\n\n {\n\n pretty_iterator(f, indent, self.into_iter())\n\n }\n\n}\n", "file_path": "src/utils/pretty.rs", "rank": 68, "score": 118624.33594545793 }, { "content": "fn transaction_argument_txin_match<'a>(matches: &ArgMatches<'a>) -> Option<(cardano::tx::TxId, u32)> {\n\n if ! 
matches.is_present(\"TRANSACTION_TXID\") { return None; }\n\n let txid = value_t!(matches, \"TRANSACTION_TXID\", cardano::tx::TxId).unwrap_or_else(|e| e.exit());\n\n\n\n let index = value_t!(matches, \"TRANSACTION_INDEX\", u32).unwrap_or_else(|e| e.exit());\n\n\n\n Some((txid, index))\n\n}\n", "file_path": "src/main.rs", "rank": 69, "score": 118261.66047744062 }, { "content": "pub fn canonicalize_json(json: &str) -> String\n\n{\n\n let data: serde_json::Value = serde_json::from_str(&json).unwrap();\n\n data.to_string()\n\n}\n", "file_path": "src/blockchain/parse_genesis_data.rs", "rank": 70, "score": 117787.14305130523 }, { "content": "type Nonce = [u8;NONCE_SIZE];\n\n\n", "file_path": "src/utils/password_encrypted.rs", "rank": 71, "score": 116070.30757909702 }, { "content": "type Salt = [u8;SALT_SIZE];\n", "file_path": "src/utils/password_encrypted.rs", "rank": 72, "score": 116070.30757909702 }, { "content": "type Key = [u8;KEY_SIZE];\n", "file_path": "src/utils/password_encrypted.rs", "rank": 73, "score": 116070.30757909702 }, { "content": "fn blockchain_argument_name_match<'a>(term: &mut term::Term, matches: &ArgMatches<'a>) -> blockchain::BlockchainName {\n\n match value_t!(matches, \"BLOCKCHAIN_NAME\", blockchain::BlockchainName) {\n\n Ok(r) => { r },\n\n Err(err) => { term.fail_with(err) },\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 74, "score": 112628.40274936272 }, { "content": "/// Compute the Blake2b256 hash of the data on stdin.\n\npub fn hash()\n\n{\n\n let mut data = vec![];\n\n io::stdin().read_to_end(&mut data).expect(\"Cannot read stdin.\");\n\n println!(\"{}\", hash::Blake2b256::new(&data));\n\n}\n", "file_path": "src/debug/mod.rs", "rank": 75, "score": 112058.83266638748 }, { "content": "pub fn create_wallet_state_from_logs<LS>(term: &mut Term, wallet: &Wallet, root_dir: PathBuf, lookup_structure: LS) -> state::State<LS>\n\n where LS: lookup::AddressLookup\n\n{\n\n let log_lock = lock_wallet_log(wallet);\n\n let state = 
state::State::from_logs(lookup_structure,\n\n log::LogReader::open(log_lock).unwrap_or_else(|e| term.fail_with(e))\n\n .into_iter().filter_map(|r| {\n\n match r {\n\n Err(err) => {\n\n term.fail_with(err)\n\n },\n\n Ok(v) => Some(v)\n\n }\n\n })\n\n ).unwrap_or_else(|e| term.fail_with(e));\n\n match state {\n\n Ok(state) => state,\n\n Err(lookup_structure) => {\n\n // create empty state\n\n // 1. get the wallet's blockchain\n\n let blockchain = load_attached_blockchain(term, root_dir, wallet.config.attached_blockchain().unwrap());\n\n\n\n // 2. prepare the wallet state\n\n let initial_ptr = ptr::StatePtr::new_before_genesis(blockchain.config.genesis.clone());\n\n state::State::new(initial_ptr, lookup_structure)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/wallet/utils.rs", "rank": 76, "score": 111855.94330507104 }, { "content": "fn transaction_argument_output_match<'a>(matches: &ArgMatches<'a>) -> Option<(cardano::address::ExtendedAddr, cardano::coin::Coin)> {\n\n if ! matches.is_present(\"TRANSACTION_ADDRESS\") { return None; }\n\n\n\n let address = value_t!(matches, \"TRANSACTION_ADDRESS\", cardano::address::ExtendedAddr).unwrap_or_else(|e| e.exit());\n\n let coin = value_t!(matches, \"TRANSACTION_AMOUNT\", cardano::coin::Coin).unwrap_or_else(|e| e.exit());\n\n\n\n Some((address, coin))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 77, "score": 111759.28743908135 }, { "content": "fn global_quiet_option<'a>(matches: &ArgMatches<'a>) -> bool {\n\n matches.is_present(\"QUIET\")\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 78, "score": 111190.77175402 }, { "content": "fn subcommand_debug<'a>(mut term: term::Term, _rootdir: PathBuf, matches: &ArgMatches<'a>) {\n\n match matches.subcommand() {\n\n (\"address\", Some(matches)) => {\n\n let address = value_t!(matches, \"ADDRESS\", String).unwrap_or_else(|e| e.exit() );\n\n\n\n debug::command_address(term, address);\n\n },\n\n (\"canonicalize-json\", Some(_)) => {\n\n debug::canonicalize_json();\n\n },\n\n (\"hash\", 
Some(_)) => {\n\n debug::hash();\n\n },\n\n _ => {\n\n term.error(matches.usage()).unwrap();\n\n ::std::process::exit(1)\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 79, "score": 110826.72090738286 }, { "content": "pub fn load_randomindex_lookup_structure(term: &mut Term, wallet: &Wallet) -> lookup::randomindex::RandomIndexLookup {\n\n // in the case of the random index, we may not need the password if we have the public key\n\n term.info(\"Enter the wallet password.\\n\").unwrap();\n\n let password = term.password(\"wallet password: \").unwrap();\n\n\n\n let wallet = match wallet.get_wallet_rindex(password.as_bytes()) {\n\n Err(Error::CannotRetrievePrivateKeyInvalidPassword) => {\n\n term.error(\"Invalid wallet spending password\").unwrap();\n\n ::std::process::exit(1);\n\n },\n\n Err(Error::CannotRetrievePrivateKey(err)) => {\n\n term.error(&format!(\"Cannot retrieve the private key of the wallet: {}\", err)).unwrap();\n\n term.info(\"The encrypted wallet password is in an invalid format. 
You might need to delete this wallet and recover it.\").unwrap();\n\n ::std::process::exit(1);\n\n },\n\n Err(err) => {\n\n term.error(IMPOSSIBLE_HAPPENED).unwrap();\n\n panic!(\"failing with an unexpected error {:#?}\", err);\n\n },\n\n Ok(wallet) => { wallet }\n\n };\n\n lookup::randomindex::RandomIndexLookup::from(wallet)\n\n}\n\n\n", "file_path": "src/wallet/utils.rs", "rank": 80, "score": 110489.5085601484 }, { "content": "pub fn load_bip44_lookup_structure(term: &mut Term, wallet: &Wallet) -> lookup::sequentialindex::SequentialBip44Lookup {\n\n // TODO: to prevent from the need of the password, we can ask the user to create accounts ahead.\n\n // if we store the wallet's account public keys in the config file we may not need for the\n\n // password (and for the private key).\n\n term.info(\"Enter the wallet password.\\n\").unwrap();\n\n let password = term.password(\"wallet password: \").unwrap();\n\n\n\n let wallet = match wallet.get_wallet_bip44(password.as_bytes()) {\n\n Err(Error::CannotRetrievePrivateKeyInvalidPassword) => {\n\n term.error(\"Invalid wallet spending password\").unwrap();\n\n ::std::process::exit(1);\n\n },\n\n Err(Error::CannotRetrievePrivateKey(err)) => {\n\n term.error(&format!(\"Cannot retrieve the private key of the wallet: {}\", err)).unwrap();\n\n term.info(\"The encrypted wallet password is in an invalid format. 
You might need to delete this wallet and recover it.\").unwrap();\n\n ::std::process::exit(1);\n\n },\n\n Err(err) => {\n\n term.error(IMPOSSIBLE_HAPPENED).unwrap();\n\n panic!(\"failing with an unexpected error {:#?}\", err);\n\n },\n\n Ok(wallet) => { wallet }\n\n };\n\n lookup::sequentialindex::SequentialBip44Lookup::new(wallet)\n\n}\n", "file_path": "src/wallet/utils.rs", "rank": 81, "score": 110489.5085601484 }, { "content": "/// Read a JSON file from stdin and write its canonicalized form to stdout.\n\npub fn canonicalize_json()\n\n{\n\n let mut json = String::new();\n\n io::stdin().read_to_string(&mut json).expect(\"Cannot read stdin.\");\n\n print!(\"{}\", parse_genesis_data::canonicalize_json(&json));\n\n}\n\n\n", "file_path": "src/debug/mod.rs", "rank": 82, "score": 109558.15840852552 }, { "content": "fn subcommand_wallet<'a>(mut term: term::Term, root_dir: PathBuf, matches: &ArgMatches<'a>) {\n\n match matches.subcommand() {\n\n (\"create\", Some(matches)) => {\n\n let name = wallet_argument_name_match(&matches);\n\n let wallet_scheme = wallet_argument_wallet_scheme_match(&matches);\n\n let derivation_scheme = wallet_argument_derivation_scheme_match(&matches);\n\n let mnemonic_length = wallet_argument_mnemonic_size_match(&matches);\n\n let mnemonic_langs = wallet_argument_mnemonic_languages_match(&matches);\n\n\n\n wallet::commands::new(term, root_dir, name, wallet_scheme, derivation_scheme, mnemonic_length, mnemonic_langs);\n\n },\n\n (\"recover\", Some(matches)) => {\n\n let name = wallet_argument_name_match(&matches);\n\n let mut wallet_scheme = wallet_argument_wallet_scheme_match(&matches);\n\n let mut derivation_scheme = wallet_argument_derivation_scheme_match(&matches);\n\n let mut mnemonic_length = wallet_argument_mnemonic_size_match(&matches);\n\n let mnemonic_lang = wallet_argument_mnemonic_language_match(&matches);\n\n let daedalus_seed = wallet_argument_daedalus_seed_match(&matches);\n\n let interactive = 
matches.is_present(\"RECOVER_INTERACTIVE\");\n\n\n", "file_path": "src/main.rs", "rank": 83, "score": 108739.88708937168 }, { "content": "fn subcommand_blockchain<'a>(mut term: term::Term, root_dir: PathBuf, matches: &ArgMatches<'a>) {\n\n match matches.subcommand() {\n\n (\"list\", Some(matches)) => {\n\n let detailed = matches.is_present(\"LIST_DETAILS\");\n\n\n\n blockchain::commands::list(&mut term, root_dir, detailed)\n\n .unwrap_or_else(|e| term.fail_with(e));\n\n },\n\n (\"new\", Some(matches)) => {\n\n let name = blockchain_argument_name_match(&mut term, &matches);\n\n let net_config = blockchain_argument_template_match(&matches);\n\n\n\n blockchain::commands::new(&mut term, root_dir, name, net_config)\n\n .unwrap_or_else(|e| term.fail_with(e));\n\n },\n\n (\"remote-add\", Some(matches)) => {\n\n let name = blockchain_argument_name_match(&mut term, &matches);\n\n let alias = blockchain_argument_remote_alias_match(&matches);\n\n let endpoint = blockchain_argument_remote_endpoint_match(&matches);\n\n\n", "file_path": "src/main.rs", "rank": 84, "score": 108739.88708937168 }, { "content": "fn subcommand_transaction<'a>(mut term: term::Term, root_dir: PathBuf, matches: &ArgMatches<'a>) {\n\n match matches.subcommand() {\n\n (\"new\", Some(matches)) => {\n\n let blockchain = blockchain_argument_name_match(&mut term, &matches);\n\n transaction::commands::new(&mut term, root_dir, blockchain)\n\n .unwrap_or_else(|e| term.fail_with(e));\n\n },\n\n (\"list\", _) => {\n\n transaction::commands::list(&mut term, root_dir)\n\n .unwrap_or_else(|e| term.fail_with(e));\n\n },\n\n (\"destroy\", Some(matches)) => {\n\n let id = transaction_argument_name_match(&matches);\n\n transaction::commands::destroy(&mut term, root_dir, id)\n\n .unwrap_or_else(|e| term.fail_with(e));\n\n },\n\n (\"export\", Some(matches)) => {\n\n let id = transaction_argument_name_match(&matches);\n\n let file = matches.value_of(\"EXPORT_FILE\");\n\n transaction::commands::export(&mut term, root_dir, id, 
file)\n", "file_path": "src/main.rs", "rank": 85, "score": 108739.88708937168 }, { "content": "fn get_block(blockchain: &Blockchain, hash: &HeaderHash) -> Result<RawBlock>\n\n{\n\n let block_location = match storage::block_location(&blockchain.storage, &hash) {\n\n None => {\n\n return Err(Error::GetBlockDoesNotExist(hash.clone()));\n\n },\n\n Some(loc) => loc\n\n };\n\n\n\n debug!(\"blk location: {:?}\", block_location);\n\n\n\n match storage::block_read_location(&blockchain.storage, &block_location, &hash) {\n\n None => {\n\n // this is a bug, we have a block location available for this hash\n\n // but we were not able to read the block.\n\n return Err(Error::GetInvalidBLock(hash.clone()));\n\n },\n\n Some(rblk) => Ok(rblk)\n\n }\n\n}\n\n\n", "file_path": "src/blockchain/commands.rs", "rank": 86, "score": 108054.37926820874 }, { "content": "fn global_color_option<'a>(matches: &ArgMatches<'a>) -> term::ColorChoice {\n\n match matches.value_of(\"COLOR\") {\n\n None => term::ColorChoice::Auto,\n\n Some(\"auto\") => term::ColorChoice::Auto,\n\n Some(\"always\") => term::ColorChoice::Always,\n\n Some(\"never\") => term::ColorChoice::Never,\n\n Some(&_) => {\n\n // this should not be reachable `clap` will perform validation\n\n // checking of the possible_values given when creating the argument\n\n unreachable!()\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 87, "score": 104907.3156039821 }, { "content": "/// get the path of the given transaction via its staging id\n\npub fn transaction_file(root_dir: PathBuf, id: StagingId) -> PathBuf {\n\n transaction_directory(root_dir).join(id.to_string())\n\n}\n", "file_path": "src/transaction/core/config.rs", "rank": 88, "score": 102571.36190971205 }, { "content": "/// handy function to compute the path to directory\n\n/// where all the wallet metadata will lie.\n\npub fn directory( root_dir: PathBuf\n\n , name: &str\n\n ) -> PathBuf\n\n{\n\n root_dir.join(WALLETS_DIRECTORY).join(name)\n\n}\n\n\n\n/// all the HDWallet 
supported models\n\n///\n\n/// * BIP44 will support a wallet with multiple accounts and sequential indices;\n\n/// * RandomIndex2Levels will support a wallet, without accounts\n\n/// and randomly selected indices (this will force us to encrypt the derivation\n\n/// path in the address, making the address longer and increasing the fee sligthly)\n\n///\n\n#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Copy, Clone)]\n\npub enum HDWalletModel {\n\n BIP44,\n\n RandomIndex2Levels\n\n}\n\n\n", "file_path": "src/wallet/config.rs", "rank": 89, "score": 96836.21261541932 }, { "content": "/// handy function to define where to find the blockchains related\n\n/// functions in a given _cardano-cli_ directory.\n\n///\n\npub fn directory( root_dir: PathBuf\n\n , name: &str\n\n ) -> PathBuf\n\n{\n\n root_dir.join(BLOCKCHAINS_DIRECTORY).join(name)\n\n}\n", "file_path": "src/blockchain/config.rs", "rank": 90, "score": 96836.21261541932 }, { "content": "enum IteratorType<'a> {\n\n Epoch(epoch::Epochs<'a>, Option<epoch::Iter>),\n\n Loose(&'a Storage, storage::block::Range)\n\n}\n\nimpl<'a> IteratorType<'a> {\n\n fn is_loose(&self) -> bool {\n\n match self {\n\n IteratorType::Loose(_, _) => true,\n\n _ => false\n\n }\n\n }\n\n}\n\nimpl<'a> Iterator for IteratorType<'a> {\n\n type Item = Result<RawBlock>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self {\n\n IteratorType::Epoch(ref mut epochs, ref mut opt_iter) => {\n\n if opt_iter.is_none() {\n\n *opt_iter = match epochs.next() {\n", "file_path": "src/blockchain/iter/mod.rs", "rank": 91, "score": 95217.96053666338 }, { "content": "pub fn wallet_directory( root_dir: &PathBuf\n\n ) -> PathBuf\n\n{\n\n root_dir.join(WALLETS_DIRECTORY)\n\n}\n\n\n", "file_path": "src/wallet/config.rs", "rank": 92, "score": 94966.41819097167 }, { "content": "pub fn blockchains_directory( root_dir: &PathBuf\n\n ) -> PathBuf\n\n{\n\n root_dir.join(BLOCKCHAINS_DIRECTORY)\n\n}\n\n\n", "file_path": "src/blockchain/config.rs", "rank": 93, 
"score": 94966.41819097167 }, { "content": "fn find_input_in_all_utxos(term: &mut Term, root_dir: PathBuf, txid: TxId, index: u32)\n\n -> Result<core::Input, Error>\n\n{\n\n let txin = TxoPointer { id: txid, index: index };\n\n for (_, wallet) in Wallets::load(root_dir.clone()).unwrap() {\n\n let state = wallet::utils::create_wallet_state_from_logs(term, &wallet, root_dir.clone(), wallet::state::lookup::accum::Accum::default());\n\n\n\n if let Some(utxo) = state.utxos.get(&txin) {\n\n let txin = utxo.extract_txin();\n\n return Ok(core::Input {\n\n transaction_id: txin.id,\n\n index_in_transaction: txin.index,\n\n expected_value: utxo.credited_value,\n\n });\n\n }\n\n }\n\n\n\n Err(Error::CannotFindInputsInAllLocalUtxos)\n\n}\n\n\n", "file_path": "src/transaction/commands.rs", "rank": 94, "score": 94267.05319494239 }, { "content": "fn blockchain_argument_template_match<'a>(matches: &ArgMatches<'a>)\n\n -> blockchain::Config\n\n{\n\n match matches.value_of(\"BLOCKCHAIN_TEMPLATE\") {\n\n None => blockchain::Config::mainnet(),\n\n Some(\"mainnet\") => blockchain::Config::mainnet(),\n\n Some(\"staging\") => blockchain::Config::staging(),\n\n Some(\"testnet\") => blockchain::Config::testnet(),\n\n Some(&_) => {\n\n // this should not be reachable as clap is handling\n\n // checking the value against all possible value\n\n unreachable!()\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 95, "score": 88400.28962144566 }, { "content": "/// return the directory path where all the pending transactions are\n\npub fn transaction_directory(root_dir: PathBuf) -> PathBuf {\n\n root_dir.join(TRANSACTION_DIR)\n\n}\n\n\n", "file_path": "src/transaction/core/config.rs", "rank": 96, "score": 87502.54919951562 }, { "content": "fn wallet_argument_mnemonic_languages_match<'a>(matches: &ArgMatches<'a>)\n\n -> Vec<impl cardano::bip::bip39::dictionary::Language>\n\n{\n\n let mut languages = Vec::new();\n\n for lan in matches.values_of(\"MNEMONIC_LANGUAGES\").unwrap() {\n\n let value = 
match lan {\n\n \"chinese-simplified\" => cardano::bip::bip39::dictionary::CHINESE_SIMPLIFIED,\n\n \"chinese-traditional\" => cardano::bip::bip39::dictionary::CHINESE_TRADITIONAL,\n\n \"english\" => cardano::bip::bip39::dictionary::ENGLISH,\n\n \"french\" => cardano::bip::bip39::dictionary::FRENCH,\n\n \"italian\" => cardano::bip::bip39::dictionary::ITALIAN,\n\n \"japanese\" => cardano::bip::bip39::dictionary::JAPANESE,\n\n \"korean\" => cardano::bip::bip39::dictionary::KOREAN,\n\n \"spanish\" => cardano::bip::bip39::dictionary::SPANISH,\n\n _ => unreachable!() // clap knows the default values\n\n };\n\n languages.push(value);\n\n }\n\n languages\n\n}\n", "file_path": "src/main.rs", "rank": 97, "score": 86704.13655801641 }, { "content": "fn wallet_argument_mnemonic_language_match<'a>(matches: &ArgMatches<'a>)\n\n -> impl cardano::bip::bip39::dictionary::Language\n\n{\n\n match matches.value_of(\"MNEMONIC_LANGUAGE\").unwrap() {\n\n \"chinese-simplified\" => cardano::bip::bip39::dictionary::CHINESE_SIMPLIFIED,\n\n \"chinese-traditional\" => cardano::bip::bip39::dictionary::CHINESE_TRADITIONAL,\n\n \"english\" => cardano::bip::bip39::dictionary::ENGLISH,\n\n \"french\" => cardano::bip::bip39::dictionary::FRENCH,\n\n \"italian\" => cardano::bip::bip39::dictionary::ITALIAN,\n\n \"japanese\" => cardano::bip::bip39::dictionary::JAPANESE,\n\n \"korean\" => cardano::bip::bip39::dictionary::KOREAN,\n\n \"spanish\" => cardano::bip::bip39::dictionary::SPANISH,\n\n _ => unreachable!() // clap knows the default values\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 98, "score": 86704.13655801641 }, { "content": "fn blockchain_argument_remote_alias_match<'a>(matches: &ArgMatches<'a>) -> String {\n\n match matches.value_of(\"BLOCKCHAIN_REMOTE_ALIAS\") {\n\n Some(r) => { r.to_owned() },\n\n None => { unreachable!() }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 99, "score": 84127.40753904113 } ]
Rust
src/librustc/middle/typeck/check/writeback.rs
ehsanul/rust
7156ded5bcf6831a6da22688d08f71985fdc81df
use middle::pat_util; use middle::ty; use middle::typeck::astconv::AstConv; use middle::typeck::check::FnCtxt; use middle::typeck::infer::{force_all, resolve_all, resolve_region}; use middle::typeck::infer::resolve_type; use middle::typeck::infer; use middle::typeck::{MethodCall, MethodCallee}; use middle::typeck::{vtable_res, vtable_origin}; use middle::typeck::{vtable_static, vtable_param}; use middle::typeck::write_substs_to_tcx; use middle::typeck::write_ty_to_tcx; use util::ppaux; use util::ppaux::Repr; use std::vec_ng::Vec; use syntax::ast; use syntax::codemap::Span; use syntax::print::pprust::pat_to_str; use syntax::visit; use syntax::visit::Visitor; fn resolve_type_vars_in_type(fcx: @FnCtxt, sp: Span, typ: ty::t) -> Option<ty::t> { if !ty::type_needs_infer(typ) { return Some(typ); } match resolve_type(fcx.infcx(), typ, resolve_all | force_all) { Ok(new_type) => return Some(new_type), Err(e) => { if !fcx.ccx.tcx.sess.has_errors() { fcx.ccx.tcx.sess.span_err( sp, format!("cannot determine a type \ for this expression: {}", infer::fixup_err_to_str(e))) } return None; } } } fn resolve_type_vars_in_types(fcx: @FnCtxt, sp: Span, tys: &[ty::t]) -> Vec<ty::t> { tys.iter().map(|t| { match resolve_type_vars_in_type(fcx, sp, *t) { Some(t1) => t1, None => ty::mk_err() } }).collect() } fn resolve_method_map_entry(wbcx: &mut WbCtxt, sp: Span, method_call: MethodCall) { let fcx = wbcx.fcx; let tcx = fcx.ccx.tcx; match fcx.inh.method_map.borrow().get().find(&method_call) { Some(method) => { debug!("writeback::resolve_method_map_entry(call={:?}, entry={:?})", method_call, method.repr(tcx)); let method_ty = match resolve_type_vars_in_type(fcx, sp, method.ty) { Some(t) => t, None => { wbcx.success = false; return; } }; let mut new_tps = Vec::new(); for &subst in method.substs.tps.iter() { match resolve_type_vars_in_type(fcx, sp, subst) { Some(t) => new_tps.push(t), None => { wbcx.success = false; return; } } } let new_method = MethodCallee { origin: method.origin, ty: 
method_ty, substs: ty::substs { tps: new_tps, regions: ty::ErasedRegions, self_ty: None } }; fcx.ccx.method_map.borrow_mut().get().insert(method_call, new_method); } None => {} } } fn resolve_vtable_map_entry(fcx: @FnCtxt, sp: Span, id: ast::NodeId) { match fcx.inh.vtable_map.borrow().get().find_copy(&id) { Some(origins) => { let r_origins = resolve_origins(fcx, sp, origins); fcx.ccx.vtable_map.borrow_mut().get().insert(id, r_origins); debug!("writeback::resolve_vtable_map_entry(id={}, vtables={:?})", id, r_origins.repr(fcx.tcx())); } None => {} } fn resolve_origins(fcx: @FnCtxt, sp: Span, vtbls: vtable_res) -> vtable_res { @vtbls.map(|os| @os.map(|o| resolve_origin(fcx, sp, o))) } fn resolve_origin(fcx: @FnCtxt, sp: Span, origin: &vtable_origin) -> vtable_origin { match origin { &vtable_static(def_id, ref tys, origins) => { let r_tys = resolve_type_vars_in_types(fcx, sp, tys.as_slice()); let r_origins = resolve_origins(fcx, sp, origins); vtable_static(def_id, r_tys, r_origins) } &vtable_param(n, b) => { vtable_param(n, b) } } } } fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId) -> Option<ty::t> { let fcx = wbcx.fcx; let tcx = fcx.ccx.tcx; let adjustment = fcx.inh.adjustments.borrow().get().find_copy(&id); match adjustment { None => (), Some(adjustment) => { match *adjustment { ty::AutoAddEnv(r, s) => { match resolve_region(fcx.infcx(), r, resolve_all | force_all) { Err(e) => { tcx.sess.span_err( sp, format!("cannot resolve bound for closure: \ {}", infer::fixup_err_to_str(e))); } Ok(r1) => { match tcx.def_map.borrow().get().find(&id) { Some(&ast::DefFn(..)) | Some(&ast::DefStaticMethod(..)) | Some(&ast::DefVariant(..)) | Some(&ast::DefStruct(_)) => {} _ => tcx.sess.span_err(sp, "cannot coerce non-statically resolved bare fn") } let resolved_adj = @ty::AutoAddEnv(r1, s); debug!("Adjustments for node {}: {:?}", id, resolved_adj); tcx.adjustments.borrow_mut().get().insert(id, resolved_adj); } } } ty::AutoDerefRef(adj) => { for autoderef in 
range(0, adj.autoderefs) { let method_call = MethodCall::autoderef(id, autoderef as u32); resolve_method_map_entry(wbcx, sp, method_call); } let fixup_region = |r| { match resolve_region(fcx.infcx(), r, resolve_all | force_all) { Ok(r1) => r1, Err(e) => { tcx.sess.span_err( sp, format!("cannot resolve scope of borrow: \ {}", infer::fixup_err_to_str(e))); r } } }; let resolved_autoref = match adj.autoref { None => None, Some(ref r) => Some(r.map_region(fixup_region)) }; let resolved_adj = @ty::AutoDerefRef(ty::AutoDerefRef { autoderefs: adj.autoderefs, autoref: resolved_autoref, }); debug!("Adjustments for node {}: {:?}", id, resolved_adj); tcx.adjustments.borrow_mut().get().insert(id, resolved_adj); } ty::AutoObject(..) => { debug!("Adjustments for node {}: {:?}", id, adjustment); tcx.adjustments.borrow_mut().get().insert(id, adjustment); } } } } let n_ty = fcx.node_ty(id); match resolve_type_vars_in_type(fcx, sp, n_ty) { None => { wbcx.success = false; return None; } Some(t) => { debug!("resolve_type_vars_for_node(id={}, n_ty={}, t={})", id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t)); write_ty_to_tcx(tcx, id, t); let mut ret = Some(t); fcx.opt_node_ty_substs(id, |substs| { let mut new_tps = Vec::new(); for subst in substs.tps.iter() { match resolve_type_vars_in_type(fcx, sp, *subst) { Some(t) => new_tps.push(t), None => { wbcx.success = false; ret = None; break } } } write_substs_to_tcx(tcx, id, new_tps); ret.is_some() }); ret } } } struct WbCtxt { fcx: @FnCtxt, success: bool, } fn visit_stmt(s: &ast::Stmt, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, s.span, ty::stmt_node_id(s)); visit::walk_stmt(wbcx, s, ()); } fn visit_expr(e: &ast::Expr, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, e.span, e.id); resolve_method_map_entry(wbcx, e.span, MethodCall::expr(e.id)); resolve_vtable_map_entry(wbcx.fcx, e.span, e.id); match e.node { ast::ExprFnBlock(ref decl, _) | ast::ExprProc(ref 
decl, _) => { for input in decl.inputs.iter() { let _ = resolve_type_vars_for_node(wbcx, e.span, input.id); } } _ => {} } visit::walk_expr(wbcx, e, ()); } fn visit_block(b: &ast::Block, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, b.span, b.id); visit::walk_block(wbcx, b, ()); } fn visit_pat(p: &ast::Pat, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, p.span, p.id); debug!("Type for pattern binding {} (id {}) resolved to {}", pat_to_str(p), p.id, wbcx.fcx.infcx().ty_to_str( ty::node_id_to_type(wbcx.fcx.ccx.tcx, p.id))); visit::walk_pat(wbcx, p, ()); } fn visit_local(l: &ast::Local, wbcx: &mut WbCtxt) { if !wbcx.success { return; } let var_ty = wbcx.fcx.local_ty(l.span, l.id); match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) { Ok(lty) => { debug!("Type for local {} (id {}) resolved to {}", pat_to_str(l.pat), l.id, wbcx.fcx.infcx().ty_to_str(lty)); write_ty_to_tcx(wbcx.fcx.ccx.tcx, l.id, lty); } Err(e) => { wbcx.fcx.ccx.tcx.sess.span_err( l.span, format!("cannot determine a type \ for this local variable: {}", infer::fixup_err_to_str(e))); wbcx.success = false; } } visit::walk_local(wbcx, l, ()); } fn visit_item(_item: &ast::Item, _wbcx: &mut WbCtxt) { } impl Visitor<()> for WbCtxt { fn visit_item(&mut self, i: &ast::Item, _: ()) { visit_item(i, self); } fn visit_stmt(&mut self, s: &ast::Stmt, _: ()) { visit_stmt(s, self); } fn visit_expr(&mut self, ex:&ast::Expr, _: ()) { visit_expr(ex, self); } fn visit_block(&mut self, b: &ast::Block, _: ()) { visit_block(b, self); } fn visit_pat(&mut self, p: &ast::Pat, _: ()) { visit_pat(p, self); } fn visit_local(&mut self, l: &ast::Local, _: ()) { visit_local(l, self); } fn visit_ty(&mut self, _t: &ast::Ty, _: ()) {} } fn resolve_upvar_borrow_map(wbcx: &mut WbCtxt) { if !wbcx.success { return; } let fcx = wbcx.fcx; let tcx = fcx.tcx(); let upvar_borrow_map = fcx.inh.upvar_borrow_map.borrow(); for (upvar_id, upvar_borrow) in 
upvar_borrow_map.get().iter() { let r = upvar_borrow.region; match resolve_region(fcx.infcx(), r, resolve_all | force_all) { Ok(r) => { let new_upvar_borrow = ty::UpvarBorrow { kind: upvar_borrow.kind, region: r }; debug!("Upvar borrow for {} resolved to {}", upvar_id.repr(tcx), new_upvar_borrow.repr(tcx)); let mut tcx_upvar_borrow_map = tcx.upvar_borrow_map.borrow_mut(); tcx_upvar_borrow_map.get().insert(*upvar_id, new_upvar_borrow); } Err(e) => { let span = ty::expr_span(tcx, upvar_id.closure_expr_id); fcx.ccx.tcx.sess.span_err( span, format!("cannot resolve lifetime for \ captured variable `{}`: {}", ty::local_var_name_str(tcx, upvar_id.var_id).get().to_str(), infer::fixup_err_to_str(e))); wbcx.success = false; } }; } } pub fn resolve_type_vars_in_expr(fcx: @FnCtxt, e: &ast::Expr) -> bool { let mut wbcx = WbCtxt { fcx: fcx, success: true }; let wbcx = &mut wbcx; wbcx.visit_expr(e, ()); resolve_upvar_borrow_map(wbcx); return wbcx.success; } pub fn resolve_type_vars_in_fn(fcx: @FnCtxt, decl: &ast::FnDecl, blk: &ast::Block) -> bool { let mut wbcx = WbCtxt { fcx: fcx, success: true }; let wbcx = &mut wbcx; wbcx.visit_block(blk, ()); for arg in decl.inputs.iter() { wbcx.visit_pat(arg.pat, ()); if !pat_util::pat_is_binding(fcx.tcx().def_map, arg.pat) { resolve_type_vars_for_node(wbcx, arg.pat.span, arg.pat.id); } } resolve_upvar_borrow_map(wbcx); return wbcx.success; }
use middle::pat_util; use middle::ty; use middle::typeck::astconv::AstConv; use middle::typeck::check::FnCtxt; use middle::typeck::infer::{force_all, resolve_all, resolve_region}; use middle::typeck::infer::resolve_type; use middle::typeck::infer; use middle::typeck::{MethodCall, MethodCallee}; use middle::typeck::{vtable_res, vtable_origin}; use middle::typeck::{vtable_static, vtable_param}; use middle::typeck::write_substs_to_tcx; use middle::typeck::write_ty_to_tcx; use util::ppaux; use util::ppaux::Repr; use std::vec_ng::Vec; use syntax::ast; use syntax::codemap::Span; use syntax::print::pprust::pat_to_str; use syntax::visit; use syntax::visit::Visitor; fn resolve_type_vars_in_type(fcx: @FnCtxt, sp: Span, typ: ty::t) -> Option<ty::t> { if !ty::type_needs_infer(typ) { return Some(typ); } match resolve_type(fcx.infcx(), typ, resolve_all | force_all) { Ok(new_type) => return Some(new_type), Err(e) => { if !fcx.ccx.tcx.sess.has_errors() { fcx.ccx.tcx.sess.span_err( sp, format!("cannot determine a type \ for this expression: {}", infer::fixup_err_to_str(e))) } return None; } } } fn resolve_type_vars_in_types(fcx: @FnCtxt, sp: Span, tys: &[ty::t]) -> Vec<ty::t> { tys.iter().map(|t| { match resolve_type_vars_in_type(fcx, sp, *t) { Some(t1) => t1, None => ty::mk_err() } }).collect() } fn resolve_method_map_entry(wbcx: &mut WbCtxt, sp: Span, method_call: MethodCall) { let fcx = wbcx.fcx; let tcx = fcx.ccx.tcx; match fcx.inh.method_map.borrow().get().find(&method_call) { Some(method) => { debug!("writeback::resolve_method_map_entry(call={:?}, entry={:?})", method_call, method.repr(tcx)); let method_ty = match resolve_type_vars_in_type(fcx, sp, method.ty) { Some(t) => t, None => { wbcx.success = false; return; } }; let mut new_tps = Vec::new(); for &subst in method.substs.tps.iter() { match resolve_type_vars_in_type(fcx, sp, subst) { Some(t) => new_tps.push(t), None => { wbcx.success = false; return; } } } let new_method = MethodCallee { origin: method.origin, ty: 
method_ty, substs: ty::substs { tps: new_tps, regions: ty::ErasedRegions, self_ty: None } }; fcx.ccx.method_map.borrow_mut().get().insert(method_call, new_method); } None => {} } } fn resolve_vtable_map_entry(fcx: @FnCtxt, sp: Span, id: ast::NodeId) { match fcx.inh.vtable_map.borrow().get().find_copy(&id) { Some(origins) => { let r_origins = resolve_origins(fcx, sp, origins); fcx.ccx.vtable_map.borrow_mut().get().insert(id, r_origins); debug!("writeback::resolve_vtable_map_entry(id={}, vtables={:?})", id, r_origins.repr(fcx.tcx())); } None => {} } fn resolve_origins(fcx: @FnCtxt, sp: Span, vtbls: vtable_res) -> vtable_res { @vtbls.map(|os| @os.map(|o| resolve_origin(fcx, sp, o))) } fn resolve_origin(fcx: @FnCtxt, sp: Span, origin: &vtable_origin) -> vtable_origin { match origin { &vtable_static(def_id, ref tys, origins) => { let r_tys = resolve_type_vars_in_types(fcx, sp, tys.as_slice()); let r_origins = resolve_origins(fcx, sp, origins); vtable_static(def_id, r_tys, r_origins) } &vtable_param(n, b) => { vtable_param(n, b) } } } } fn resolve_type_vars_for_node(wbcx: &mut WbCtxt, sp: Span, id: ast::NodeId) -> Option<ty::t> { let fcx = wbcx.fcx; let tcx = fcx.ccx.tcx; let adjustment = fcx.inh.adjustments.borrow().get().find_copy(&id); match adjustment { None => (), Some(adjustment) => { match *adjustment { ty::AutoAddEnv(r, s) => { match
{ Err(e) => { tcx.sess.span_err( sp, format!("cannot resolve bound for closure: \ {}", infer::fixup_err_to_str(e))); } Ok(r1) => { match tcx.def_map.borrow().get().find(&id) { Some(&ast::DefFn(..)) | Some(&ast::DefStaticMethod(..)) | Some(&ast::DefVariant(..)) | Some(&ast::DefStruct(_)) => {} _ => tcx.sess.span_err(sp, "cannot coerce non-statically resolved bare fn") } let resolved_adj = @ty::AutoAddEnv(r1, s); debug!("Adjustments for node {}: {:?}", id, resolved_adj); tcx.adjustments.borrow_mut().get().insert(id, resolved_adj); } } } ty::AutoDerefRef(adj) => { for autoderef in range(0, adj.autoderefs) { let method_call = MethodCall::autoderef(id, autoderef as u32); resolve_method_map_entry(wbcx, sp, method_call); } let fixup_region = |r| { match resolve_region(fcx.infcx(), r, resolve_all | force_all) { Ok(r1) => r1, Err(e) => { tcx.sess.span_err( sp, format!("cannot resolve scope of borrow: \ {}", infer::fixup_err_to_str(e))); r } } }; let resolved_autoref = match adj.autoref { None => None, Some(ref r) => Some(r.map_region(fixup_region)) }; let resolved_adj = @ty::AutoDerefRef(ty::AutoDerefRef { autoderefs: adj.autoderefs, autoref: resolved_autoref, }); debug!("Adjustments for node {}: {:?}", id, resolved_adj); tcx.adjustments.borrow_mut().get().insert(id, resolved_adj); } ty::AutoObject(..) 
=> { debug!("Adjustments for node {}: {:?}", id, adjustment); tcx.adjustments.borrow_mut().get().insert(id, adjustment); } } } } let n_ty = fcx.node_ty(id); match resolve_type_vars_in_type(fcx, sp, n_ty) { None => { wbcx.success = false; return None; } Some(t) => { debug!("resolve_type_vars_for_node(id={}, n_ty={}, t={})", id, ppaux::ty_to_str(tcx, n_ty), ppaux::ty_to_str(tcx, t)); write_ty_to_tcx(tcx, id, t); let mut ret = Some(t); fcx.opt_node_ty_substs(id, |substs| { let mut new_tps = Vec::new(); for subst in substs.tps.iter() { match resolve_type_vars_in_type(fcx, sp, *subst) { Some(t) => new_tps.push(t), None => { wbcx.success = false; ret = None; break } } } write_substs_to_tcx(tcx, id, new_tps); ret.is_some() }); ret } } } struct WbCtxt { fcx: @FnCtxt, success: bool, } fn visit_stmt(s: &ast::Stmt, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, s.span, ty::stmt_node_id(s)); visit::walk_stmt(wbcx, s, ()); } fn visit_expr(e: &ast::Expr, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, e.span, e.id); resolve_method_map_entry(wbcx, e.span, MethodCall::expr(e.id)); resolve_vtable_map_entry(wbcx.fcx, e.span, e.id); match e.node { ast::ExprFnBlock(ref decl, _) | ast::ExprProc(ref decl, _) => { for input in decl.inputs.iter() { let _ = resolve_type_vars_for_node(wbcx, e.span, input.id); } } _ => {} } visit::walk_expr(wbcx, e, ()); } fn visit_block(b: &ast::Block, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, b.span, b.id); visit::walk_block(wbcx, b, ()); } fn visit_pat(p: &ast::Pat, wbcx: &mut WbCtxt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, p.span, p.id); debug!("Type for pattern binding {} (id {}) resolved to {}", pat_to_str(p), p.id, wbcx.fcx.infcx().ty_to_str( ty::node_id_to_type(wbcx.fcx.ccx.tcx, p.id))); visit::walk_pat(wbcx, p, ()); } fn visit_local(l: &ast::Local, wbcx: &mut WbCtxt) { if !wbcx.success { return; } let var_ty = 
wbcx.fcx.local_ty(l.span, l.id); match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) { Ok(lty) => { debug!("Type for local {} (id {}) resolved to {}", pat_to_str(l.pat), l.id, wbcx.fcx.infcx().ty_to_str(lty)); write_ty_to_tcx(wbcx.fcx.ccx.tcx, l.id, lty); } Err(e) => { wbcx.fcx.ccx.tcx.sess.span_err( l.span, format!("cannot determine a type \ for this local variable: {}", infer::fixup_err_to_str(e))); wbcx.success = false; } } visit::walk_local(wbcx, l, ()); } fn visit_item(_item: &ast::Item, _wbcx: &mut WbCtxt) { } impl Visitor<()> for WbCtxt { fn visit_item(&mut self, i: &ast::Item, _: ()) { visit_item(i, self); } fn visit_stmt(&mut self, s: &ast::Stmt, _: ()) { visit_stmt(s, self); } fn visit_expr(&mut self, ex:&ast::Expr, _: ()) { visit_expr(ex, self); } fn visit_block(&mut self, b: &ast::Block, _: ()) { visit_block(b, self); } fn visit_pat(&mut self, p: &ast::Pat, _: ()) { visit_pat(p, self); } fn visit_local(&mut self, l: &ast::Local, _: ()) { visit_local(l, self); } fn visit_ty(&mut self, _t: &ast::Ty, _: ()) {} } fn resolve_upvar_borrow_map(wbcx: &mut WbCtxt) { if !wbcx.success { return; } let fcx = wbcx.fcx; let tcx = fcx.tcx(); let upvar_borrow_map = fcx.inh.upvar_borrow_map.borrow(); for (upvar_id, upvar_borrow) in upvar_borrow_map.get().iter() { let r = upvar_borrow.region; match resolve_region(fcx.infcx(), r, resolve_all | force_all) { Ok(r) => { let new_upvar_borrow = ty::UpvarBorrow { kind: upvar_borrow.kind, region: r }; debug!("Upvar borrow for {} resolved to {}", upvar_id.repr(tcx), new_upvar_borrow.repr(tcx)); let mut tcx_upvar_borrow_map = tcx.upvar_borrow_map.borrow_mut(); tcx_upvar_borrow_map.get().insert(*upvar_id, new_upvar_borrow); } Err(e) => { let span = ty::expr_span(tcx, upvar_id.closure_expr_id); fcx.ccx.tcx.sess.span_err( span, format!("cannot resolve lifetime for \ captured variable `{}`: {}", ty::local_var_name_str(tcx, upvar_id.var_id).get().to_str(), infer::fixup_err_to_str(e))); wbcx.success = false; } }; } } 
pub fn resolve_type_vars_in_expr(fcx: @FnCtxt, e: &ast::Expr) -> bool { let mut wbcx = WbCtxt { fcx: fcx, success: true }; let wbcx = &mut wbcx; wbcx.visit_expr(e, ()); resolve_upvar_borrow_map(wbcx); return wbcx.success; } pub fn resolve_type_vars_in_fn(fcx: @FnCtxt, decl: &ast::FnDecl, blk: &ast::Block) -> bool { let mut wbcx = WbCtxt { fcx: fcx, success: true }; let wbcx = &mut wbcx; wbcx.visit_block(blk, ()); for arg in decl.inputs.iter() { wbcx.visit_pat(arg.pat, ()); if !pat_util::pat_is_binding(fcx.tcx().def_map, arg.pat) { resolve_type_vars_for_node(wbcx, arg.pat.span, arg.pat.id); } } resolve_upvar_borrow_map(wbcx); return wbcx.success; }
resolve_region(fcx.infcx(), r, resolve_all | force_all)
call_expression
[ { "content": "pub fn type_is_region_ptr(fcx: @FnCtxt, sp: Span, typ: ty::t) -> bool {\n\n let typ_s = structurally_resolved_type(fcx, sp, typ);\n\n return ty::type_is_region_ptr(typ_s);\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 0, "score": 681692.0685702388 }, { "content": "// Resolves `typ` by a single level if `typ` is a type variable. If no\n\n// resolution is possible, then an error is reported.\n\npub fn structurally_resolved_type(fcx: &FnCtxt, sp: Span, tp: ty::t) -> ty::t {\n\n match infer::resolve_type(fcx.infcx(), tp, force_tvar) {\n\n Ok(t_s) if !ty::type_is_ty_var(t_s) => t_s,\n\n _ => {\n\n fcx.type_error_message(sp, |_actual| {\n\n ~\"the type of this value must be known in this context\"\n\n }, tp, None);\n\n demand::suptype(fcx, sp, ty::mk_err(), tp);\n\n tp\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 2, "score": 656162.1009947392 }, { "content": "pub fn type_is_bare_fn(fcx: @FnCtxt, sp: Span, typ: ty::t) -> bool {\n\n let typ_s = structurally_resolved_type(fcx, sp, typ);\n\n return ty::type_is_bare_fn(typ_s);\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 3, "score": 642071.6242849659 }, { "content": "pub fn type_is_scalar(fcx: @FnCtxt, sp: Span, typ: ty::t) -> bool {\n\n let typ_s = structurally_resolved_type(fcx, sp, typ);\n\n return ty::type_is_scalar(typ_s);\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 4, "score": 640047.4499959967 }, { "content": "pub fn type_is_char(fcx: @FnCtxt, sp: Span, typ: ty::t) -> bool {\n\n let typ_s = structurally_resolved_type(fcx, sp, typ);\n\n return ty::type_is_char(typ_s);\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 5, "score": 640047.4499959967 }, { "content": "pub fn type_is_integral(fcx: @FnCtxt, sp: Span, typ: ty::t) -> bool {\n\n let typ_s = structurally_resolved_type(fcx, sp, typ);\n\n return ty::type_is_integral(typ_s);\n\n}\n\n\n", 
"file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 6, "score": 640047.4499959967 }, { "content": "pub fn type_is_c_like_enum(fcx: @FnCtxt, sp: Span, typ: ty::t) -> bool {\n\n let typ_s = structurally_resolved_type(fcx, sp, typ);\n\n return ty::type_is_c_like_enum(fcx.ccx.tcx, typ_s);\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 7, "score": 633549.4428635887 }, { "content": "pub fn type_is_unsafe_ptr(fcx: @FnCtxt, sp: Span, typ: ty::t) -> bool {\n\n let typ_s = structurally_resolved_type(fcx, sp, typ);\n\n return ty::type_is_unsafe_ptr(typ_s);\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 8, "score": 633549.4428635887 }, { "content": "// Returns the one-level-deep structure of the given type.\n\npub fn structure_of<'a>(fcx: @FnCtxt, sp: Span, typ: ty::t)\n\n -> &'a ty::sty {\n\n &ty::get(structurally_resolved_type(fcx, sp, typ)).sty\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 10, "score": 602768.2927584322 }, { "content": "pub fn check_simd(tcx: ty::ctxt, sp: Span, id: ast::NodeId) {\n\n let t = ty::node_id_to_type(tcx, id);\n\n if ty::type_needs_subst(t) {\n\n tcx.sess.span_err(sp, \"SIMD vector cannot be generic\");\n\n return;\n\n }\n\n match ty::get(t).sty {\n\n ty::ty_struct(did, ref substs) => {\n\n let fields = ty::lookup_struct_fields(tcx, did);\n\n if fields.is_empty() {\n\n tcx.sess.span_err(sp, \"SIMD vector cannot be empty\");\n\n return;\n\n }\n\n let e = ty::lookup_field_type(tcx, did, fields.get(0).id, substs);\n\n if !fields.iter().all(\n\n |f| ty::lookup_field_type(tcx, did, f.id, substs) == e) {\n\n tcx.sess.span_err(sp, \"SIMD vector should be homogeneous\");\n\n return;\n\n }\n\n if !ty::type_is_machine(e) {\n\n tcx.sess.span_err(sp, \"SIMD vector element type should be \\\n\n machine type\");\n\n return;\n\n }\n\n }\n\n _ => ()\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 12, "score": 
545008.6403367617 }, { "content": "pub fn lookup_def_tcx(tcx: ty::ctxt, sp: Span, id: ast::NodeId) -> ast::Def {\n\n let def_map = tcx.def_map.borrow();\n\n match def_map.get().find(&id) {\n\n Some(&x) => x,\n\n _ => {\n\n tcx.sess.span_fatal(sp, \"internal error looking up a definition\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/mod.rs", "rank": 14, "score": 534358.6574638409 }, { "content": "pub fn require_integral(fcx: @FnCtxt, sp: Span, t: ty::t) {\n\n if !type_is_integral(fcx, sp, t) {\n\n fcx.type_error_message(sp, |actual| {\n\n format!(\"mismatched types: expected integral type but found `{}`\",\n\n actual)\n\n }, t, None);\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 15, "score": 520376.73238542594 }, { "content": "pub fn autoderef<T>(fcx: @FnCtxt, sp: Span, base_ty: ty::t,\n\n expr_id: Option<ast::NodeId>,\n\n mut lvalue_pref: LvaluePreference,\n\n should_stop: |ty::t, uint| -> Option<T>)\n\n -> (ty::t, uint, Option<T>) {\n\n /*!\n\n * Executes an autoderef loop for the type `t`. At each step, invokes\n\n * `should_stop` to decide whether to terminate the loop. Returns\n\n * the final type and number of derefs that it performed.\n\n *\n\n * Note: this method does not modify the adjustments table. 
The caller is\n\n * responsible for inserting an AutoAdjustment record into the `fcx`\n\n * using one of the suitable methods.\n\n */\n\n\n\n let mut t = base_ty;\n\n for autoderefs in range(0, fcx.tcx().sess.recursion_limit.get()) {\n\n let resolved_t = structurally_resolved_type(fcx, sp, t);\n\n\n\n match should_stop(resolved_t, autoderefs) {\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 16, "score": 517917.92331319774 }, { "content": "// Requires that the two types unify, and prints an error message if they\n\n// don't.\n\npub fn suptype(fcx: &FnCtxt, sp: Span, expected: ty::t, actual: ty::t) {\n\n suptype_with_fn(fcx, sp, false, expected, actual,\n\n |sp, e, a, s| { fcx.report_mismatched_types(sp, e, a, s) })\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/demand.rs", "rank": 17, "score": 516460.1801187496 }, { "content": "pub fn eqtype(fcx: @FnCtxt, sp: Span, expected: ty::t, actual: ty::t) {\n\n match infer::mk_eqty(fcx.infcx(), false, infer::Misc(sp), actual, expected) {\n\n Ok(()) => { /* ok */ }\n\n Err(ref err) => {\n\n fcx.report_mismatched_types(sp, expected, actual, err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/demand.rs", "rank": 18, "score": 516453.38716439274 }, { "content": "pub fn subtype(fcx: &FnCtxt, sp: Span, expected: ty::t, actual: ty::t) {\n\n suptype_with_fn(fcx, sp, true, actual, expected,\n\n |sp, a, e, s| { fcx.report_mismatched_types(sp, e, a, s) })\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/demand.rs", "rank": 19, "score": 516453.38716439274 }, { "content": "pub fn lookup_def(fcx: @FnCtxt, sp: Span, id: ast::NodeId) -> ast::Def {\n\n lookup_def_ccx(fcx.ccx, sp, id)\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 20, "score": 495939.80379950174 }, { "content": "pub fn check_static(tcx: ty::ctxt, ty: ty::t, sp: Span) -> bool {\n\n if !ty::type_is_static(tcx, ty) {\n\n match ty::get(ty).sty {\n\n ty::ty_param(..) 
=> {\n\n tcx.sess.span_err(sp,\n\n format!(\"value may contain references; \\\n\n add `'static` bound to `{}`\", ty_to_str(tcx, ty)));\n\n }\n\n _ => {\n\n tcx.sess.span_err(sp, \"value may contain references\");\n\n }\n\n }\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/kind.rs", "rank": 21, "score": 495777.56014797953 }, { "content": "// Checks that the type `actual` can be coerced to `expected`.\n\npub fn coerce(fcx: @FnCtxt, sp: Span, expected: ty::t, expr: &ast::Expr) {\n\n let expr_ty = fcx.expr_ty(expr);\n\n match fcx.mk_assignty(expr, expr_ty, expected) {\n\n result::Ok(()) => { /* ok */ }\n\n result::Err(ref err) => {\n\n fcx.report_mismatched_types(sp, expected, expr_ty, err);\n\n }\n\n }\n\n}\n", "file_path": "src/librustc/middle/typeck/check/demand.rs", "rank": 22, "score": 478719.4174144954 }, { "content": "fn region_of_def(fcx: @FnCtxt, def: ast::Def) -> ty::Region {\n\n /*!\n\n * Returns the validity region of `def` -- that is, how long\n\n * is `def` valid?\n\n */\n\n\n\n let tcx = fcx.tcx();\n\n match def {\n\n DefLocal(node_id, _) | DefArg(node_id, _) |\n\n DefBinding(node_id, _) => {\n\n tcx.region_maps.var_region(node_id)\n\n }\n\n DefUpvar(_, subdef, closure_id, body_id) => {\n\n match ty::ty_closure_sigil(fcx.node_ty(closure_id)) {\n\n BorrowedSigil => region_of_def(fcx, *subdef),\n\n ManagedSigil | OwnedSigil => ReScope(body_id)\n\n }\n\n }\n\n _ => {\n\n tcx.sess.bug(format!(\"unexpected def in region_of_def: {:?}\",\n", "file_path": "src/librustc/middle/typeck/check/regionck.rs", "rank": 23, "score": 448400.7379800674 }, { "content": "pub fn method_call_type_param_defs(tcx: ctxt, origin: typeck::MethodOrigin)\n\n -> Rc<Vec<TypeParameterDef> > {\n\n match origin {\n\n typeck::MethodStatic(did) => {\n\n // n.b.: When we encode impl methods, the bounds\n\n // that we encode include both the impl bounds\n\n // and then the method bounds themselves...\n\n ty::lookup_item_type(tcx, 
did).generics.type_param_defs\n\n }\n\n typeck::MethodParam(typeck::MethodParam {\n\n trait_id: trt_id,\n\n method_num: n_mth, ..}) |\n\n typeck::MethodObject(typeck::MethodObject {\n\n trait_id: trt_id,\n\n method_num: n_mth, ..}) => {\n\n // ...trait methods bounds, in contrast, include only the\n\n // method bounds, so we must preprend the tps from the\n\n // trait itself. This ought to be harmonized.\n\n let trait_type_param_defs =\n\n lookup_trait_def(tcx, trt_id).generics.type_param_defs();\n\n Rc::new(vec_ng::append(\n\n Vec::from_slice(trait_type_param_defs),\n\n ty::trait_method(tcx,\n\n trt_id,\n\n n_mth).generics.type_param_defs()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/ty.rs", "rank": 25, "score": 445507.79414970253 }, { "content": "fn compute_types<'tcx,'ast>(tcx: &mut TypeContext<'tcx,'ast>,\n\n ast: Ast<'ast>) -> Type<'tcx>\n\n{\n\n match ast.kind {\n\n ExprInt | ExprVar(_) => {\n\n let ty = tcx.add_type(TypeInt);\n\n tcx.set_type(ast.id, ty)\n\n }\n\n ExprLambda(ast) => {\n\n let arg_ty = tcx.add_type(TypeInt);\n\n let body_ty = compute_types(tcx, ast);\n\n let lambda_ty = tcx.add_type(TypeFunction(arg_ty, body_ty));\n\n tcx.set_type(ast.id, lambda_ty)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test/run-pass/regions-mock-tcx.rs", "rank": 26, "score": 443186.000534868 }, { "content": "fn subst_receiver_types_in_method_ty(tcx: ty::ctxt,\n\n impl_id: ast::DefId,\n\n trait_ref: &ty::TraitRef,\n\n new_def_id: ast::DefId,\n\n method: &ty::Method,\n\n provided_source: Option<ast::DefId>)\n\n -> ty::Method {\n\n\n\n let combined_substs = make_substs_for_receiver_types(\n\n tcx, impl_id, trait_ref, method);\n\n\n\n ty::Method::new(\n\n method.ident,\n\n\n\n // method types *can* appear in the generic bounds\n\n method.generics.subst(tcx, &combined_substs),\n\n\n\n // method types *can* appear in the fty\n\n method.fty.subst(tcx, &combined_substs),\n\n\n\n method.explicit_self,\n\n method.vis,\n\n new_def_id,\n\n 
ImplContainer(impl_id),\n\n provided_source\n\n )\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/coherence.rs", "rank": 27, "score": 434814.55921598064 }, { "content": "/// Resolve vtables for a method call after typeck has finished.\n\n/// Used by trans to monomorphize artificial method callees (e.g. drop).\n\npub fn trans_resolve_method(tcx: ty::ctxt, id: ast::NodeId,\n\n substs: &ty::substs) -> Option<vtable_res> {\n\n let generics = ty::lookup_item_type(tcx, ast_util::local_def(id)).generics;\n\n let type_param_defs = generics.type_param_defs.deref();\n\n if has_trait_bounds(type_param_defs.as_slice()) {\n\n let vcx = VtableContext {\n\n infcx: &infer::new_infer_ctxt(tcx),\n\n param_env: &ty::construct_parameter_environment(tcx, None, [], [], [], [], id)\n\n };\n\n\n\n Some(lookup_vtables(&vcx,\n\n tcx.map.span(id),\n\n type_param_defs.as_slice(),\n\n substs,\n\n false))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl<'a> visit::Visitor<()> for &'a FnCtxt {\n\n fn visit_expr(&mut self, ex: &ast::Expr, _: ()) {\n\n early_resolve_expr(ex, *self, false);\n\n visit::walk_expr(self, ex, ());\n\n }\n\n fn visit_item(&mut self, _: &ast::Item, _: ()) {\n\n // no-op\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/vtable.rs", "rank": 28, "score": 433006.283879468 }, { "content": "pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {\n\n let b = fcx.ccx.builder();\n\n b.position_before(fcx.alloca_insert_pt.get().unwrap());\n\n b.alloca(ty, name)\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/build.rs", "rank": 29, "score": 425781.1508666908 }, { "content": "pub fn get_type(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)\n\n -> ty::ty_param_bounds_and_ty {\n\n\n\n let item = lookup_item(id, cdata.data());\n\n\n\n let t = item_type(ast::DefId { krate: cdata.cnum, node: id }, item, tcx,\n\n cdata);\n\n\n\n let tp_defs = item_ty_param_defs(item, tcx, cdata, tag_items_data_item_ty_param_bounds);\n\n let rp_defs = 
item_region_param_defs(item, cdata);\n\n\n\n ty::ty_param_bounds_and_ty {\n\n generics: ty::Generics {type_param_defs: tp_defs,\n\n region_param_defs: rp_defs},\n\n ty: t\n\n }\n\n}\n\n\n", "file_path": "src/librustc/metadata/decoder.rs", "rank": 30, "score": 424125.9358791333 }, { "content": "fn check_trait_cast(cx: &mut Context, source_ty: ty::t, target_ty: ty::t, span: Span) {\n\n check_cast_for_escaping_regions(cx, source_ty, target_ty, span);\n\n match ty::get(target_ty).sty {\n\n ty::ty_trait(_, _, _, _, bounds) => {\n\n check_trait_cast_bounds(cx, span, source_ty, bounds);\n\n }\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/kind.rs", "rank": 31, "score": 418181.7603421118 }, { "content": "fn enc_region_substs(w: &mut MemWriter, cx: @ctxt, substs: &ty::RegionSubsts) {\n\n match *substs {\n\n ty::ErasedRegions => {\n\n mywrite!(w, \"e\");\n\n }\n\n ty::NonerasedRegions(ref regions) => {\n\n mywrite!(w, \"n\");\n\n for &r in regions.iter() {\n\n enc_region(w, cx, r);\n\n }\n\n mywrite!(w, \".\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/metadata/tyencode.rs", "rank": 32, "score": 413254.69723237376 }, { "content": "fn classify_arg_ty(ty: Type, offset: &mut uint) -> ArgType {\n\n let orig_offset = *offset;\n\n let size = ty_size(ty) * 8;\n\n let mut align = ty_align(ty);\n\n\n\n align = cmp::min(cmp::max(align, 4), 8);\n\n *offset = align_up_to(*offset, align);\n\n *offset += align_up_to(size, align * 8) / 8;\n\n\n\n if is_reg_ty(ty) {\n\n ArgType::direct(ty, None, None, None)\n\n } else {\n\n ArgType::direct(\n\n ty,\n\n Some(struct_ty(ty)),\n\n padding_ty(align, orig_offset),\n\n None\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/cabi_mips.rs", "rank": 33, "score": 411563.927412192 }, { "content": "pub fn resolve_vtable_under_param_substs(tcx: ty::ctxt,\n\n param_substs: Option<@param_substs>,\n\n vt: &typeck::vtable_origin)\n\n -> typeck::vtable_origin {\n\n match *vt {\n\n typeck::vtable_static(trait_id, ref 
tys, sub) => {\n\n let tys = match param_substs {\n\n Some(substs) => {\n\n tys.iter().map(|t| {\n\n ty::subst_tps(tcx,\n\n substs.tys.as_slice(),\n\n substs.self_ty,\n\n *t)\n\n }).collect()\n\n }\n\n _ => Vec::from_slice(tys.as_slice())\n\n };\n\n typeck::vtable_static(\n\n trait_id, tys,\n\n resolve_vtables_under_param_substs(tcx, param_substs, sub))\n", "file_path": "src/librustc/middle/trans/common.rs", "rank": 34, "score": 410858.4766276066 }, { "content": "pub fn resolve_vtables_under_param_substs(tcx: ty::ctxt,\n\n param_substs: Option<@param_substs>,\n\n vts: typeck::vtable_res)\n\n -> typeck::vtable_res {\n\n @vts.iter().map(|ds|\n\n resolve_param_vtables_under_param_substs(tcx,\n\n param_substs,\n\n *ds))\n\n .collect()\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/common.rs", "rank": 35, "score": 410858.4766276066 }, { "content": "pub fn make_substs_for_receiver_types(tcx: ty::ctxt,\n\n impl_id: ast::DefId,\n\n trait_ref: &ty::TraitRef,\n\n method: &ty::Method)\n\n -> ty::substs {\n\n /*!\n\n * Substitutes the values for the receiver's type parameters\n\n * that are found in method, leaving the method's type parameters\n\n * intact. This is in fact a mildly complex operation,\n\n * largely because of the hokey way that we concatenate the\n\n * receiver and method generics.\n\n */\n\n\n\n // determine how many type parameters were declared on the impl\n\n let num_impl_type_parameters = {\n\n let impl_polytype = ty::lookup_item_type(tcx, impl_id);\n\n impl_polytype.generics.type_param_defs().len()\n\n };\n\n\n\n // determine how many type parameters appear on the trait\n", "file_path": "src/librustc/middle/typeck/coherence.rs", "rank": 36, "score": 410688.7584943459 }, { "content": "fn parse_region_substs(st: &mut PState, conv: conv_did) -> ty::RegionSubsts {\n\n match next(st) {\n\n 'e' => ty::ErasedRegions,\n\n 'n' => {\n\n let mut regions = opt_vec::Empty;\n\n while peek(st) != '.' 
{\n\n let r = parse_region(st, |x,y| conv(x,y));\n\n regions.push(r);\n\n }\n\n assert_eq!(next(st), '.');\n\n ty::NonerasedRegions(regions)\n\n }\n\n _ => fail!(\"parse_bound_region: bad input\")\n\n }\n\n}\n\n\n", "file_path": "src/librustc/metadata/tydecode.rs", "rank": 37, "score": 409854.36230200646 }, { "content": "fn main() { let x: a = A; match x { B => { } } }\n", "file_path": "src/test/compile-fail/match-tag-nullary.rs", "rank": 38, "score": 409020.2881697925 }, { "content": "fn get_cases(tcx: ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> Vec<Case> {\n\n ty::enum_variants(tcx, def_id).map(|vi| {\n\n let arg_tys = vi.args.map(|&raw_ty| {\n\n ty::subst(tcx, substs, raw_ty)\n\n });\n\n Case { discr: vi.disr_val, tys: arg_tys }\n\n })\n\n}\n\n\n\n\n", "file_path": "src/librustc/middle/trans/adt.rs", "rank": 39, "score": 407371.0683479341 }, { "content": "pub fn check_freevar_bounds(cx: &Context, sp: Span, ty: ty::t,\n\n bounds: ty::BuiltinBounds, referenced_ty: Option<ty::t>)\n\n{\n\n check_builtin_bounds(cx, ty, bounds, |missing| {\n\n // Will be Some if the freevar is implicitly borrowed (stack closure).\n\n // Emit a less mysterious error message in this case.\n\n match referenced_ty {\n\n Some(rty) => cx.tcx.sess.span_err(sp,\n\n format!(\"cannot implicitly borrow variable of type `{}` in a bounded \\\n\n stack closure (implicit reference does not fulfill `{}`)\",\n\n ty_to_str(cx.tcx, rty), missing.user_string(cx.tcx))),\n\n None => cx.tcx.sess.span_err(sp,\n\n format!(\"cannot capture variable of type `{}`, which does \\\n\n not fulfill `{}`, in a bounded closure\",\n\n ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx))),\n\n }\n\n cx.tcx.sess.span_note(\n\n sp,\n\n format!(\"this closure's environment must satisfy `{}`\",\n\n bounds.user_string(cx.tcx)));\n\n });\n\n}\n\n\n", "file_path": "src/librustc/middle/kind.rs", "rank": 40, "score": 404501.51902958466 }, { "content": "fn check_heap_type(cx: &Context, span: Span, ty: ty::t) {\n\n let xs 
= [ManagedHeapMemory, OwnedHeapMemory, HeapMemory];\n\n for &lint in xs.iter() {\n\n if cx.get_level(lint) == allow { continue }\n\n\n\n let mut n_box = 0;\n\n let mut n_uniq = 0;\n\n ty::fold_ty(cx.tcx, ty, |t| {\n\n match ty::get(t).sty {\n\n ty::ty_box(_) => {\n\n n_box += 1;\n\n }\n\n ty::ty_uniq(_) | ty::ty_str(ty::vstore_uniq) |\n\n ty::ty_vec(_, ty::vstore_uniq) |\n\n ty::ty_trait(_, _, ty::UniqTraitStore, _, _) => {\n\n n_uniq += 1;\n\n }\n\n ty::ty_closure(ref c) if c.sigil == ast::OwnedSigil => {\n\n n_uniq += 1;\n\n }\n", "file_path": "src/librustc/middle/lint.rs", "rank": 41, "score": 403540.68308296485 }, { "content": "pub fn check_trait_cast_bounds(cx: &Context, sp: Span, ty: ty::t,\n\n bounds: ty::BuiltinBounds) {\n\n check_builtin_bounds(cx, ty, bounds, |missing| {\n\n cx.tcx.sess.span_err(sp,\n\n format!(\"cannot pack type `{}`, which does not fulfill \\\n\n `{}`, as a trait bounded by {}\",\n\n ty_to_str(cx.tcx, ty), missing.user_string(cx.tcx),\n\n bounds.user_string(cx.tcx)));\n\n });\n\n}\n\n\n", "file_path": "src/librustc/middle/kind.rs", "rank": 42, "score": 400410.2979237416 }, { "content": "fn g(a: *int) -> *int { let b = f(a); return b; }\n\n\n", "file_path": "src/test/run-pass/type-ptr.rs", "rank": 43, "score": 400146.7042284632 }, { "content": "fn check_copy(cx: &Context, ty: ty::t, sp: Span, reason: &str) {\n\n debug!(\"type_contents({})={}\",\n\n ty_to_str(cx.tcx, ty),\n\n ty::type_contents(cx.tcx, ty).to_str());\n\n if ty::type_moves_by_default(cx.tcx, ty) {\n\n cx.tcx.sess.span_err(\n\n sp, format!(\"copying a value of non-copyable type `{}`\",\n\n ty_to_str(cx.tcx, ty)));\n\n cx.tcx.sess.span_note(sp, format!(\"{}\", reason));\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/kind.rs", "rank": 44, "score": 398104.87986547727 }, { "content": "pub fn check_send(cx: &Context, ty: ty::t, sp: Span) -> bool {\n\n if !ty::type_is_sendable(cx.tcx, ty) {\n\n cx.tcx.sess.span_err(\n\n sp, format!(\"value has non-sendable type `{}`\",\n\n 
ty_to_str(cx.tcx, ty)));\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/kind.rs", "rank": 45, "score": 398104.87986547727 }, { "content": "fn main() { let x: a = A(0); match x { B(y) => { } } }\n", "file_path": "src/test/compile-fail/match-tag-unary.rs", "rank": 46, "score": 395526.6689682176 }, { "content": "// Always use this function instead of storing a zero constant to the memory\n\n// in question. If you store a zero constant, LLVM will drown in vreg\n\n// allocation for large data structures, and the generated code will be\n\n// awful. (A telltale sign of this is large quantities of\n\n// `mov [byte ptr foo],0` in the generated code.)\n\nfn memzero(b: &Builder, llptr: ValueRef, ty: Type) {\n\n let _icx = push_ctxt(\"memzero\");\n\n let ccx = b.ccx;\n\n\n\n let intrinsic_key = match ccx.sess.targ_cfg.arch {\n\n X86 | Arm | Mips => \"llvm.memset.p0i8.i32\",\n\n X86_64 => \"llvm.memset.p0i8.i64\"\n\n };\n\n\n\n let llintrinsicfn = ccx.intrinsics.get_copy(&intrinsic_key);\n\n let llptr = b.pointercast(llptr, Type::i8().ptr_to());\n\n let llzeroval = C_u8(0);\n\n let size = machine::llsize_of(ccx, ty);\n\n let align = C_i32(llalign_of_min(ccx, ty) as i32);\n\n let volatile = C_i1(false);\n\n b.call(llintrinsicfn, [llptr, llzeroval, size, align, volatile], []);\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/base.rs", "rank": 47, "score": 395113.4116092921 }, { "content": "fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region {\n\n match next(st) {\n\n 'b' => {\n\n assert_eq!(next(st), '[');\n\n let id = parse_uint(st) as ast::NodeId;\n\n assert_eq!(next(st), '|');\n\n let br = parse_bound_region(st, |x,y| conv(x,y));\n\n assert_eq!(next(st), ']');\n\n ty::ReLateBound(id, br)\n\n }\n\n 'B' => {\n\n assert_eq!(next(st), '[');\n\n let node_id = parse_uint(st) as ast::NodeId;\n\n assert_eq!(next(st), '|');\n\n let index = parse_uint(st);\n\n assert_eq!(next(st), '|');\n\n let nm = token::str_to_ident(parse_str(st, 
']'));\n\n ty::ReEarlyBound(node_id, index, nm.name)\n\n }\n\n 'f' => {\n", "file_path": "src/librustc/metadata/tydecode.rs", "rank": 48, "score": 395077.9159944055 }, { "content": "fn copy_borrowed_ptr<'a, 'b, 'c>(p: &'a mut &'b mut &'c mut int) -> &'b mut int {\n\n &mut ***p //~ ERROR cannot infer an appropriate lifetime\n\n}\n\n\n", "file_path": "src/test/compile-fail/regions-reborrow-from-shorter-mut-ref-mut-ref.rs", "rank": 49, "score": 393451.79256538604 }, { "content": "pub fn get_field_type(tcx: ty::ctxt, class_id: ast::DefId,\n\n def: ast::DefId) -> ty::ty_param_bounds_and_ty {\n\n let cstore = tcx.cstore;\n\n let cdata = cstore.get_crate_data(class_id.krate);\n\n let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items);\n\n let class_doc = expect(tcx.diag,\n\n decoder::maybe_find_item(class_id.node, all_items),\n\n || format!(\"get_field_type: class ID {:?} not found\",\n\n class_id) );\n\n let the_field = expect(tcx.diag,\n\n decoder::maybe_find_item(def.node, class_doc),\n\n || format!(\"get_field_type: in class {:?}, field ID {:?} not found\",\n\n class_id, def) );\n\n let ty = decoder::item_type(def, the_field, tcx, cdata);\n\n ty::ty_param_bounds_and_ty {\n\n generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),\n\n region_param_defs: Rc::new(Vec::new())},\n\n ty: ty\n\n }\n\n}\n\n\n", "file_path": "src/librustc/metadata/csearch.rs", "rank": 50, "score": 393183.5920929309 }, { "content": "fn enc_region(w: &mut MemWriter, cx: @ctxt, r: ty::Region) {\n\n match r {\n\n ty::ReLateBound(id, br) => {\n\n mywrite!(w, \"b[{}|\", id);\n\n enc_bound_region(w, cx, br);\n\n mywrite!(w, \"]\");\n\n }\n\n ty::ReEarlyBound(node_id, index, name) => {\n\n mywrite!(w, \"B[{}|{}|{}]\",\n\n node_id,\n\n index,\n\n token::get_name(name));\n\n }\n\n ty::ReFree(ref fr) => {\n\n mywrite!(w, \"f[{}|\", fr.scope_id);\n\n enc_bound_region(w, cx, fr.bound_region);\n\n mywrite!(w, \"]\");\n\n }\n\n ty::ReScope(nid) => {\n\n mywrite!(w, \"s{}|\", nid);\n", 
"file_path": "src/librustc/metadata/tyencode.rs", "rank": 51, "score": 392313.6570017289 }, { "content": "// Functions that write types into the node type table\n\npub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::NodeId, ty: ty::t) {\n\n debug!(\"write_ty_to_tcx({}, {})\", node_id, ppaux::ty_to_str(tcx, ty));\n\n assert!(!ty::type_needs_infer(ty));\n\n let mut node_types = tcx.node_types.borrow_mut();\n\n node_types.get().insert(node_id as uint, ty);\n\n}\n", "file_path": "src/librustc/middle/typeck/mod.rs", "rank": 52, "score": 391428.3688138845 }, { "content": "pub fn expand_args(ecx: &mut ExtCtxt, sp: Span,\n\n tts: &[ast::TokenTree]) -> base::MacResult {\n\n\n\n match parse_args(ecx, sp, tts) {\n\n (extra, Some((efmt, args, order, names))) => {\n\n MRExpr(expand_preparsed_format_args(ecx, sp, extra, efmt, args,\n\n order, names))\n\n }\n\n (_, None) => MRExpr(ecx.expr_uint(sp, 2))\n\n }\n\n}\n\n\n", "file_path": "src/libsyntax/ext/format.rs", "rank": 53, "score": 390657.74596103374 }, { "content": "// This calculates STH for a symbol, as defined above\n\nfn symbol_hash(tcx: ty::ctxt, symbol_hasher: &mut Sha256,\n\n t: ty::t, link_meta: &LinkMeta) -> ~str {\n\n // NB: do *not* use abbrevs here as we want the symbol names\n\n // to be independent of one another in the crate.\n\n\n\n symbol_hasher.reset();\n\n symbol_hasher.input_str(link_meta.crateid.name);\n\n symbol_hasher.input_str(\"-\");\n\n symbol_hasher.input_str(link_meta.crate_hash.as_str());\n\n symbol_hasher.input_str(\"-\");\n\n symbol_hasher.input_str(encoder::encoded_ty(tcx, t));\n\n let mut hash = truncated_hash_result(symbol_hasher);\n\n // Prefix with 'h' so that it never blends into adjacent digits\n\n hash.unshift_char('h');\n\n hash\n\n}\n\n\n", "file_path": "src/librustc/back/link.rs", "rank": 54, "score": 390395.9094789851 }, { "content": "pub fn print_type_ref(s: &mut State, ty: &P<ast::Ty>) -> io::IoResult<()> {\n\n print_type(s, *ty)\n\n}\n\n\n", "file_path": 
"src/libsyntax/print/pprust.rs", "rank": 55, "score": 389536.14364051225 }, { "content": "// Creates and returns space for, or returns the argument representing, the\n\n// slot where the return value of the function must go.\n\npub fn make_return_pointer(fcx: &FunctionContext, output_type: ty::t)\n\n -> ValueRef {\n\n unsafe {\n\n if type_of::return_uses_outptr(fcx.ccx, output_type) {\n\n llvm::LLVMGetParam(fcx.llfn, 0)\n\n } else {\n\n let lloutputtype = type_of::type_of(fcx.ccx, output_type);\n\n let bcx = fcx.entry_bcx.get().unwrap();\n\n Alloca(bcx, lloutputtype, \"__make_return_pointer\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/base.rs", "rank": 56, "score": 386935.65590504825 }, { "content": "// Detect points where a trait-bounded type parameter is\n\n// instantiated, resolve the impls for the parameters.\n\npub fn resolve_in_block(mut fcx: &FnCtxt, bl: &ast::Block) {\n\n visit::walk_block(&mut fcx, bl, ());\n\n}\n", "file_path": "src/librustc/middle/typeck/check/vtable.rs", "rank": 57, "score": 386853.25707341346 }, { "content": "pub fn get_supertraits(tcx: ty::ctxt, def: ast::DefId) -> Vec<@ty::TraitRef> {\n\n let cstore = tcx.cstore;\n\n let cdata = cstore.get_crate_data(def.krate);\n\n decoder::get_supertraits(cdata, def.node, tcx)\n\n}\n\n\n", "file_path": "src/librustc/metadata/csearch.rs", "rank": 58, "score": 385452.3092957661 }, { "content": "pub fn explain_region_and_span(cx: ctxt, region: ty::Region)\n\n -> (~str, Option<Span>) {\n\n return match region {\n\n ReScope(node_id) => {\n\n match cx.map.find(node_id) {\n\n Some(ast_map::NodeBlock(ref blk)) => {\n\n explain_span(cx, \"block\", blk.span)\n\n }\n\n Some(ast_map::NodeExpr(expr)) => {\n\n match expr.node {\n\n ast::ExprCall(..) => explain_span(cx, \"call\", expr.span),\n\n ast::ExprMethodCall(..) => {\n\n explain_span(cx, \"method call\", expr.span)\n\n },\n\n ast::ExprMatch(..) 
=> explain_span(cx, \"match\", expr.span),\n\n _ => explain_span(cx, \"expression\", expr.span)\n\n }\n\n }\n\n Some(ast_map::NodeStmt(stmt)) => {\n\n explain_span(cx, \"statement\", stmt.span)\n", "file_path": "src/librustc/util/ppaux.rs", "rank": 59, "score": 384777.85280766187 }, { "content": "fn copy_borrowed_ptr<'a, 'b>(p: &'a mut &'b mut int) -> &'b mut int {\n\n &mut **p //~ ERROR lifetime of `p` is too short\n\n}\n\n\n", "file_path": "src/test/compile-fail/regions-reborrow-from-shorter-mut-ref.rs", "rank": 60, "score": 384019.69011621154 }, { "content": "fn g() -> int { let x = match true { true => { f() } false => { 10 } }; return x; }\n\n\n", "file_path": "src/test/run-fail/expr-match-fail-fn.rs", "rank": 61, "score": 382998.41423900344 }, { "content": "/// Take the various parts of `format_args!(extra, efmt, args...,\n\n/// name=names...)` and construct the appropriate formatting\n\n/// expression.\n\npub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,\n\n extra: @ast::Expr,\n\n efmt: @ast::Expr, args: Vec<@ast::Expr>,\n\n name_ordering: Vec<~str>,\n\n names: HashMap<~str, @ast::Expr>) -> @ast::Expr {\n\n let arg_types = Vec::from_fn(args.len(), |_| None);\n\n let mut cx = Context {\n\n ecx: ecx,\n\n args: args,\n\n arg_types: arg_types,\n\n names: names,\n\n name_positions: HashMap::new(),\n\n name_types: HashMap::new(),\n\n name_ordering: name_ordering,\n\n nest_level: 0,\n\n next_arg: 0,\n\n pieces: Vec::new(),\n\n method_statics: Vec::new(),\n\n fmtsp: sp,\n\n };\n", "file_path": "src/libsyntax/ext/format.rs", "rank": 62, "score": 382048.1648158055 }, { "content": "fn insert_vtables(fcx: &FnCtxt, expr_id: ast::NodeId, vtables: vtable_res) {\n\n debug!(\"insert_vtables(expr_id={}, vtables={:?})\",\n\n expr_id, vtables.repr(fcx.tcx()));\n\n fcx.inh.vtable_map.borrow_mut().get().insert(expr_id, vtables);\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/vtable.rs", "rank": 63, "score": 380879.217039107 }, { "content": "// 
Substitute *only* type parameters. Used in trans where regions are erased.\n\npub fn subst_tps(tcx: ctxt, tps: &[t], self_ty_opt: Option<t>, typ: t) -> t {\n\n let mut subst = TpsSubst { tcx: tcx, self_ty_opt: self_ty_opt, tps: tps };\n\n return subst.fold_ty(typ);\n\n\n\n struct TpsSubst<'a> {\n\n tcx: ctxt,\n\n self_ty_opt: Option<t>,\n\n tps: &'a [t],\n\n }\n\n\n\n impl<'a> TypeFolder for TpsSubst<'a> {\n\n fn tcx(&self) -> ty::ctxt { self.tcx }\n\n\n\n fn fold_ty(&mut self, t: ty::t) -> ty::t {\n\n if self.tps.len() == 0u && self.self_ty_opt.is_none() {\n\n return t;\n\n }\n\n\n\n let tb = ty::get(t);\n\n if self.self_ty_opt.is_none() && !tbox_has_flag(tb, has_params) {\n", "file_path": "src/librustc/middle/ty.rs", "rank": 64, "score": 379915.28950242815 }, { "content": "fn call_id_3() { id(return) && id(return); }\n\n\n", "file_path": "src/test/run-pass/unreachable-code.rs", "rank": 65, "score": 378834.80080095306 }, { "content": "fn call_id_3() { id(return) && id(return); }\n\n\n", "file_path": "src/test/run-pass/unreachable-code-1.rs", "rank": 66, "score": 378834.80080095306 }, { "content": "pub fn write_substs_to_tcx(tcx: ty::ctxt,\n\n node_id: ast::NodeId,\n\n substs: Vec<ty::t> ) {\n\n if substs.len() > 0u {\n\n debug!(\"write_substs_to_tcx({}, {:?})\", node_id,\n\n substs.map(|t| ppaux::ty_to_str(tcx, *t)));\n\n assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));\n\n\n\n let mut node_type_substs = tcx.node_type_substs.borrow_mut();\n\n node_type_substs.get().insert(node_id, substs);\n\n }\n\n}\n", "file_path": "src/librustc/middle/typeck/mod.rs", "rank": 67, "score": 378560.8578314937 }, { "content": "pub fn expr_span(cx: ctxt, id: NodeId) -> Span {\n\n match cx.map.find(id) {\n\n Some(ast_map::NodeExpr(e)) => {\n\n e.span\n\n }\n\n Some(f) => {\n\n cx.sess.bug(format!(\"Node id {} is not an expr: {:?}\",\n\n id, f));\n\n }\n\n None => {\n\n cx.sess.bug(format!(\"Node id {} is not present \\\n\n in the node map\", id));\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "src/librustc/middle/ty.rs", "rank": 68, "score": 377868.32524473814 }, { "content": "pub fn relate_free_regions(tcx: ty::ctxt, fn_sig: &ty::FnSig) {\n\n /*!\n\n * This function populates the region map's `free_region_map`.\n\n * It walks over the transformed self type and argument types\n\n * for each function just before we check the body of that\n\n * function, looking for types where you have a borrowed\n\n * pointer to other borrowed data (e.g., `&'a &'b [uint]`.\n\n * We do not allow references to outlive the things they\n\n * point at, so we can assume that `'a <= 'b`.\n\n *\n\n * Tests: `src/test/compile-fail/regions-free-region-ordering-*.rs`\n\n */\n\n\n\n debug!(\"relate_free_regions >>\");\n\n\n\n let mut all_tys = Vec::new();\n\n for arg in fn_sig.inputs.iter() {\n\n all_tys.push(*arg);\n\n }\n\n\n", "file_path": "src/librustc/middle/typeck/check/regionmanip.rs", "rank": 69, "score": 376483.8115635965 }, { "content": "fn opt_eq(tcx: ty::ctxt, a: &Opt, b: &Opt) -> bool {\n\n match (a, b) {\n\n (&lit(a), &lit(b)) => {\n\n match (a, b) {\n\n (UnitLikeStructLit(a), UnitLikeStructLit(b)) => a == b,\n\n _ => {\n\n let a_expr;\n\n match a {\n\n ExprLit(existing_a_expr) => a_expr = existing_a_expr,\n\n ConstLit(a_const) => {\n\n let e = const_eval::lookup_const_by_id(tcx, a_const);\n\n a_expr = e.unwrap();\n\n }\n\n UnitLikeStructLit(_) => {\n\n fail!(\"UnitLikeStructLit should have been handled \\\n\n above\")\n\n }\n\n }\n\n\n\n let b_expr;\n", "file_path": "src/librustc/middle/trans/_match.rs", "rank": 70, "score": 376028.4921042193 }, { "content": "pub fn eq_regions<C:Combine>(this: &C, a: ty::Region, b: ty::Region)\n\n -> ures {\n\n debug!(\"eq_regions({}, {})\",\n\n a.repr(this.infcx().tcx),\n\n b.repr(this.infcx().tcx));\n\n let sub = this.sub();\n\n indent(|| {\n\n this.infcx().try(|| {\n\n sub.regions(a, b).and_then(|_r| sub.contraregions(a, b))\n\n }).or_else(|e| {\n\n // substitute a better error, but use the regions\n\n // found in the 
original error\n\n match e {\n\n ty::terr_regions_does_not_outlive(a1, b1) =>\n\n Err(ty::terr_regions_not_same(a1, b1)),\n\n _ => Err(e)\n\n }\n\n }).to_ures()\n\n })\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/infer/combine.rs", "rank": 71, "score": 375465.6497958854 }, { "content": "pub fn replace_closure_return_type(tcx: ctxt, fn_type: t, ret_type: t) -> t {\n\n /*!\n\n *\n\n * Returns a new function type based on `fn_type` but returning a value of\n\n * type `ret_type` instead. */\n\n\n\n match ty::get(fn_type).sty {\n\n ty::ty_closure(ref fty) => {\n\n ty::mk_closure(tcx, ClosureTy {\n\n sig: FnSig {output: ret_type, ..fty.sig.clone()},\n\n ..(*fty).clone()\n\n })\n\n }\n\n _ => {\n\n tcx.sess.bug(format!(\n\n \"replace_fn_ret() invoked with non-fn-type: {}\",\n\n ty_to_str(tcx, fn_type)));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/ty.rs", "rank": 72, "score": 375149.66944920033 }, { "content": "pub fn expand_syntax_ext(ecx: &mut base::ExtCtxt, sp: Span,\n\n _tts: &[ast::TokenTree]) -> base::MacResult {\n\n ecx.span_err(sp, \"`fmt!` is deprecated, use `format!` instead\");\n\n ecx.parse_sess.span_diagnostic.span_note(sp,\n\n \"see http://static.rust-lang.org/doc/master/std/fmt/index.html \\\n\n for documentation\");\n\n\n\n base::MRExpr(ecx.expr_uint(sp, 2))\n\n}\n", "file_path": "src/libsyntax/ext/fmt.rs", "rank": 73, "score": 374404.0602996387 }, { "content": "/// Returns the supertraits of the given trait.\n\npub fn get_supertraits(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)\n\n -> Vec<@ty::TraitRef> {\n\n let mut results = Vec::new();\n\n let item_doc = lookup_item(id, cdata.data());\n\n reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {\n\n // NB. Only reads the ones that *aren't* builtin-bounds. 
See also\n\n // get_trait_def() for collecting the builtin bounds.\n\n // FIXME(#8559): The builtin bounds shouldn't be encoded in the first place.\n\n let trait_ref = doc_trait_ref(trait_doc, tcx, cdata);\n\n if tcx.lang_items.to_builtin_kind(trait_ref.def_id).is_none() {\n\n results.push(@trait_ref);\n\n }\n\n true\n\n });\n\n return results;\n\n}\n\n\n", "file_path": "src/librustc/metadata/decoder.rs", "rank": 74, "score": 374103.26632042136 }, { "content": "pub fn node_id_to_trait_ref(cx: ctxt, id: ast::NodeId) -> @ty::TraitRef {\n\n let trait_refs = cx.trait_refs.borrow();\n\n match trait_refs.get().find(&id) {\n\n Some(&t) => t,\n\n None => cx.sess.bug(\n\n format!(\"node_id_to_trait_ref: no trait ref for node `{}`\",\n\n cx.map.node_to_str(id)))\n\n }\n\n}\n\n\n", "file_path": "src/librustc/middle/ty.rs", "rank": 75, "score": 373707.44921880914 }, { "content": "fn check_impl_of_trait(cx: &mut Context, it: &Item, trait_ref: &TraitRef, self_type: &Ty) {\n\n let def_map = cx.tcx.def_map.borrow();\n\n let ast_trait_def = def_map.get()\n\n .find(&trait_ref.ref_id)\n\n .expect(\"trait ref not in def map!\");\n\n let trait_def_id = ast_util::def_id_of_def(*ast_trait_def);\n\n let trait_def;\n\n {\n\n let trait_defs = cx.tcx.trait_defs.borrow();\n\n trait_def = *trait_defs.get()\n\n .find(&trait_def_id)\n\n .expect(\"trait def not in trait-defs map!\");\n\n }\n\n\n\n // If this trait has builtin-kind supertraits, meet them.\n\n let self_ty: ty::t = ty::node_id_to_type(cx.tcx, it.id);\n\n debug!(\"checking impl with self type {:?}\", ty::get(self_ty).sty);\n\n check_builtin_bounds(cx, self_ty, trait_def.bounds, |missing| {\n\n cx.tcx.sess.span_err(self_type.span,\n\n format!(\"the type `{}', which does not fulfill `{}`, cannot implement this \\\n", "file_path": "src/librustc/middle/kind.rs", "rank": 76, "score": 373360.7036439451 }, { "content": "pub fn lookup_def_ccx(ccx: &CrateCtxt, sp: Span, id: ast::NodeId)\n\n -> ast::Def {\n\n lookup_def_tcx(ccx.tcx, sp, 
id)\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/mod.rs", "rank": 77, "score": 373344.0239927969 }, { "content": "fn check_expr_coercable_to_type(fcx: @FnCtxt, expr: &ast::Expr, expected: ty::t) {\n\n check_expr_with_unifier(fcx, expr, Some(expected), NoPreference, || {\n\n demand::coerce(fcx, expr.span, expected, expr)\n\n });\n\n}\n\n\n", "file_path": "src/librustc/middle/typeck/check/mod.rs", "rank": 78, "score": 372406.2418053269 }, { "content": "pub fn get_type(tcx: ty::ctxt,\n\n def: ast::DefId)\n\n -> ty::ty_param_bounds_and_ty {\n\n let cstore = tcx.cstore;\n\n let cdata = cstore.get_crate_data(def.krate);\n\n decoder::get_type(cdata, def.node, tcx)\n\n}\n\n\n", "file_path": "src/librustc/metadata/csearch.rs", "rank": 79, "score": 371679.3221243082 }, { "content": "fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, sp: Span,\n\n msg: &str, lvl: Level, custom: bool) -> io::IoResult<()> {\n\n let ss = cm.span_to_str(sp);\n\n let lines = cm.span_to_lines(sp);\n\n if custom {\n\n // we want to tell compiletest/runtest to look at the last line of the\n\n // span (since `custom_highlight_lines` displays an arrow to the end of\n\n // the span)\n\n let span_end = Span { lo: sp.hi, hi: sp.hi, expn_info: sp.expn_info};\n\n let ses = cm.span_to_str(span_end);\n\n try!(print_diagnostic(dst, ses, lvl, msg));\n\n try!(custom_highlight_lines(dst, cm, sp, lvl, lines));\n\n } else {\n\n try!(print_diagnostic(dst, ss, lvl, msg));\n\n try!(highlight_lines(dst, cm, sp, lvl, lines));\n\n }\n\n print_macro_backtrace(dst, cm, sp)\n\n}\n\n\n", "file_path": "src/libsyntax/diagnostic.rs", "rank": 80, "score": 371608.85803539003 }, { "content": "pub fn get_trait_of_method(cdata: Cmd, id: ast::NodeId, tcx: ty::ctxt)\n\n -> Option<ast::DefId> {\n\n let item_doc = lookup_item(id, cdata.data());\n\n let parent_item_id = match item_parent_item(item_doc) {\n\n None => return None,\n\n Some(item_id) => item_id,\n\n };\n\n let parent_item_id = translate_def_id(cdata, 
parent_item_id);\n\n let parent_item_doc = lookup_item(parent_item_id.node, cdata.data());\n\n match item_family(parent_item_doc) {\n\n Trait => Some(item_def_id(parent_item_doc, cdata)),\n\n Impl => {\n\n reader::maybe_get_doc(parent_item_doc, tag_item_trait_ref)\n\n .map(|_| item_trait_ref(parent_item_doc, tcx, cdata).def_id)\n\n }\n\n _ => None\n\n }\n\n}\n\n\n\n\n", "file_path": "src/librustc/metadata/decoder.rs", "rank": 81, "score": 370299.03160349175 }, { "content": "pub fn return_uses_outptr(ccx: &CrateContext, ty: ty::t) -> bool {\n\n !type_is_immediate(ccx, ty)\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/type_of.rs", "rank": 82, "score": 369498.2392794669 }, { "content": "// Given a def_id for an impl, return information about its vtables\n\npub fn get_impl_vtables(tcx: ty::ctxt,\n\n def: ast::DefId) -> typeck::impl_res {\n\n let cstore = tcx.cstore;\n\n let cdata = cstore.get_crate_data(def.krate);\n\n decoder::get_impl_vtables(cdata, def.node, tcx)\n\n}\n\n\n", "file_path": "src/librustc/metadata/csearch.rs", "rank": 83, "score": 366869.8551628295 }, { "content": "fn determine_parameters_to_be_inferred<'a>(tcx: ty::ctxt,\n\n arena: &'a mut Arena,\n\n krate: &ast::Crate)\n\n -> TermsContext<'a> {\n\n let mut terms_cx = TermsContext {\n\n tcx: tcx,\n\n arena: arena,\n\n inferred_map: HashMap::new(),\n\n inferred_infos: Vec::new(),\n\n\n\n // cache and share the variance struct used for items with\n\n // no type/region parameters\n\n empty_variances: @ty::ItemVariances { self_param: None,\n\n type_params: opt_vec::Empty,\n\n region_params: opt_vec::Empty }\n\n };\n\n\n\n visit::walk_crate(&mut terms_cx, krate, ());\n\n\n\n terms_cx\n", "file_path": "src/librustc/middle/typeck/variance.rs", "rank": 84, "score": 366864.0063518539 }, { "content": "pub fn find_vtable(tcx: ty::ctxt,\n\n ps: &param_substs,\n\n n_param: typeck::param_index,\n\n n_bound: uint)\n\n -> typeck::vtable_origin {\n\n debug!(\"find_vtable(n_param={:?}, n_bound={}, ps={})\",\n\n 
n_param, n_bound, ps.repr(tcx));\n\n\n\n let param_bounds = match n_param {\n\n typeck::param_self => ps.self_vtables.expect(\"self vtables missing\"),\n\n typeck::param_numbered(n) => {\n\n let tables = ps.vtables\n\n .expect(\"vtables missing where they are needed\");\n\n *tables.get(n)\n\n }\n\n };\n\n param_bounds.get(n_bound).clone()\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/common.rs", "rank": 85, "score": 366849.17900621647 }, { "content": "pub fn check_crate(tcx: ty::ctxt,\n\n method_map: MethodMap,\n\n moves_map: moves::MovesMap,\n\n krate: &Crate) {\n\n let cx = @MatchCheckCtxt {tcx: tcx,\n\n method_map: method_map,\n\n moves_map: moves_map};\n\n let mut v = CheckMatchVisitor { cx: cx };\n\n\n\n visit::walk_crate(&mut v, krate, ());\n\n\n\n tcx.sess.abort_if_errors();\n\n}\n\n\n", "file_path": "src/librustc/middle/check_match.rs", "rank": 86, "score": 366741.4604902733 }, { "content": "pub fn require_same_types(tcx: ty::ctxt,\n\n maybe_infcx: Option<&infer::InferCtxt>,\n\n t1_is_expected: bool,\n\n span: Span,\n\n t1: ty::t,\n\n t2: ty::t,\n\n msg: || -> ~str)\n\n -> bool {\n\n let result = match maybe_infcx {\n\n None => {\n\n let infcx = infer::new_infer_ctxt(tcx);\n\n infer::mk_eqty(&infcx, t1_is_expected, infer::Misc(span), t1, t2)\n\n }\n\n Some(infcx) => {\n\n infer::mk_eqty(infcx, t1_is_expected, infer::Misc(span), t1, t2)\n\n }\n\n };\n\n\n\n match result {\n\n Ok(_) => true,\n", "file_path": "src/librustc/middle/typeck/mod.rs", "rank": 87, "score": 366671.92783641035 }, { "content": "pub fn maybe_get_item_ast(cdata: Cmd, tcx: ty::ctxt, id: ast::NodeId,\n\n decode_inlined_item: DecodeInlinedItem)\n\n -> csearch::found_ast {\n\n debug!(\"Looking up item: {}\", id);\n\n let item_doc = lookup_item(id, cdata.data());\n\n let path = Vec::from_slice(item_path(item_doc).init());\n\n match decode_inlined_item(cdata, tcx, path, item_doc) {\n\n Ok(ref ii) => csearch::found(*ii),\n\n Err(path) => {\n\n match item_parent_item(item_doc) {\n\n 
Some(did) => {\n\n let did = translate_def_id(cdata, did);\n\n let parent_item = lookup_item(did.node, cdata.data());\n\n match decode_inlined_item(cdata, tcx, path, parent_item) {\n\n Ok(ref ii) => csearch::found_parent(did, *ii),\n\n Err(_) => csearch::not_found\n\n }\n\n }\n\n None => csearch::not_found\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/librustc/metadata/decoder.rs", "rank": 88, "score": 366636.8345006368 }, { "content": "fn struct_ty(ty: Type) -> Type {\n\n let size = ty_size(ty) * 8;\n\n let fields = coerce_to_int(size);\n\n return Type::struct_(fields.as_slice(), false);\n\n}\n\n\n", "file_path": "src/librustc/middle/trans/cabi_mips.rs", "rank": 89, "score": 365476.2337103299 }, { "content": "/// Parses the arguments from the given list of tokens, returning None\n\n/// if there's a parse error so we can continue parsing other format!\n\n/// expressions.\n\n///\n\n/// If parsing succeeds, the second return value is:\n\n///\n\n/// Some((fmtstr, unnamed arguments, ordering of named arguments,\n\n/// named arguments))\n\nfn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])\n\n -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>,\n\n HashMap<~str, @ast::Expr>)>) {\n\n let mut args = Vec::new();\n\n let mut names = HashMap::<~str, @ast::Expr>::new();\n\n let mut order = Vec::new();\n\n\n\n let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),\n\n ecx.cfg(),\n\n tts.iter()\n\n .map(|x| (*x).clone())\n\n .collect());\n\n // Parse the leading function expression (maybe a block, maybe a path)\n\n let extra = p.parse_expr();\n\n if !p.eat(&token::COMMA) {\n\n ecx.span_err(sp, \"expected token: `,`\");\n\n return (extra, None);\n\n }\n\n\n\n if p.token == token::EOF {\n", "file_path": "src/libsyntax/ext/format.rs", "rank": 90, "score": 363359.2148389369 }, { "content": "pub fn resolve_impl(tcx: ty::ctxt,\n\n impl_item: &ast::Item,\n\n impl_generics: &ty::Generics,\n\n impl_trait_ref: &ty::TraitRef) {\n\n let param_env = 
ty::construct_parameter_environment(\n\n tcx,\n\n None,\n\n impl_generics.type_param_defs(),\n\n [],\n\n impl_generics.region_param_defs(),\n\n [],\n\n impl_item.id);\n\n\n\n let impl_trait_ref = @impl_trait_ref.subst(tcx, &param_env.free_substs);\n\n\n\n let infcx = &infer::new_infer_ctxt(tcx);\n\n let vcx = VtableContext { infcx: infcx, param_env: &param_env };\n\n\n\n // First, check that the impl implements any trait bounds\n\n // on the trait.\n", "file_path": "src/librustc/middle/typeck/check/vtable.rs", "rank": 91, "score": 362055.36709618784 }, { "content": "pub fn lookup_variant_by_id(tcx: ty::ctxt,\n\n enum_def: ast::DefId,\n\n variant_def: ast::DefId)\n\n -> Option<@Expr> {\n\n fn variant_expr(variants: &[ast::P<ast::Variant>], id: ast::NodeId) -> Option<@Expr> {\n\n for variant in variants.iter() {\n\n if variant.node.id == id {\n\n return variant.node.disr_expr;\n\n }\n\n }\n\n None\n\n }\n\n\n\n if ast_util::is_local(enum_def) {\n\n {\n\n match tcx.map.find(enum_def.node) {\n\n None => None,\n\n Some(ast_map::NodeItem(it)) => match it.node {\n\n ItemEnum(ast::EnumDef { variants: ref variants }, _) => {\n\n variant_expr(variants.as_slice(), variant_def.node)\n", "file_path": "src/librustc/middle/const_eval.rs", "rank": 92, "score": 362024.0508562147 }, { "content": "fn quux<T>(x: T) -> T { let f = id::<T>; return f(x); }\n\n\n", "file_path": "src/test/run-pass/issue-333.rs", "rank": 93, "score": 358432.45543061855 }, { "content": "pub fn fold_regions_in_sig(tcx: ty::ctxt,\n\n fn_sig: &ty::FnSig,\n\n fldr: |r: ty::Region| -> ty::Region)\n\n -> ty::FnSig {\n\n ty_fold::RegionFolder::regions(tcx, fldr).fold_sig(fn_sig)\n\n}\n\n\n\nimpl TypeTrace {\n\n pub fn span(&self) -> Span {\n\n self.origin.span()\n\n }\n\n}\n\n\n\nimpl Repr for TypeTrace {\n\n fn repr(&self, tcx: ty::ctxt) -> ~str {\n\n format!(\"TypeTrace({})\", self.origin.repr(tcx))\n\n }\n\n}\n\n\n\nimpl TypeOrigin {\n", "file_path": "src/librustc/middle/typeck/infer/mod.rs", "rank": 94, 
"score": 357334.12248944264 }, { "content": "pub fn relate_nested_regions(tcx: ty::ctxt,\n\n opt_region: Option<ty::Region>,\n\n ty: ty::t,\n\n relate_op: |ty::Region, ty::Region|) {\n\n /*!\n\n * This rather specialized function walks each region `r` that appear\n\n * in `ty` and invokes `relate_op(r_encl, r)` for each one. `r_encl`\n\n * here is the region of any enclosing `&'r T` pointer. If there is\n\n * no enclosing pointer, and `opt_region` is Some, then `opt_region.get()`\n\n * is used instead. Otherwise, no callback occurs at all).\n\n *\n\n * Here are some examples to give you an intution:\n\n *\n\n * - `relate_nested_regions(Some('r1), &'r2 uint)` invokes\n\n * - `relate_op('r1, 'r2)`\n\n * - `relate_nested_regions(Some('r1), &'r2 &'r3 uint)` invokes\n\n * - `relate_op('r1, 'r2)`\n\n * - `relate_op('r2, 'r3)`\n\n * - `relate_nested_regions(None, &'r2 &'r3 uint)` invokes\n\n * - `relate_op('r2, 'r3)`\n", "file_path": "src/librustc/middle/typeck/check/regionmanip.rs", "rank": 95, "score": 357334.12248944264 }, { "content": "fn param_substs_to_str(this: &param_substs, tcx: ty::ctxt) -> ~str {\n\n format!(\"param_substs \\\\{tys:{}, vtables:{}\\\\}\",\n\n this.tys.repr(tcx),\n\n this.vtables.repr(tcx))\n\n}\n\n\n\nimpl Repr for param_substs {\n\n fn repr(&self, tcx: ty::ctxt) -> ~str {\n\n param_substs_to_str(self, tcx)\n\n }\n\n}\n\n\n\n// work around bizarre resolve errors\n\npub type RvalueDatum = datum::Datum<datum::Rvalue>;\n\npub type LvalueDatum = datum::Datum<datum::Lvalue>;\n\n\n\n// Function context. 
Every LLVM function we create will have one of\n\n// these.\n\npub struct FunctionContext<'a> {\n\n // The ValueRef returned from a call to llvm::LLVMAddFunction; the\n", "file_path": "src/librustc/middle/trans/common.rs", "rank": 96, "score": 356897.1642385024 }, { "content": "pub fn RegionVarBindings(tcx: ty::ctxt) -> RegionVarBindings {\n\n RegionVarBindings {\n\n tcx: tcx,\n\n var_origins: RefCell::new(Vec::new()),\n\n values: RefCell::new(None),\n\n constraints: RefCell::new(HashMap::new()),\n\n lubs: RefCell::new(HashMap::new()),\n\n glbs: RefCell::new(HashMap::new()),\n\n skolemization_count: Cell::new(0),\n\n bound_count: Cell::new(0),\n\n undo_log: RefCell::new(Vec::new())\n\n }\n\n}\n\n\n\nimpl RegionVarBindings {\n\n pub fn in_snapshot(&self) -> bool {\n\n let undo_log = self.undo_log.borrow();\n\n undo_log.get().len() > 0\n\n }\n\n\n", "file_path": "src/librustc/middle/typeck/infer/region_inference/mod.rs", "rank": 97, "score": 356262.09091970697 }, { "content": "fn doc_trait_ref(doc: ebml::Doc, tcx: ty::ctxt, cdata: Cmd) -> ty::TraitRef {\n\n parse_trait_ref_data(doc.data, cdata.cnum, doc.start, tcx,\n\n |_, did| translate_def_id(cdata, did))\n\n}\n\n\n", "file_path": "src/librustc/metadata/decoder.rs", "rank": 98, "score": 354433.0439577621 }, { "content": "fn item_trait_ref(doc: ebml::Doc, tcx: ty::ctxt, cdata: Cmd) -> ty::TraitRef {\n\n let tp = reader::get_doc(doc, tag_item_trait_ref);\n\n doc_trait_ref(tp, tcx, cdata)\n\n}\n\n\n", "file_path": "src/librustc/metadata/decoder.rs", "rank": 99, "score": 354433.0439577621 } ]
Rust
src/unixuser.rs
giganteous/webdav-server-rs
3fa3e9f63f9894d4658c9c1923f416f60acf8712
use std; use std::ffi::{CStr, OsStr}; use std::io; use std::os::unix::ffi::OsStrExt; use std::path::{Path, PathBuf}; use tokio::task::block_in_place; #[derive(Debug)] pub struct User { pub name: String, pub passwd: String, pub gecos: String, pub uid: u32, pub gid: u32, pub groups: Vec<u32>, pub dir: PathBuf, pub shell: PathBuf, } unsafe fn cptr_to_osstr<'a>(c: *const libc::c_char) -> &'a OsStr { let bytes = CStr::from_ptr(c).to_bytes(); OsStr::from_bytes(&bytes) } unsafe fn cptr_to_path<'a>(c: *const libc::c_char) -> &'a Path { Path::new(cptr_to_osstr(c)) } unsafe fn to_user(pwd: &libc::passwd) -> User { let cs_name = CStr::from_ptr(pwd.pw_name); let cs_passwd = CStr::from_ptr(pwd.pw_passwd); let cs_gecos = CStr::from_ptr(pwd.pw_gecos); let cs_dir = cptr_to_path(pwd.pw_dir); let cs_shell = cptr_to_path(pwd.pw_shell); User { name: cs_name.to_string_lossy().into_owned(), passwd: cs_passwd.to_string_lossy().into_owned(), gecos: cs_gecos.to_string_lossy().into_owned(), dir: cs_dir.to_path_buf(), shell: cs_shell.to_path_buf(), uid: pwd.pw_uid, gid: pwd.pw_gid, groups: Vec::new(), } } impl User { pub fn by_name(name: &str, with_groups: bool) -> Result<User, io::Error> { let mut buf = [0u8; 1024]; let mut pwd: libc::passwd = unsafe { std::mem::zeroed() }; let mut result: *mut libc::passwd = std::ptr::null_mut(); let cname = match std::ffi::CString::new(name) { Ok(un) => un, Err(_) => return Err(io::Error::from_raw_os_error(libc::ENOENT)), }; let ret = unsafe { libc::getpwnam_r( cname.as_ptr(), &mut pwd as *mut _, buf.as_mut_ptr() as *mut _, buf.len() as libc::size_t, &mut result as *mut _, ) }; if ret != 0 { return Err(io::Error::from_raw_os_error(ret)); } if result.is_null() { return Err(io::Error::from_raw_os_error(libc::ENOENT)); } let mut user = unsafe { to_user(&pwd) }; if with_groups { let mut ngroups = (buf.len() / std::mem::size_of::<libc::gid_t>()) as libc::c_int; let ret = unsafe { libc::getgrouplist( cname.as_ptr(), user.gid as libc::gid_t, buf.as_mut_ptr() as 
*mut _, &mut ngroups as *mut _, ) }; if ret >= 0 && ngroups > 0 { let mut groups_vec = Vec::with_capacity(ngroups as usize); let groups = unsafe { std::slice::from_raw_parts(buf.as_ptr() as *const libc::gid_t, ngroups as usize) }; groups_vec.extend(groups.iter().map(|&g| g as u32).filter(|&g| g != user.gid)); user.groups = groups_vec; } } Ok(user) } /* pub fn by_uid(uid: u32) -> Result<User, io::Error> { let mut buf = [0; 1024]; let mut pwd: libc::passwd = unsafe { std::mem::zeroed() }; let mut result: *mut libc::passwd = std::ptr::null_mut(); let ret = unsafe { getpwuid_r( uid, &mut pwd as *mut _, buf.as_mut_ptr(), buf.len() as libc::size_t, &mut result as *mut _, ) }; if ret == 0 { if result.is_null() { return Err(io::Error::from_raw_os_error(libc::ENOENT)); } let p = unsafe { to_user(&pwd) }; Ok(p) } else { Err(io::Error::from_raw_os_error(ret)) } } */ pub async fn by_name_async(name: &str, with_groups: bool) -> Result<User, io::Error> { block_in_place(move || User::by_name(name, with_groups)) } }
use std; use std::ffi::{CStr, OsStr}; use std::io; use std::os::unix::ffi::OsStrExt; use std::path::{Path, PathBuf}; use tokio::task::block_in_place; #[derive(Debug)] pub struct User { pub name: String, pub passwd: String, pub gecos: String, pub uid: u32, pub gid: u32, pub groups: Vec<u32>, pub dir: PathBuf, pub shell: PathBuf, } unsafe fn cptr_to_osstr<'a>(c: *const libc::c_char) -> &'a OsStr { let bytes = CStr::from_ptr(c).to_bytes(); OsStr::from_bytes(&bytes) } unsafe fn cptr_to_path<'a>(c: *const libc::c_char) -> &'a Path { Path::new(cptr_to_osstr(c)) } unsafe fn to_user(pwd: &libc::passwd) -> User { let cs_name = CStr::from_ptr(pwd.pw_name); let cs_passwd = CStr::from_ptr(pwd.pw_passwd); let cs_gecos = CStr::from_ptr(pwd.pw_gecos); let cs_dir = cptr_to_path(pwd.pw_dir); let cs_shell = cptr_to_path(pwd.pw_shel
impl User { pub fn by_name(name: &str, with_groups: bool) -> Result<User, io::Error> { let mut buf = [0u8; 1024]; let mut pwd: libc::passwd = unsafe { std::mem::zeroed() }; let mut result: *mut libc::passwd = std::ptr::null_mut(); let cname = match std::ffi::CString::new(name) { Ok(un) => un, Err(_) => return Err(io::Error::from_raw_os_error(libc::ENOENT)), }; let ret = unsafe { libc::getpwnam_r( cname.as_ptr(), &mut pwd as *mut _, buf.as_mut_ptr() as *mut _, buf.len() as libc::size_t, &mut result as *mut _, ) }; if ret != 0 { return Err(io::Error::from_raw_os_error(ret)); } if result.is_null() { return Err(io::Error::from_raw_os_error(libc::ENOENT)); } let mut user = unsafe { to_user(&pwd) }; if with_groups { let mut ngroups = (buf.len() / std::mem::size_of::<libc::gid_t>()) as libc::c_int; let ret = unsafe { libc::getgrouplist( cname.as_ptr(), user.gid as libc::gid_t, buf.as_mut_ptr() as *mut _, &mut ngroups as *mut _, ) }; if ret >= 0 && ngroups > 0 { let mut groups_vec = Vec::with_capacity(ngroups as usize); let groups = unsafe { std::slice::from_raw_parts(buf.as_ptr() as *const libc::gid_t, ngroups as usize) }; groups_vec.extend(groups.iter().map(|&g| g as u32).filter(|&g| g != user.gid)); user.groups = groups_vec; } } Ok(user) } /* pub fn by_uid(uid: u32) -> Result<User, io::Error> { let mut buf = [0; 1024]; let mut pwd: libc::passwd = unsafe { std::mem::zeroed() }; let mut result: *mut libc::passwd = std::ptr::null_mut(); let ret = unsafe { getpwuid_r( uid, &mut pwd as *mut _, buf.as_mut_ptr(), buf.len() as libc::size_t, &mut result as *mut _, ) }; if ret == 0 { if result.is_null() { return Err(io::Error::from_raw_os_error(libc::ENOENT)); } let p = unsafe { to_user(&pwd) }; Ok(p) } else { Err(io::Error::from_raw_os_error(ret)) } } */ pub async fn by_name_async(name: &str, with_groups: bool) -> Result<User, io::Error> { block_in_place(move || User::by_name(name, with_groups)) } }
l); User { name: cs_name.to_string_lossy().into_owned(), passwd: cs_passwd.to_string_lossy().into_owned(), gecos: cs_gecos.to_string_lossy().into_owned(), dir: cs_dir.to_path_buf(), shell: cs_shell.to_path_buf(), uid: pwd.pw_uid, gid: pwd.pw_gid, groups: Vec::new(), } }
function_block-function_prefixed
[]
Rust
tests/get_write_configurations_util/mod.rs
jakehamtexas/constance_rs
b923d35865c5d88eb7791efccf67f2b0e0b8d8e4
use std::collections::HashMap; pub mod dotnet_object_like_enum_buffer; pub mod dotnet_object_like_enum_with_description_buffer; pub mod dotnet_simple_enum_buffer; pub mod dotnet_simple_enum_with_description_buffer; pub mod dotnet_string_enum_buffer; pub mod dotnet_string_enum_with_description_buffer; pub mod rust_simple_enum_buffer; pub mod rust_simple_enum_with_description_buffer; pub mod rust_string_enum_buffer; pub mod rust_string_enum_with_description_buffer; pub mod typescript_object_like_enum_buffer; pub mod typescript_object_like_enum_with_description_buffer; pub mod typescript_simple_enum_buffer; pub mod typescript_simple_enum_with_description_buffer; pub mod typescript_string_enum_buffer; pub mod typescript_string_enum_with_description_buffer; use constance::{ testing_only::{ Column, Language, ObjectLike, SimpleEnum, StringEnum, TableConstant, TableIdentifier, ValueWithDescription, NUMBER_TYPE, STRING_TYPE, }, types::OutputOptions, }; pub fn get_table_constants_for_filename_test() -> Vec<TableConstant> { vec![TableConstant::SimpleEnum(SimpleEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, ..SimpleEnum::default() })] } pub fn get_table_constants_for_simple_enum_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "5".to_string(), description: None, }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "7".to_string(), description: None, }, ); vec![TableConstant::SimpleEnum(SimpleEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_string_enum_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "test1".to_string(), description: None, }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "test2".to_string(), description: 
None, }, ); vec![TableConstant::StringEnum(StringEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_simple_enum_with_description_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "5".to_string(), description: Some("description5".to_string()), }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "7".to_string(), description: Some("description7".to_string()), }, ); vec![TableConstant::SimpleEnum(SimpleEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_string_enum_with_description_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "test1".to_string(), description: Some("description5".to_string()), }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "test2".to_string(), description: Some("description7".to_string()), }, ); vec![TableConstant::StringEnum(StringEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_object_like_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( ValueWithDescription { value: "test1".to_string(), description: None, }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first1".to_string(), ), ( Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "1".to_string(), ), ], ); map.insert( ValueWithDescription { value: "test2".to_string(), description: None, }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first2".to_string(), ), ( Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "2".to_string(), ), ], ); 
vec![TableConstant::ObjectLike(ObjectLike { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_object_like_with_description_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( ValueWithDescription { value: "test1".to_string(), description: Some("description1".to_string()), }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first1".to_string(), ), ( Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "1".to_string(), ), ], ); map.insert( ValueWithDescription { value: "test2".to_string(), description: Some("description2".to_string()), }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first2".to_string(), ), ( Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "2".to_string(), ), ], ); vec![TableConstant::ObjectLike(ObjectLike { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_output_options_for_filename_test(lang: Language) -> OutputOptions { OutputOptions { language_targets: Some(vec![lang.to_string()]), ..OutputOptions::default() } }
use std::collections::HashMap; pub mod dotnet_object_like_enum_buffer; pub mod dotnet_object_like_enum_with_description_buffer; pub mod dotnet_simple_enum_buffer; pub mod dotnet_simple_enum_with_description_buffer; pub mod dotnet_string_enum_buffer; pub mod dotnet_string_enum_with_description_buffer; pub mod rust_simple_enum_buffer; pub mod rust_simple_enum_with_description_buffer; pub mod rust_string_enum_buffer; pub mod rust_string_enum_with_description_buffer; pub mod typescript_object_like_enum_buffer; pub mod typescript_object_like_enum_with_description_buffer; pub mod typescript_simple_enum_buffer; pub mod typescript_simple_enum_with_description_buffer; pub mod typescript_string_enum_buffer; pub mod typescript_string_enum_with_description_buffer; use constance::{ testing_only::{ Column, Language, ObjectLike, SimpleEnum, StringEnum, TableConstant, TableIdentifier, ValueWithDescription, NUMBER_TYPE, STRING_TYPE, }, types::OutputOptions, }; pub fn get_table_constants_for_filename_test() -> Vec<TableConstant> { vec![TableConstant::SimpleEnum(SimpleEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, ..SimpleEnum::default() })] } pub fn get_table_constants_for_simple_enum_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "5".to_string(), description: None, }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "7".to_string(), description: None, }, ); vec![TableConstant::SimpleEnum(SimpleEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_string_enum_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "test1".to_string(), description: None, }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "test2".to_string(), description: 
None, }, ); vec![TableConstant::StringEnum(StringEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_simple_enum_with_description_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "5".to_string(), description: Some("description5".to_string()), }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "7".to_string(), description: Some("description7".to_string()), }, ); vec![TableConstant::SimpleEnum(SimpleEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_string_enum_with_description_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( "test1".to_string(), ValueWithDescription { value: "test1".to_string(), description: Some("description5".to_string()), }, ); map.insert( "test2".to_string(), ValueWithDescription { value: "test2".to_string(), description: Some("description7".to_string()), }, ); vec![TableConstant::StringEnum(StringEnum { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_table_constants_for_object_like_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( ValueWithDescription { value: "test1".to_string(), description: None, }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first1".to_string(), ), (
pub fn get_table_constants_for_object_like_with_description_buffer_test() -> Vec<TableConstant> { let mut map = HashMap::new(); map.insert( ValueWithDescription { value: "test1".to_string(), description: Some("description1".to_string()), }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first1".to_string(), ), ( Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "1".to_string(), ), ], ); map.insert( ValueWithDescription { value: "test2".to_string(), description: Some("description2".to_string()), }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first2".to_string(), ), ( Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "2".to_string(), ), ], ); vec![TableConstant::ObjectLike(ObjectLike { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] } pub fn get_output_options_for_filename_test(lang: Language) -> OutputOptions { OutputOptions { language_targets: Some(vec![lang.to_string()]), ..OutputOptions::default() } }
Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "1".to_string(), ), ], ); map.insert( ValueWithDescription { value: "test2".to_string(), description: None, }, vec![ ( Column { name: "first".to_string(), data_type: STRING_TYPE.to_string(), }, "first2".to_string(), ), ( Column { name: "second".to_string(), data_type: NUMBER_TYPE.to_string(), }, "2".to_string(), ), ], ); vec![TableConstant::ObjectLike(ObjectLike { identifier: TableIdentifier { object_name: "test_enum".to_string(), ..TableIdentifier::default() }, map, })] }
function_block-function_prefix_line
[ { "content": "fn get_constructor_for_object_like(class_name: &str, columns: &Vec<&Column>) -> String {\n\n let constructor_first_line = format!(\"private {}(\", pascal_case(class_name));\n\n let args = columns\n\n .iter()\n\n .map(|Column { data_type, name }| {\n\n format!(\n\n \"{} {}\",\n\n get_type_name(&data_type),\n\n casing_engine::camel_case(&name)\n\n )\n\n })\n\n .collect::<Vec<String>>();\n\n let constructor_assignments = columns\n\n .iter()\n\n .map(|Column { data_type: _, name }| {\n\n [\n\n format!(\n\n \"{} = {};\",\n\n casing_engine::pascal_case(name),\n\n casing_engine::camel_case(name)\n", "file_path": "src/get_write_configurations/file_buffer_engine/dotnet.rs", "rank": 7, "score": 173270.10844161463 }, { "content": "pub fn get_string_enum_with_description_rc(options: ConnectionOptions) -> ConstanceRc {\n\n let table_options = get_table_options(\n\n \"string_enum_with_description\",\n\n \"name\".to_string(),\n\n &[Column {\n\n name: \"string_id\".to_string(),\n\n data_type: STRING_TYPE.to_string(),\n\n }],\n\n Some(\"description\".to_string()),\n\n );\n\n let base_rc = get_base_rc(options);\n\n ConstanceRc {\n\n table_options,\n\n ..base_rc\n\n }\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 8, "score": 168901.98149339293 }, { "content": "pub fn get_simple_enum_with_description_rc(options: ConnectionOptions) -> ConstanceRc {\n\n let table_options = get_table_options(\n\n \"simple_enum_with_description\",\n\n \"name\".to_string(),\n\n &[Column {\n\n name: \"id\".to_string(),\n\n data_type: NUMBER_TYPE.to_string(),\n\n }],\n\n Some(\"description\".to_string()),\n\n );\n\n let base_rc = get_base_rc(options);\n\n ConstanceRc {\n\n table_options,\n\n ..base_rc\n\n }\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 9, "score": 168901.98149339293 }, { "content": "pub fn get_object_like_enum_with_description_rc(options: ConnectionOptions) -> ConstanceRc {\n\n let table_options = 
get_table_options(\n\n \"object_like_enum_with_description\",\n\n \"name\".to_string(),\n\n &[\n\n Column {\n\n name: \"first\".to_string(),\n\n data_type: STRING_TYPE.to_string(),\n\n },\n\n Column {\n\n name: \"second\".to_string(),\n\n data_type: NUMBER_TYPE.to_string(),\n\n },\n\n ],\n\n Some(\"description\".to_string()),\n\n );\n\n let base_rc = get_base_rc(options);\n\n ConstanceRc {\n\n table_options,\n\n ..base_rc\n\n }\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 10, "score": 166509.04659011174 }, { "content": "fn get_properties_for_object_like(columns: &Vec<&Column>) -> String {\n\n columns\n\n .iter()\n\n .map(|Column { data_type, name }| {\n\n format!(\n\n \"public readonly {} {};\",\n\n get_type_name(data_type),\n\n pascal_case(name)\n\n )\n\n })\n\n .collect::<Vec<String>>()\n\n .join([NEWLINE, FOUR_SPACE_TAB, FOUR_SPACE_TAB].join(\"\").as_str())\n\n}\n\n\n", "file_path": "src/get_write_configurations/file_buffer_engine/dotnet.rs", "rank": 11, "score": 155852.35109104912 }, { "content": "pub fn vec_assert(assertable: &Vec<String>) {\n\n string_assert(assertable.first().unwrap());\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 12, "score": 155106.91185844337 }, { "content": "pub fn get_value(value: &str, quotes: bool) -> String {\n\n if quotes {\n\n [QUOTATION_MARK, value, QUOTATION_MARK].join(\"\")\n\n } else {\n\n value.to_string()\n\n }\n\n}\n", "file_path": "src/get_write_configurations/file_buffer_engine/mod.rs", "rank": 13, "score": 149255.5337566032 }, { "content": "pub fn camel_case(name: &str) -> String {\n\n camelcase::to_camel_case(name)\n\n}\n\n\n", "file_path": "src/get_write_configurations/casing_engine.rs", "rank": 14, "score": 146673.94951767355 }, { "content": "pub fn pascal_case(name: &str) -> String {\n\n pascalcase::to_pascal_case(name)\n\n}\n", "file_path": "src/get_write_configurations/casing_engine.rs", "rank": 15, "score": 146673.94951767355 }, { "content": "pub 
fn snake_case(name: &str) -> String {\n\n snakecase::to_snake_case(name)\n\n}\n\n\n", "file_path": "src/get_write_configurations/casing_engine.rs", "rank": 16, "score": 146673.94951767355 }, { "content": "pub fn get_simple_enum_rc(options: ConnectionOptions) -> ConstanceRc {\n\n let table_options = get_table_options(\n\n \"simple_enum\",\n\n \"name\".to_string(),\n\n &[Column {\n\n name: \"id\".to_string(),\n\n data_type: NUMBER_TYPE.to_string(),\n\n }],\n\n None,\n\n );\n\n let base_rc = get_base_rc(options);\n\n ConstanceRc {\n\n table_options,\n\n ..base_rc\n\n }\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 18, "score": 142849.8724828431 }, { "content": "pub fn get_string_enum_rc(options: ConnectionOptions) -> ConstanceRc {\n\n let table_options = get_table_options(\n\n \"string_enum\",\n\n \"name\".to_string(),\n\n &[Column {\n\n name: \"string_id\".to_string(),\n\n data_type: STRING_TYPE.to_string(),\n\n }],\n\n None,\n\n );\n\n let base_rc = get_base_rc(options);\n\n ConstanceRc {\n\n table_options,\n\n ..base_rc\n\n }\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 19, "score": 142849.8724828431 }, { "content": "pub fn get_object_like_enum_rc(options: ConnectionOptions) -> ConstanceRc {\n\n let table_options = get_table_options(\n\n \"object_like_enum\",\n\n \"name\".to_string(),\n\n &[\n\n Column {\n\n name: \"first\".to_string(),\n\n data_type: STRING_TYPE.to_string(),\n\n },\n\n Column {\n\n name: \"second\".to_string(),\n\n data_type: NUMBER_TYPE.to_string(),\n\n },\n\n ],\n\n None,\n\n );\n\n let base_rc = get_base_rc(options);\n\n ConstanceRc {\n\n table_options,\n\n ..base_rc\n\n }\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 20, "score": 140878.15428626523 }, { "content": "fn get_identifier(table_name: &str) -> TableIdentifier {\n\n TableIdentifier {\n\n database_name: \"test\".to_string(),\n\n schema_name: \"dbo\".to_string(),\n\n object_name: 
table_name.to_string(),\n\n }\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 21, "score": 140156.6583448603 }, { "content": "pub fn insert_statement<'a>(json: &'a Vec<StringEnum>) -> Result<String, sql::Error> {\n\n let raw = get_raw_insert_statement(TABLE_NAME, &COLUMNS, json.len())?;\n\n Ok(json\n\n .iter()\n\n .enumerate()\n\n .map(|(index, StringEnum { name, string_id })| {\n\n vec![index.to_string(), name.to_string(), string_id.to_string()]\n\n })\n\n .fold(raw, |statement, args| to_substituted(&statement, &args)))\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/string_enum.rs", "rank": 22, "score": 140138.19299154196 }, { "content": "#[test]\n\npub fn dotnet_string_enum_with_description_buffer() {\n\n do_string_enum_with_description_buffer_test(\n\n Language::Dotnet,\n\n &[\n\n DOTNET_STRING_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n DOTNET_STRING_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 23, "score": 130166.00731176151 }, { "content": "#[test]\n\npub fn typescript_string_enum_with_description_buffer() {\n\n do_string_enum_with_description_buffer_test(\n\n Language::Typescript,\n\n &[\n\n TYPESCRIPT_STRING_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n TYPESCRIPT_STRING_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 24, "score": 130166.00731176151 }, { "content": "#[test]\n\npub fn dotnet_simple_enum_with_description_buffer() {\n\n do_simple_enum_with_description_buffer_test(\n\n Language::Dotnet,\n\n &[\n\n DOTNET_SIMPLE_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n DOTNET_SIMPLE_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 25, "score": 130166.00731176151 }, { "content": "#[test]\n\npub fn rust_string_enum_with_description_buffer() {\n\n do_string_enum_with_description_buffer_test(\n\n Language::Rust,\n\n &[\n\n 
RUST_STRING_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n RUST_STRING_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 26, "score": 130166.00731176151 }, { "content": "#[test]\n\npub fn rust_simple_enum_with_description_buffer() {\n\n do_simple_enum_with_description_buffer_test(\n\n Language::Rust,\n\n &[\n\n RUST_SIMPLE_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n RUST_SIMPLE_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 27, "score": 130166.00731176151 }, { "content": "#[test]\n\npub fn typescript_simple_enum_with_description_buffer() {\n\n do_simple_enum_with_description_buffer_test(\n\n Language::Typescript,\n\n &[\n\n TYPESCRIPT_SIMPLE_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n TYPESCRIPT_SIMPLE_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 28, "score": 130166.00731176151 }, { "content": "pub fn get_runtime_configuration(\n\n cli_args: impl ICliArgs,\n\n file_system: impl IFileSystem,\n\n rc_parser: impl IRcParser,\n\n) -> ConstanceRc {\n\n let path = cli_args.get_path();\n\n let buf = file_system\n\n .get_file(&path)\n\n .unwrap_or_else(|_| panic!(\"No configuration file found at path: {}\", path));\n\n let result = match file_system.get_extension(&path) {\n\n RcFileExtension::Json => rc_parser.from_json(&buf),\n\n RcFileExtension::Yaml => rc_parser.from_yaml(&buf),\n\n };\n\n result.unwrap_or_else(|| panic!(\"Unable to parse {}\", path))\n\n}\n", "file_path": "src/constancerc/get_runtime_configuration/mod.rs", "rank": 29, "score": 129825.2060793787 }, { "content": "#[test]\n\npub fn dotnet_object_like_enum_with_description_buffer() {\n\n do_object_like_with_description_buffer_test(\n\n Language::Dotnet,\n\n &[\n\n DOTNET_OBJECT_LIKE_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n DOTNET_OBJECT_LIKE_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": 
"tests/get_write_configurations.rs", "rank": 30, "score": 128330.79074140696 }, { "content": "#[test]\n\npub fn typescript_object_like_enum_with_description_buffer() {\n\n do_object_like_with_description_buffer_test(\n\n Language::Typescript,\n\n &[\n\n TYPESCRIPT_OBJECT_LIKE_ENUM_WITH_DESCRIPTION_BUFFER1,\n\n TYPESCRIPT_OBJECT_LIKE_ENUM_WITH_DESCRIPTION_BUFFER2,\n\n ],\n\n )\n\n}\n", "file_path": "tests/get_write_configurations.rs", "rank": 31, "score": 128330.79074140696 }, { "content": "pub fn insert_statement(json: &Vec<&str>) -> Result<String, sql::Error> {\n\n let raw = get_raw_insert_statement(TABLE_NAME, &COLUMNS, json.len())?;\n\n Ok(json\n\n .iter()\n\n .enumerate()\n\n .map(|(index, arg)| vec![index.to_string(), arg.to_string()])\n\n .fold(raw, |statement, args| to_substituted(&statement, &args)))\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/simple_enum.rs", "rank": 32, "score": 125623.73938939506 }, { "content": "pub fn get_database(rc: &ConstanceRc) -> Rdbms {\n\n Rdbms::from_options(&rc.query_execution_options)\n\n}\n", "file_path": "src/reader/manager.rs", "rank": 33, "score": 124691.7309023838 }, { "content": "pub fn get_connection_options_from_env() -> ConnectionOptions {\n\n let port = env::var(\"PORT\").map(|port| port.parse::<u16>().unwrap());\n\n let host = env::var(\"HOST\");\n\n let password = env::var(\"PASSWORD\");\n\n let user = env::var(\"USER\");\n\n match (port, host, password, user) {\n\n (Ok(port), Ok(host), Ok(password), Ok(user)) => {\n\n ConnectionOptions::new(Some(host), Some(port), Some(user), password)\n\n }\n\n _ => ConnectionOptions::empty(),\n\n }\n\n}\n", "file_path": "tests/table_to_constants_util/mod.rs", "rank": 34, "score": 124007.46096474797 }, { "content": "pub fn insert_statement(json: &Vec<ObjectLikeEnum>) -> Result<String, sql::Error> {\n\n let raw = get_raw_insert_statement(TABLE_NAME, &COLUMNS, json.len())?;\n\n Ok(json\n\n .iter()\n\n .enumerate()\n\n .map(|(index, ObjectLikeEnum { name, 
value })| {\n\n vec![\n\n index.to_string(),\n\n name.to_string(),\n\n value.first.to_string(),\n\n value.second.to_string(),\n\n ]\n\n })\n\n .fold(raw, |statement, args| to_substituted(&statement, &args)))\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/object_like.rs", "rank": 35, "score": 123023.44906897005 }, { "content": "pub fn assert(rc: ConstanceRc) {\n\n let output_options = rc.output_options.unwrap();\n\n\n\n let output_options_path = &output_options.path.unwrap();\n\n let language_targets = &output_options.language_targets.unwrap();\n\n\n\n string_assert(output_options_path);\n\n vec_assert(language_targets);\n\n\n\n let table_option = rc.table_options.first().unwrap();\n\n\n\n let identifier = &table_option.identifier;\n\n let database_name = &identifier.database_name;\n\n let schema_name = &identifier.schema_name;\n\n let object_name = &identifier.object_name;\n\n\n\n let key_column_name = &table_option.key_column_name;\n\n let value_columns = &table_option.value_columns;\n\n let description_column_name = &table_option.description_column_name.as_ref().unwrap();\n\n\n", "file_path": "tests/get_runtime_configuration_util/every_field.rs", "rank": 36, "score": 122849.6570852041 }, { "content": "pub fn assert(rc: ConstanceRc) {\n\n let output_options = &rc.output_options;\n\n option_assert(output_options);\n\n\n\n let table_option = rc.table_options.first().unwrap();\n\n\n\n let identifier = &table_option.identifier;\n\n let database_name = &identifier.database_name;\n\n let schema_name = &identifier.schema_name;\n\n let object_name = &identifier.object_name;\n\n\n\n let key_column_name = &table_option.key_column_name;\n\n let value_columns = &table_option.value_columns;\n\n let description_column_name = &table_option.description_column_name;\n\n\n\n string_assert(database_name);\n\n string_assert(schema_name);\n\n string_assert(object_name);\n\n\n\n string_assert(key_column_name);\n", "file_path": 
"tests/get_runtime_configuration_util/only_required_fields.rs", "rank": 37, "score": 122849.6570852041 }, { "content": "pub fn insert_statement<'a>(\n\n json: &'a Vec<SimpleEnumWithDescription>,\n\n) -> Result<String, sql::Error> {\n\n let raw = get_raw_insert_statement(TABLE_NAME, &COLUMNS, json.len())?;\n\n Ok(json\n\n .iter()\n\n .enumerate()\n\n .map(|(index, SimpleEnumWithDescription { name, description })| {\n\n vec![index.to_string(), name.to_string(), description.to_string()]\n\n })\n\n .fold(raw, |statement, args| to_substituted(&statement, &args)))\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/simple_enum_with_description.rs", "rank": 38, "score": 122639.06404865076 }, { "content": "pub fn insert_statement<'a>(\n\n json: &'a Vec<StringEnumWithDescription>,\n\n) -> Result<String, sql::Error> {\n\n let raw = get_raw_insert_statement(TABLE_NAME, &COLUMNS, json.len())?;\n\n Ok(json\n\n .iter()\n\n .enumerate()\n\n .map(\n\n |(\n\n index,\n\n StringEnumWithDescription {\n\n name,\n\n string_id,\n\n description,\n\n },\n\n )| {\n\n vec![\n\n index.to_string(),\n\n name.to_string(),\n\n string_id.to_string(),\n\n description.to_string(),\n\n ]\n\n },\n\n )\n\n .fold(raw, |statement, args| to_substituted(&statement, &args)))\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/string_enum_with_description.rs", "rank": 39, "score": 122639.06404865076 }, { "content": "pub fn insert_statement<'a>(\n\n json: &'a Vec<ObjectLikeEnumWithDescription>,\n\n) -> Result<String, sql::Error> {\n\n let raw = get_raw_insert_statement(TABLE_NAME, &COLUMNS, json.len())?;\n\n Ok(json\n\n .iter()\n\n .enumerate()\n\n .map(\n\n |(\n\n index,\n\n ObjectLikeEnumWithDescription {\n\n name,\n\n description,\n\n value,\n\n },\n\n )| {\n\n vec![\n\n index.to_string(),\n\n name.to_string(),\n\n description.to_string(),\n\n value.first.to_string(),\n\n value.second.to_string(),\n\n ]\n\n },\n\n )\n\n .fold(raw, |statement, args| 
to_substituted(&statement, &args)))\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/object_like_with_description.rs", "rank": 40, "score": 122639.06404865076 }, { "content": "#[test]\n\n#[should_panic]\n\npub fn get_path_no_args_supplied_should_panic() {\n\n // arrange\n\n let cli_args = GivesNoArg {};\n\n\n\n // act\n\n cli_args.get_path();\n\n}\n\n\n", "file_path": "src/constancerc/get_runtime_configuration/test/i_cli_args/mod.rs", "rank": 41, "score": 121358.17425631468 }, { "content": "pub fn from_path(path: &str) -> ConstanceRc {\n\n let cli_args = FromPath { path };\n\n let file_system = FileSystem {};\n\n let rc_parser = RcParser {};\n\n get_runtime_configuration(cli_args, file_system, rc_parser)\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 42, "score": 121072.69433094858 }, { "content": "#[test]\n\npub fn get_path_two_arguments_supplied_should_return_first() {\n\n // arrange\n\n let cli_args = GivesTwoArgs {};\n\n let expected = FIRST_ARG.to_string();\n\n\n\n // act\n\n let actual = cli_args.get_path();\n\n\n\n // assert\n\n assert_eq!(actual, expected);\n\n}\n", "file_path": "src/constancerc/get_runtime_configuration/test/i_cli_args/mod.rs", "rank": 43, "score": 118393.44197177835 }, { "content": "#[test]\n\npub fn get_path_one_argument_supplied_should_return_arg() {\n\n // arrange\n\n let cli_args = GivesOneArg {};\n\n let expected = FIRST_ARG.to_string();\n\n\n\n // act\n\n let actual = cli_args.get_path();\n\n\n\n // assert\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "src/constancerc/get_runtime_configuration/test/i_cli_args/mod.rs", "rank": 44, "score": 118393.44197177835 }, { "content": "fn get_column_of_unknown_type(row: &Row, column: &Column) -> Option<String> {\n\n let column_type = &column.data_type;\n\n let column_name = &column.name;\n\n match ColumnType::from_string(column_type) {\n\n ColumnType::Number => get_column::<i32>(&row, Some(&column_name)),\n\n 
ColumnType::String => get_column::<&str>(&row, Some(&column_name)),\n\n }\n\n}\n\n\n", "file_path": "src/reader/rdbms/mssql.rs", "rank": 45, "score": 112628.78660989122 }, { "content": "pub fn create_table_statement() -> Result<String, sql::Error> {\n\n get_create_table_statement(TABLE_NAME, &COLUMNS)\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/string_enum_with_description.rs", "rank": 46, "score": 109687.58553965288 }, { "content": "pub fn create_table_statement() -> Result<String, sql::Error> {\n\n get_create_table_statement(TABLE_NAME, &COLUMNS)\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/object_like_with_description.rs", "rank": 47, "score": 109687.58553965288 }, { "content": "pub fn create_table_statement() -> Result<String, sql::Error> {\n\n get_create_table_statement(TABLE_NAME, &COLUMNS)\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/simple_enum_with_description.rs", "rank": 48, "score": 109687.58553965288 }, { "content": "fn get_name(identifier: &TableIdentifier) -> String {\n\n casing_engine::pascal_case(&identifier.object_name)\n\n}\n", "file_path": "src/get_write_configurations/file_buffer_engine/typescript.rs", "rank": 49, "score": 108721.88694774536 }, { "content": "#[test]\n\npub fn typescript_filename() {\n\n do_filename_test(Language::Typescript, \"TestEnum.ts\");\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 50, "score": 102054.9509078622 }, { "content": "#[test]\n\npub fn dotnet_filename() {\n\n do_filename_test(Language::Dotnet, \"TestEnum.cs\");\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 51, "score": 102054.9509078622 }, { "content": "#[test]\n\npub fn rust_filename() {\n\n do_filename_test(Language::Rust, \"test_enum.rs\");\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 52, "score": 102054.9509078622 }, { "content": "#[test]\n\npub fn only_required_fields_from_yaml() {\n\n // 
arrange\n\n let path = get_yaml(\"only-required-fields\");\n\n\n\n // act\n\n let rc = from_path(&path);\n\n\n\n // assert\n\n only_required_fields::assert(rc);\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration.rs", "rank": 53, "score": 100698.98075037893 }, { "content": "#[test]\n\npub fn every_field_from_yaml() {\n\n // arrange\n\n let path = get_yaml(\"every-field\");\n\n\n\n // act\n\n let rc = from_path(&path);\n\n\n\n // assert\n\n every_field::assert(rc);\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration.rs", "rank": 54, "score": 100698.98075037893 }, { "content": "#[test]\n\npub fn only_required_fields_from_json() {\n\n // arrange\n\n let path = get_json(\"only-required-fields\");\n\n\n\n // act\n\n let rc = from_path(&path);\n\n\n\n // assert\n\n only_required_fields::assert(rc);\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration.rs", "rank": 55, "score": 100698.98075037893 }, { "content": "#[test]\n\n#[should_panic]\n\npub fn incomplete_fields_from_json() {\n\n // arrange\n\n let path = get_json(\"incomplete-fields\");\n\n\n\n // act\n\n from_path(&path);\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration.rs", "rank": 56, "score": 100698.98075037893 }, { "content": "#[test]\n\npub fn every_field_from_json() {\n\n // arrange\n\n let path = get_json(\"every-field\");\n\n\n\n // act\n\n let rc = from_path(&path);\n\n\n\n // assert\n\n every_field::assert(rc);\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration.rs", "rank": 57, "score": 100698.98075037893 }, { "content": "#[test]\n\n#[should_panic]\n\npub fn incomplete_fields_from_yaml() {\n\n // arrange\n\n let path = get_yaml(\"incomplete-fields\");\n\n\n\n // act\n\n from_path(&path);\n\n}\n", "file_path": "tests/get_runtime_configuration.rs", "rank": 58, "score": 100698.98075037893 }, { "content": "fn do_simple_enum_with_description_buffer_test(lang: Language, expecteds: &[&str]) {\n\n // arrange\n\n let table_constants = 
get_table_constants_for_simple_enum_with_description_buffer_test();\n\n let output_options = get_output_options_for_filename_test(lang);\n\n\n\n do_buffer_assertion(&table_constants, &output_options, expecteds);\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 59, "score": 100137.19549417129 }, { "content": "fn do_string_enum_with_description_buffer_test(lang: Language, expecteds: &[&str]) {\n\n // arrange\n\n let table_constants = get_table_constants_for_string_enum_with_description_buffer_test();\n\n let output_options = get_output_options_for_filename_test(lang);\n\n\n\n do_buffer_assertion(&table_constants, &output_options, expecteds);\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 60, "score": 100137.19549417129 }, { "content": "fn do_object_like_with_description_buffer_test(lang: Language, expecteds: &[&str]) {\n\n // arrange\n\n let table_constants = get_table_constants_for_object_like_with_description_buffer_test();\n\n let output_options = get_output_options_for_filename_test(lang);\n\n\n\n do_buffer_assertion(&table_constants, &output_options, expecteds);\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 61, "score": 100137.19549417129 }, { "content": "fn get_select_statement(columns: &[&str], table_identifier: &str) -> String {\n\n format!(\"SELECT {} FROM {}\", columns.join(\", \"), table_identifier)\n\n}\n\n\n", "file_path": "src/reader/rdbms/mssql.rs", "rank": 62, "score": 100090.69847735166 }, { "content": "#[test]\n\npub fn dotnet_string_enum_buffer() {\n\n do_string_enum_buffer_test(\n\n Language::Dotnet,\n\n &[DOTNET_STRING_ENUM_BUFFER1, DOTNET_STRING_ENUM_BUFFER2],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 63, "score": 99399.31153282826 }, { "content": "#[test]\n\npub fn rust_simple_enum_buffer() {\n\n do_simple_enum_buffer_test(\n\n Language::Rust,\n\n &[RUST_SIMPLE_ENUM_BUFFER1, RUST_SIMPLE_ENUM_BUFFER2],\n\n );\n\n}\n\n\n", "file_path": 
"tests/get_write_configurations.rs", "rank": 64, "score": 99399.31153282826 }, { "content": "#[test]\n\npub fn typescript_simple_enum_buffer() {\n\n do_simple_enum_buffer_test(\n\n Language::Typescript,\n\n &[\n\n TYPESCRIPT_SIMPLE_ENUM_BUFFER1,\n\n TYPESCRIPT_SIMPLE_ENUM_BUFFER2,\n\n ],\n\n );\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 65, "score": 99399.31153282826 }, { "content": "pub fn get_write_configurations(\n\n table_constants: &[TableConstant],\n\n output_options: &OutputOptions,\n\n) -> Vec<WriteConfiguration> {\n\n let file_buffer_engine_types = &output_options\n\n .language_targets\n\n .clone()\n\n .map(|vec| {\n\n vec.iter()\n\n .map(|language_raw| Language::from_string(language_raw))\n\n .collect()\n\n })\n\n .unwrap_or_else(|| vec![Language::default()])\n\n .into_iter()\n\n .map(|language| match language {\n\n Language::Typescript => FileBufferEngineType::Typesript(Typescript {}),\n\n Language::Dotnet => FileBufferEngineType::Dotnet(Dotnet {}),\n\n Language::Rust => FileBufferEngineType::Rust(Rust {}),\n\n })\n\n .collect::<Vec<_>>();\n", "file_path": "src/get_write_configurations/manager.rs", "rank": 66, "score": 99399.31153282826 }, { "content": "#[test]\n\npub fn dotnet_simple_enum_buffer() {\n\n do_simple_enum_buffer_test(\n\n Language::Dotnet,\n\n &[DOTNET_SIMPLE_ENUM_BUFFER1, DOTNET_SIMPLE_ENUM_BUFFER2],\n\n );\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 67, "score": 99399.31153282826 }, { "content": "#[test]\n\npub fn typescript_string_enum_buffer() {\n\n do_string_enum_buffer_test(\n\n Language::Typescript,\n\n &[\n\n TYPESCRIPT_STRING_ENUM_BUFFER1,\n\n TYPESCRIPT_STRING_ENUM_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 68, "score": 99399.31153282826 }, { "content": "#[test]\n\npub fn rust_string_enum_buffer() {\n\n do_string_enum_buffer_test(\n\n Language::Rust,\n\n &[RUST_STRING_ENUM_BUFFER1, RUST_STRING_ENUM_BUFFER2],\n\n )\n\n}\n\n\n", 
"file_path": "tests/get_write_configurations.rs", "rank": 69, "score": 99399.31153282826 }, { "content": "fn get_base_rc(options: ConnectionOptions) -> ConstanceRc {\n\n ConstanceRc {\n\n table_options: vec![],\n\n output_options: Some(OutputOptions::default()),\n\n query_execution_options: QueryExecutionOptions {\n\n connection: Connection {\n\n connection_options: Some(options),\n\n connection_string: None,\n\n },\n\n rdbms: MSSQL.to_string(),\n\n query_timeout_in_ms: None,\n\n should_parallelize: None,\n\n },\n\n }\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 70, "score": 98757.84188456023 }, { "content": "#[test]\n\npub fn dotnet_object_like_enum_buffer() {\n\n do_object_like_buffer_test(\n\n Language::Dotnet,\n\n &[\n\n DOTNET_OBJECT_LIKE_ENUM_BUFFER1,\n\n DOTNET_OBJECT_LIKE_ENUM_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 71, "score": 98152.50808224785 }, { "content": "#[test]\n\npub fn typescript_object_like_enum_buffer() {\n\n do_object_like_buffer_test(\n\n Language::Typescript,\n\n &[\n\n TYPESCRIPT_OBJECT_LIKE_ENUM_BUFFER1,\n\n TYPESCRIPT_OBJECT_LIKE_ENUM_BUFFER2,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/get_write_configurations.rs", "rank": 72, "score": 98152.50808224785 }, { "content": "fn get_before_for_enum(name: &str) -> String {\n\n get_before_for_entity_type(name, EntityType::Enum)\n\n}\n\n\n", "file_path": "src/get_write_configurations/file_buffer_engine/dotnet.rs", "rank": 73, "score": 97582.06466730172 }, { "content": "fn get_before_for_sealed_class(name: &str) -> String {\n\n get_before_for_entity_type(name, EntityType::SealedClass)\n\n}\n\n\n", "file_path": "src/get_write_configurations/file_buffer_engine/dotnet.rs", "rank": 74, "score": 96484.43664021703 }, { "content": "pub fn get_raw_insert_statement(\n\n table_name: &str,\n\n columns: &[&Column],\n\n batch_size: usize,\n\n) -> Result<String, sql::Error> {\n\n insert_into(table_name)\n\n .columns(\n\n 
&columns\n\n .iter()\n\n .map(|column| match column {\n\n Column::Pkey => \"id\",\n\n Column::Text(name) => name,\n\n Column::Number(name) => name,\n\n })\n\n .collect::<Vec<&str>>(),\n\n )\n\n .batch(batch_size)\n\n .compile()\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/sql_util.rs", "rank": 75, "score": 93634.29374933893 }, { "content": "pub fn get_create_table_statement(\n\n table_name: &str,\n\n columns: &[&Column],\n\n) -> Result<String, sql::Error> {\n\n columns\n\n .iter()\n\n .fold(create_table(table_name), |create_table, column| {\n\n create_table.column(match column {\n\n Column::Pkey => \"id\".integer().not_null(),\n\n Column::Text(name) => name.string(),\n\n Column::Number(name) => name.integer(),\n\n })\n\n })\n\n .compile()\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/sql_util.rs", "rank": 76, "score": 93634.29374933893 }, { "content": "#[test]\n\n#[should_panic]\n\npub fn get_extension_no_extension_given_should_panic() {\n\n // arrange\n\n let file_system = FileSystem {};\n\n let expected = \"\";\n\n\n\n let path = get_path(expected);\n\n\n\n // act\n\n file_system.get_extension(&path);\n\n}\n\n\n", "file_path": "src/constancerc/get_runtime_configuration/test/file_system.rs", "rank": 77, "score": 93634.29374933893 }, { "content": "pub fn string_assert(assertable: &str) {\n\n assert_eq!(assertable, \"\");\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 78, "score": 92737.82544278426 }, { "content": "pub fn bool_assert(assertable: bool) {\n\n assert!(!assertable);\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 79, "score": 92737.82544278426 }, { "content": "pub fn num_assert(assertable: i32) {\n\n assert_eq!(assertable, 0);\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 80, "score": 92737.82544278426 }, { "content": "#[test]\n\n#[should_panic]\n\npub fn 
get_extension_foreign_extension_given_should_panic() {\n\n // arrange\n\n let file_system = FileSystem {};\n\n let expected = \"txt\";\n\n\n\n let path = get_path(expected);\n\n\n\n // act\n\n file_system.get_extension(&path);\n\n}\n\n\n", "file_path": "src/constancerc/get_runtime_configuration/test/file_system.rs", "rank": 81, "score": 92609.00283157203 }, { "content": "#[test]\n\npub fn get_extension_yml_extension_given_returns_yml() {\n\n // arrange\n\n let file_system = FileSystem {};\n\n let expected = RcFileExtension::Yaml;\n\n\n\n let path = get_path_from_enum(&expected);\n\n\n\n // act\n\n let actual = file_system.get_extension(&path);\n\n\n\n // assert\n\n assert_eq!(actual, expected);\n\n}\n", "file_path": "src/constancerc/get_runtime_configuration/test/file_system.rs", "rank": 82, "score": 91620.8812446848 }, { "content": "#[test]\n\npub fn get_extension_json_extension_given_returns_json() {\n\n // arrange\n\n let file_system = FileSystem {};\n\n let expected = RcFileExtension::Json;\n\n\n\n let path = get_path_from_enum(&expected);\n\n\n\n // act\n\n let actual = file_system.get_extension(&path);\n\n\n\n // assert\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "src/constancerc/get_runtime_configuration/test/file_system.rs", "rank": 83, "score": 91620.8812446848 }, { "content": "pub fn get_json(filename: &str) -> String {\n\n get_path(\"json\")(filename)\n\n}\n\n\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 84, "score": 89810.55404955134 }, { "content": "pub fn get_yaml(filename: &str) -> String {\n\n get_path(\"yml\")(filename)\n\n}\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 85, "score": 89810.55404955134 }, { "content": "fn get_before_for_entity_type(name: &str, entity_type: EntityType) -> String {\n\n let namespace_statement = \"namespace Constant\";\n\n let entity = match entity_type {\n\n EntityType::Enum => \"enum\",\n\n EntityType::SealedClass => \"sealed class\",\n\n 
};\n\n let name = format!(\"public {} {}\", entity, casing_engine::pascal_case(name));\n\n [\n\n namespace_statement,\n\n NEWLINE,\n\n OPEN_BRACE,\n\n NEWLINE,\n\n FOUR_SPACE_TAB,\n\n &name,\n\n NEWLINE,\n\n FOUR_SPACE_TAB,\n\n OPEN_BRACE,\n\n NEWLINE,\n\n FOUR_SPACE_TAB,\n\n FOUR_SPACE_TAB,\n\n ]\n\n .join(\"\")\n\n}\n\n\n", "file_path": "src/get_write_configurations/file_buffer_engine/dotnet.rs", "rank": 86, "score": 88568.39248246714 }, { "content": "pub fn option_assert<T>(option: &Option<T>) {\n\n assert!(option.is_none());\n\n}\n\n\n\nstatic PATH_PREFIX: &str = \"./tests/get_runtime_configuration_util/rcs\";\n\n\n", "file_path": "tests/get_runtime_configuration_util/common.rs", "rank": 87, "score": 87104.50146403059 }, { "content": "pub fn write_all(write_configurations: &[WriteConfiguration], output_options: &OutputOptions) {\n\n let path = match &output_options.path {\n\n Some(p) => p,\n\n None => DEFAULT_PATH,\n\n };\n\n\n\n for config in write_configurations.iter() {\n\n let res = write(config, path);\n\n if res.is_err() {\n\n panic!(\"File write unsuccessful!\")\n\n }\n\n }\n\n}\n", "file_path": "src/write_all/manager.rs", "rank": 88, "score": 87104.50146403059 }, { "content": "pub trait FileBufferEngine {\n\n fn simple_enum(&self, _constant: &SimpleEnum) -> String;\n\n fn string_enum(&self, _constant: &StringEnum) -> String;\n\n fn object_like(&self, _constant: &ObjectLike) -> String;\n\n}\n\n\n\npub enum FileBufferEngineType {\n\n Typesript(typescript::Typescript),\n\n Dotnet(dotnet::Dotnet),\n\n Rust(rust::Rust),\n\n}\n\n\n", "file_path": "src/get_write_configurations/file_buffer_engine/mod.rs", "rank": 89, "score": 84274.56008140347 }, { "content": "pub fn create_table_statement() -> Result<String, sql::Error> {\n\n get_create_table_statement(TABLE_NAME, &COLUMNS)\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/object_like.rs", "rank": 90, "score": 83073.5537672367 }, { "content": "pub fn create_table_statement() -> 
Result<String, sql::Error> {\n\n get_create_table_statement(TABLE_NAME, &COLUMNS)\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/string_enum.rs", "rank": 91, "score": 83073.5537672367 }, { "content": "pub fn create_table_statement() -> Result<String, sql::Error> {\n\n get_create_table_statement(TABLE_NAME, &COLUMNS)\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/simple_enum.rs", "rank": 92, "score": 83073.5537672367 }, { "content": "pub fn to_substituted(statement: &str, args: &[String]) -> String {\n\n let (before, after) = get_before_and_after(&statement);\n\n\n\n let substitutions = format!(\n\n \"({})\",\n\n args.into_iter()\n\n .enumerate()\n\n .map(|(index, arg)| if index == 0 {\n\n arg.to_string()\n\n } else {\n\n format!(\"'{}'\", arg)\n\n })\n\n .collect::<Vec<String>>()\n\n .join(\", \")\n\n );\n\n [before, substitutions, after].join(\"\")\n\n}\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/insert_utils.rs", "rank": 93, "score": 82578.54242427673 }, { "content": "fn get_json<'a, T>(buf: &'a str) -> Result<Vec<T>, serde_json::Error>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n serde_json::from_str::<Vec<T>>(buf)\n\n}\n\n\n\nuse std::env;\n", "file_path": "tests/table_to_constants_util/mssql/pretest/src/main.rs", "rank": 94, "score": 80293.48799676943 }, { "content": "fn get_after(identifier: &TableIdentifier) -> String {\n\n let export = format!(\"export default {}\", get_name(identifier));\n\n [NEWLINE, CLOSE_BRACE, NEWLINE, NEWLINE, &export].join(\"\")\n\n}\n\nuse super::{\n\n get_value, tokens::CLOSE_BRACE, tokens::COMMA, tokens::COMMENT_END, tokens::COMMENT_START,\n\n tokens::FOUR_SPACE_TAB, tokens::NEWLINE, tokens::OPEN_BRACE, tokens::SPACE, FileBufferEngine,\n\n};\n\npub struct Typescript {}\n\n\n", "file_path": "src/get_write_configurations/file_buffer_engine/typescript.rs", "rank": 95, "score": 79622.03179029051 }, { "content": "fn get_before(identifier: &TableIdentifier) -> 
String {\n\n let name = format!(\"enum {}\", get_name(identifier));\n\n [&name, SPACE, OPEN_BRACE, NEWLINE, FOUR_SPACE_TAB].join(\"\")\n\n}\n", "file_path": "src/get_write_configurations/file_buffer_engine/typescript.rs", "rank": 96, "score": 79622.03179029051 }, { "content": "fn get_before(identifier: &TableIdentifier) -> String {\n\n let name = format!(\n\n \"pub enum {}\",\n\n casing_engine::pascal_case(&identifier.object_name)\n\n );\n\n [&name, SPACE, OPEN_BRACE, NEWLINE, FOUR_SPACE_TAB].join(\"\")\n\n}\n\n\n", "file_path": "src/get_write_configurations/file_buffer_engine/rust.rs", "rank": 97, "score": 79622.03179029051 }, { "content": "fn get_columns<'a>(table_option: &'a TableOption) -> ColumnsDto<'a> {\n\n let key_column_name = &table_option.key_column_name;\n\n let description_column_name = match &table_option.description_column_name {\n\n Some(ref d) => d,\n\n None => \"\",\n\n };\n\n let value_columns = &table_option.value_columns;\n\n let value_column_names = value_columns\n\n .iter()\n\n .map(|Column { name, .. }| name.as_str())\n\n .collect::<Vec<&str>>();\n\n let nested_columns: Vec<Vec<&str>> = vec![\n\n vec![&key_column_name],\n\n vec![&description_column_name],\n\n value_column_names,\n\n ];\n\n let column_names = nested_columns\n\n .into_iter()\n\n .flatten()\n\n .filter(|str| !str.is_empty())\n", "file_path": "src/reader/rdbms/mssql.rs", "rank": 98, "score": 78813.40487638098 }, { "content": "fn get_table_options(\n\n table_name: &str,\n\n key_column_name: String,\n\n value_column: &[Column],\n\n description_column_name: Option<String>,\n\n) -> Vec<TableOption> {\n\n let identifier = get_identifier(table_name);\n\n vec![TableOption {\n\n identifier,\n\n key_column_name,\n\n value_columns: value_column.to_vec(),\n\n description_column_name,\n\n }]\n\n}\n\n\n", "file_path": "tests/table_to_constants_util/mssql/mod.rs", "rank": 99, "score": 76965.4756545218 } ]
Rust
src/sinks/util/tcp.rs
parampavar/vector
83bd797ff6a05fb3246a2442a701db3a85e323b5
use std::{ io::ErrorKind, net::SocketAddr, pin::Pin, task::{Context, Poll}, time::Duration, }; use async_trait::async_trait; use bytes::{Bytes, BytesMut}; use futures::{stream::BoxStream, task::noop_waker_ref, SinkExt, StreamExt}; use serde::{Deserialize, Serialize}; use snafu::{ResultExt, Snafu}; use tokio::{ io::{AsyncRead, ReadBuf}, net::TcpStream, time::sleep, }; use tokio_util::codec::Encoder; use vector_core::{buffers::Acker, ByteSizeOf}; use crate::{ config::SinkContext, dns, event::Event, internal_events::{ ConnectionOpen, OpenGauge, SocketMode, TcpSocketConnectionError, TcpSocketConnectionEstablished, TcpSocketConnectionShutdown, TcpSocketError, }, sink::VecSinkExt, sinks::{ util::{ encoding::Transformer, retries::ExponentialBackoff, socket_bytes_sink::{BytesSink, ShutdownCheck}, EncodedEvent, SinkBuildError, StreamSink, }, Healthcheck, VectorSink, }, tcp::TcpKeepaliveConfig, tls::{MaybeTlsSettings, MaybeTlsStream, TlsEnableableConfig, TlsError}, }; #[derive(Debug, Snafu)] enum TcpError { #[snafu(display("Connect error: {}", source))] ConnectError { source: TlsError }, #[snafu(display("Unable to resolve DNS: {}", source))] DnsError { source: dns::DnsError }, #[snafu(display("No addresses returned."))] NoAddresses, #[snafu(display("Send error: {}", source))] SendError { source: tokio::io::Error }, } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct TcpSinkConfig { address: String, keepalive: Option<TcpKeepaliveConfig>, tls: Option<TlsEnableableConfig>, send_buffer_bytes: Option<usize>, } impl TcpSinkConfig { pub const fn new( address: String, keepalive: Option<TcpKeepaliveConfig>, tls: Option<TlsEnableableConfig>, send_buffer_bytes: Option<usize>, ) -> Self { Self { address, keepalive, tls, send_buffer_bytes, } } pub const fn from_address(address: String) -> Self { Self { address, keepalive: None, tls: None, send_buffer_bytes: None, } } pub fn build( &self, cx: SinkContext, transformer: Transformer, encoder: impl Encoder<Event, Error = 
codecs::encoding::Error> + Clone + Send + Sync + 'static, ) -> crate::Result<(VectorSink, Healthcheck)> { let uri = self.address.parse::<http::Uri>()?; let host = uri.host().ok_or(SinkBuildError::MissingHost)?.to_string(); let port = uri.port_u16().ok_or(SinkBuildError::MissingPort)?; let tls = MaybeTlsSettings::from_config(&self.tls, false)?; let connector = TcpConnector::new(host, port, self.keepalive, tls, self.send_buffer_bytes); let sink = TcpSink::new(connector.clone(), cx.acker(), transformer, encoder); Ok(( VectorSink::from_event_streamsink(sink), Box::pin(async move { connector.healthcheck().await }), )) } } #[derive(Clone)] struct TcpConnector { host: String, port: u16, keepalive: Option<TcpKeepaliveConfig>, tls: MaybeTlsSettings, send_buffer_bytes: Option<usize>, } impl TcpConnector { const fn new( host: String, port: u16, keepalive: Option<TcpKeepaliveConfig>, tls: MaybeTlsSettings, send_buffer_bytes: Option<usize>, ) -> Self { Self { host, port, keepalive, tls, send_buffer_bytes, } } #[cfg(test)] fn from_host_port(host: String, port: u16) -> Self { Self::new(host, port, None, None.into(), None) } const fn fresh_backoff() -> ExponentialBackoff { ExponentialBackoff::from_millis(2) .factor(250) .max_delay(Duration::from_secs(60)) } async fn connect(&self) -> Result<MaybeTlsStream<TcpStream>, TcpError> { let ip = dns::Resolver .lookup_ip(self.host.clone()) .await .context(DnsSnafu)? 
.next() .ok_or(TcpError::NoAddresses)?; let addr = SocketAddr::new(ip, self.port); self.tls .connect(&self.host, &addr) .await .context(ConnectSnafu) .map(|mut maybe_tls| { if let Some(keepalive) = self.keepalive { if let Err(error) = maybe_tls.set_keepalive(keepalive) { warn!(message = "Failed configuring TCP keepalive.", %error); } } if let Some(send_buffer_bytes) = self.send_buffer_bytes { if let Err(error) = maybe_tls.set_send_buffer_bytes(send_buffer_bytes) { warn!(message = "Failed configuring send buffer size on TCP socket.", %error); } } maybe_tls }) } async fn connect_backoff(&self) -> MaybeTlsStream<TcpStream> { let mut backoff = Self::fresh_backoff(); loop { match self.connect().await { Ok(socket) => { emit!(TcpSocketConnectionEstablished { peer_addr: socket.peer_addr().ok(), }); return socket; } Err(error) => { emit!(TcpSocketConnectionError { error }); sleep(backoff.next().unwrap()).await; } } } } async fn healthcheck(&self) -> crate::Result<()> { self.connect().await.map(|_| ()).map_err(Into::into) } } struct TcpSink<E> where E: Encoder<Event, Error = codecs::encoding::Error> + Clone + Send + Sync, { connector: TcpConnector, acker: Acker, transformer: Transformer, encoder: E, } impl<E> TcpSink<E> where E: Encoder<Event, Error = codecs::encoding::Error> + Clone + Send + Sync + 'static, { fn new(connector: TcpConnector, acker: Acker, transformer: Transformer, encoder: E) -> Self { Self { connector, acker, transformer, encoder, } } async fn connect(&self) -> BytesSink<MaybeTlsStream<TcpStream>> { let stream = self.connector.connect_backoff().await; BytesSink::new( stream, Self::shutdown_check, self.acker.clone(), SocketMode::Tcp, ) } fn shutdown_check(stream: &mut MaybeTlsStream<TcpStream>) -> ShutdownCheck { let mut cx = Context::from_waker(noop_waker_ref()); let mut buf = [0u8; 1]; let mut buf = ReadBuf::new(&mut buf); match Pin::new(stream).poll_read(&mut cx, &mut buf) { Poll::Ready(Err(error)) => ShutdownCheck::Error(error), Poll::Ready(Ok(())) if 
buf.filled().is_empty() => { ShutdownCheck::Close("ShutdownCheck::Close") } _ => ShutdownCheck::Alive, } } } #[async_trait] impl<E> StreamSink<Event> for TcpSink<E> where E: Encoder<Event, Error = codecs::encoding::Error> + Clone + Send + Sync + Sync + 'static, { async fn run(self: Box<Self>, input: BoxStream<'_, Event>) -> Result<(), ()> { let mut encoder = self.encoder.clone(); let mut input = input .map(|mut event| { let byte_size = event.size_of(); let finalizers = event.metadata_mut().take_finalizers(); self.transformer.transform(&mut event); let mut bytes = BytesMut::new(); if encoder.encode(event, &mut bytes).is_ok() { let item = bytes.freeze(); EncodedEvent { item, finalizers, byte_size, } } else { EncodedEvent::new(Bytes::new(), 0) } }) .peekable(); while Pin::new(&mut input).peek().await.is_some() { let mut sink = self.connect().await; let _open_token = OpenGauge::new().open(|count| emit!(ConnectionOpen { count })); let result = match sink .send_all_peekable(&mut (&mut input).map(|item| item.item).peekable()) .await { Ok(()) => sink.close().await, Err(error) => Err(error), }; if let Err(error) = result { if error.kind() == ErrorKind::Other && error.to_string() == "ShutdownCheck::Close" { emit!(TcpSocketConnectionShutdown {}); } else { emit!(TcpSocketError { error }); } } } Ok(()) } } #[cfg(test)] mod test { use tokio::net::TcpListener; use super::*; use crate::test_util::{next_addr, trace_init}; #[tokio::test] async fn healthcheck() { trace_init(); let addr = next_addr(); let _listener = TcpListener::bind(&addr).await.unwrap(); let good = TcpConnector::from_host_port(addr.ip().to_string(), addr.port()); assert!(good.healthcheck().await.is_ok()); let addr = next_addr(); let bad = TcpConnector::from_host_port(addr.ip().to_string(), addr.port()); assert!(bad.healthcheck().await.is_err()); } }
use std::{ io::ErrorKind, net::SocketAddr, pin::Pin, task::{Context, Poll}, time::Duration, }; use async_trait::async_trait; use bytes::{Bytes, BytesMut}; use futures::{stream::BoxStream, task::noop_waker_ref, SinkExt, StreamExt}; use serde::{Deserialize, Serialize}; use snafu::{ResultExt, Snafu}; use tokio::{ io::{AsyncRead, ReadBuf}, net::TcpStream, time::sleep, }; use tokio_util::codec::Encoder; use vector_core::{buffers::Acker, ByteSizeOf}; use crate::{ config::SinkContext, dns, event::Event, internal_events::{ ConnectionOpen, OpenGauge, SocketMode, TcpSocketConnectionError, TcpSocketConnectionEstablished, TcpSocketConnectionShutdown, TcpSocketError, }, sink::VecSinkExt, sinks::{ util::{ encoding::Transformer, retries::ExponentialBackoff, socket_bytes_sink::{BytesSink, ShutdownCheck}, EncodedEvent, SinkBuildError, StreamSink, }, Healthcheck, VectorSink, }, tcp::TcpKeepaliveConfig, tls::{MaybeTlsSettings, MaybeTlsStream, TlsEnableableConfig, TlsError}, }; #[derive(Debug, Snafu)] enum TcpError { #[snafu(display("Connect error: {}", source))] ConnectError { source: TlsError }, #[snafu(display("Unable to resolve DNS: {}", source))] DnsError { source: dns::DnsError }, #[snafu(display("No addresses returned."))] NoAddresses, #[snafu(display("Send error: {}", source))] SendError { source: tokio::io::Error }, } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct TcpSinkConfig { address: String, keepalive: Option<TcpKeepaliveConfig>, tls: Option<TlsEnableableConfig>, send_buffer_bytes: Option<usize>, } impl TcpSinkConfig { pub const fn new( address: String, keepalive: Option<TcpKeepaliveConfig>, tls: Option<TlsEnableableConfig>, send_buffer_bytes: Option<usize>, ) -> Self { Self { address, keepalive, tls, send_buffer_bytes, } } pub const fn from_address(address: String) -> Self { Self { address, keepalive: None, tls: None, send_buffer_bytes: None, } } pub fn build( &self, cx: SinkContext, transformer: Transformer, encoder: impl Encoder<Event, Error = 
codecs::encoding::Error> + Clone + Send + Sync + 'static, ) -> crate::Result<(VectorSink, Healthcheck)> { let uri = self.address.parse::<http::Uri>()?; let host = uri.host().ok_or(SinkBuildError::MissingHost)?.to_string(); let port = uri.port_u16().ok_or(SinkBuildError::MissingPort)?; let tls = MaybeTlsSettings::from_config(&self.tls, false)?; let connector = TcpConnector::new(host, port, self.keepalive, tls, self.send_buffer_bytes); let sink = TcpSink::new(connector.clone(), cx.acker(), transformer, encoder); Ok(( VectorSink::from_event_streamsink(sink), Box::pin(async move { connector.healthcheck().await }), )) } } #[derive(Clone)] struct TcpConnector { host: String, port: u16, keepalive: Option<TcpKeepaliveConfig>, tls: MaybeTlsSettings, send_buffer_bytes: Option<usize>, } impl TcpConnector { const fn new( host: String, port: u16, keepalive: Option<TcpKeepaliveConfig>, tls: MaybeTlsSettings, send_buffer_bytes: Option<usize>, ) -> Self { Self { host, port, keepalive, tls, send_buffer_bytes, } } #[cfg(test)] fn from_host_port(host: String, port: u16) -> Self { Self::new(host, port, None, None.into(), None) } const fn fresh_backoff() -> ExponentialBackoff { ExponentialBackoff::from_millis(2) .factor(250) .max_delay(Duration::from_secs(60)) } async fn connect(&self) -> Result<MaybeTlsStream<TcpStream>, TcpError> { let ip = dns::Resolver .lookup_ip(self.host.clone()) .await .context(DnsSnafu)? .next() .ok_or(TcpError::NoAddresses)?; let addr = SocketAddr::new(ip, self.port); self.tls .connect(&self.host, &addr) .await .context(ConnectSnafu) .map(|mut maybe_tls| { if let Some(keepalive) = self.keepalive { if let Err(error) = maybe_tls.set_keepalive(keepalive) { warn!(message = "Failed configuring TCP keepalive.", %error); } } if let Some(send_buffer_bytes) = self.send_buffer_bytes {
} maybe_tls }) } async fn connect_backoff(&self) -> MaybeTlsStream<TcpStream> { let mut backoff = Self::fresh_backoff(); loop { match self.connect().await { Ok(socket) => { emit!(TcpSocketConnectionEstablished { peer_addr: socket.peer_addr().ok(), }); return socket; } Err(error) => { emit!(TcpSocketConnectionError { error }); sleep(backoff.next().unwrap()).await; } } } } async fn healthcheck(&self) -> crate::Result<()> { self.connect().await.map(|_| ()).map_err(Into::into) } } struct TcpSink<E> where E: Encoder<Event, Error = codecs::encoding::Error> + Clone + Send + Sync, { connector: TcpConnector, acker: Acker, transformer: Transformer, encoder: E, } impl<E> TcpSink<E> where E: Encoder<Event, Error = codecs::encoding::Error> + Clone + Send + Sync + 'static, { fn new(connector: TcpConnector, acker: Acker, transformer: Transformer, encoder: E) -> Self { Self { connector, acker, transformer, encoder, } } async fn connect(&self) -> BytesSink<MaybeTlsStream<TcpStream>> { let stream = self.connector.connect_backoff().await; BytesSink::new( stream, Self::shutdown_check, self.acker.clone(), SocketMode::Tcp, ) } fn shutdown_check(stream: &mut MaybeTlsStream<TcpStream>) -> ShutdownCheck { let mut cx = Context::from_waker(noop_waker_ref()); let mut buf = [0u8; 1]; let mut buf = ReadBuf::new(&mut buf); match Pin::new(stream).poll_read(&mut cx, &mut buf) { Poll::Ready(Err(error)) => ShutdownCheck::Error(error), Poll::Ready(Ok(())) if buf.filled().is_empty() => { ShutdownCheck::Close("ShutdownCheck::Close") } _ => ShutdownCheck::Alive, } } } #[async_trait] impl<E> StreamSink<Event> for TcpSink<E> where E: Encoder<Event, Error = codecs::encoding::Error> + Clone + Send + Sync + Sync + 'static, { async fn run(self: Box<Self>, input: BoxStream<'_, Event>) -> Result<(), ()> { let mut encoder = self.encoder.clone(); let mut input = input .map(|mut event| { let byte_size = event.size_of(); let finalizers = event.metadata_mut().take_finalizers(); self.transformer.transform(&mut 
event); let mut bytes = BytesMut::new(); if encoder.encode(event, &mut bytes).is_ok() { let item = bytes.freeze(); EncodedEvent { item, finalizers, byte_size, } } else { EncodedEvent::new(Bytes::new(), 0) } }) .peekable(); while Pin::new(&mut input).peek().await.is_some() { let mut sink = self.connect().await; let _open_token = OpenGauge::new().open(|count| emit!(ConnectionOpen { count })); let result = match sink .send_all_peekable(&mut (&mut input).map(|item| item.item).peekable()) .await { Ok(()) => sink.close().await, Err(error) => Err(error), }; if let Err(error) = result { if error.kind() == ErrorKind::Other && error.to_string() == "ShutdownCheck::Close" { emit!(TcpSocketConnectionShutdown {}); } else { emit!(TcpSocketError { error }); } } } Ok(()) } } #[cfg(test)] mod test { use tokio::net::TcpListener; use super::*; use crate::test_util::{next_addr, trace_init}; #[tokio::test] async fn healthcheck() { trace_init(); let addr = next_addr(); let _listener = TcpListener::bind(&addr).await.unwrap(); let good = TcpConnector::from_host_port(addr.ip().to_string(), addr.port()); assert!(good.healthcheck().await.is_ok()); let addr = next_addr(); let bad = TcpConnector::from_host_port(addr.ip().to_string(), addr.port()); assert!(bad.healthcheck().await.is_err()); } }
if let Err(error) = maybe_tls.set_send_buffer_bytes(send_buffer_bytes) { warn!(message = "Failed configuring send buffer size on TCP socket.", %error); }
if_condition
[ { "content": "pub trait TcpSource: Clone + Send + Sync + 'static\n\nwhere\n\n <<Self as TcpSource>::Decoder as tokio_util::codec::Decoder>::Item: std::marker::Send,\n\n{\n\n // Should be default: `std::io::Error`.\n\n // Right now this is unstable: https://github.com/rust-lang/rust/issues/29661\n\n type Error: From<io::Error>\n\n + StreamDecodingError\n\n + std::fmt::Debug\n\n + std::fmt::Display\n\n + Send\n\n + Unpin;\n\n type Item: Into<SmallVec<[Event; 1]>> + Send + Unpin;\n\n type Decoder: Decoder<Item = (Self::Item, usize), Error = Self::Error> + Send + 'static;\n\n type Acker: TcpSourceAcker + Send;\n\n\n\n fn decoder(&self) -> Self::Decoder;\n\n\n\n fn handle_events(&self, _events: &mut [Event], _host: std::net::SocketAddr) {}\n\n\n", "file_path": "src/sources/util/tcp/mod.rs", "rank": 0, "score": 672736.7293231342 }, { "content": "#[async_trait]\n\npub trait HttpSource: Clone + Send + Sync + 'static {\n\n fn build_events(\n\n &self,\n\n body: Bytes,\n\n header_map: HeaderMap,\n\n query_parameters: HashMap<String, String>,\n\n path: &str,\n\n ) -> Result<Vec<Event>, ErrorMessage>;\n\n\n\n #[allow(clippy::too_many_arguments)]\n\n fn run(\n\n self,\n\n address: SocketAddr,\n\n path: &str,\n\n method: HttpMethod,\n\n strict_path: bool,\n\n tls: &Option<TlsEnableableConfig>,\n\n auth: &Option<HttpSourceAuthConfig>,\n\n cx: SourceContext,\n\n acknowledgements: AcknowledgementsConfig,\n", "file_path": "src/sources/util/http/prelude.rs", "rank": 1, "score": 602069.3067030129 }, { "content": "pub trait RetryLogic: Clone + Send + Sync + 'static {\n\n type Error: std::error::Error + Send + Sync + 'static;\n\n type Response;\n\n\n\n fn is_retriable_error(&self, error: &Self::Error) -> bool;\n\n\n\n fn should_retry_response(&self, _response: &Self::Response) -> RetryAction {\n\n // Treat the default as the request is successful\n\n RetryAction::Successful\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct FixedRetryPolicy<L> {\n\n remaining_attempts: usize,\n\n 
previous_duration: Duration,\n\n current_duration: Duration,\n\n max_duration: Duration,\n\n logic: L,\n\n}\n", "file_path": "src/sinks/util/retries.rs", "rank": 2, "score": 594374.129710623 }, { "content": "fn handle_decode_error(encoding: &str, error: impl std::error::Error) -> ErrorMessage {\n\n emit!(HttpDecompressError {\n\n encoding,\n\n error: &error\n\n });\n\n ErrorMessage::new(\n\n StatusCode::UNPROCESSABLE_ENTITY,\n\n format!(\"Failed decompressing payload with {} decoder.\", encoding),\n\n )\n\n}\n", "file_path": "src/sources/util/http/encoding.rs", "rank": 3, "score": 558494.0527184076 }, { "content": "/// Simplify the URI into a protocol and endpoint by removing the\n\n/// \"query\" portion of the `path_and_query`.\n\npub fn protocol_endpoint(uri: Uri) -> (String, String) {\n\n let mut parts = uri.into_parts();\n\n\n\n // Drop any username and password\n\n parts.authority = parts.authority.map(|auth| {\n\n let host = auth.host();\n\n match auth.port() {\n\n None => host.to_string(),\n\n Some(port) => format!(\"{}:{}\", host, port),\n\n }\n\n .parse()\n\n .unwrap_or_else(|_| unreachable!())\n\n });\n\n\n\n // Drop the query and fragment\n\n parts.path_and_query = parts.path_and_query.map(|pq| {\n\n pq.path()\n\n .parse::<PathAndQuery>()\n\n .unwrap_or_else(|_| unreachable!())\n\n });\n", "file_path": "src/sinks/util/uri.rs", "rank": 4, "score": 554744.2303565046 }, { "content": "/// An error that occurred while framing bytes.\n\npub trait FramingError: std::error::Error + Send + Sync {}\n\n\n\nimpl std::error::Error for BoxedFramingError {}\n\n\n\nimpl FramingError for std::io::Error {}\n\n\n\nimpl FramingError for LinesCodecError {}\n\n\n\nimpl From<std::io::Error> for BoxedFramingError {\n\n fn from(error: std::io::Error) -> Self {\n\n Box::new(error)\n\n }\n\n}\n\n\n\n/// A `Box` containing a `FramingError`.\n\npub type BoxedFramingError = Box<dyn FramingError>;\n\n\n", "file_path": "lib/codecs/src/encoding/framing/mod.rs", "rank": 5, "score": 
547794.5643919711 }, { "content": "pub fn build_healthcheck(bucket: String, client: S3Client) -> crate::Result<Healthcheck> {\n\n let healthcheck = async move {\n\n let req = client\n\n .head_bucket()\n\n .bucket(bucket.clone())\n\n .set_expected_bucket_owner(None)\n\n .send()\n\n .await;\n\n\n\n match req {\n\n Ok(_) => Ok(()),\n\n Err(error) => Err(match error {\n\n SdkError::ServiceError { err: _, raw } => match raw.http().status() {\n\n StatusCode::FORBIDDEN => HealthcheckError::InvalidCredentials.into(),\n\n StatusCode::NOT_FOUND => HealthcheckError::UnknownBucket { bucket }.into(),\n\n status => HealthcheckError::UnknownStatus { status }.into(),\n\n },\n\n error => error.into(),\n\n }),\n\n }\n", "file_path": "src/sinks/s3_common/config.rs", "rank": 6, "score": 547267.1070210647 }, { "content": "/// A `Filter` is a generic type that contains methods that are invoked by the `build_filter`\n\n/// function. Each method returns a heap-allocated `Matcher<V>` (typically a closure) containing\n\n/// logic to determine whether the value matches the filter. 
A filter is intended to be side-effect\n\n/// free and idempotent, and so only receives an immutable reference to self.\n\npub trait Filter<V: Debug + Send + Sync + Clone + 'static>: DynClone {\n\n /// Determine whether a field value exists.\n\n fn exists(&self, field: Field) -> Box<dyn Matcher<V>>;\n\n\n\n /// Determine whether a field value equals `to_match`.\n\n fn equals(&self, field: Field, to_match: &str) -> Box<dyn Matcher<V>>;\n\n\n\n /// Determine whether a value starts with a prefix.\n\n fn prefix(&self, field: Field, prefix: &str) -> Box<dyn Matcher<V>>;\n\n\n\n /// Determine whether a value matches a wilcard.\n\n fn wildcard(&self, field: Field, wildcard: &str) -> Box<dyn Matcher<V>>;\n\n\n\n /// Compare a field value against `comparison_value`, using one of the `comparator` operators.\n\n fn compare(\n\n &self,\n\n field: Field,\n\n comparator: Comparison,\n\n comparison_value: ComparisonValue,\n\n ) -> Box<dyn Matcher<V>>;\n", "file_path": "lib/datadog/filter/src/filter.rs", "rank": 7, "score": 541653.4099081288 }, { "content": "/// Every internal event in this crate has a corresponding\n\n/// method in this trait which should emit the event.\n\npub trait FileSourceInternalEvents: Send + Sync + Clone + 'static {\n\n fn emit_file_added(&self, path: &Path);\n\n\n\n fn emit_file_resumed(&self, path: &Path, file_position: u64);\n\n\n\n fn emit_file_watch_error(&self, path: &Path, error: Error);\n\n\n\n fn emit_file_unwatched(&self, path: &Path);\n\n\n\n fn emit_file_deleted(&self, path: &Path);\n\n\n\n fn emit_file_delete_error(&self, path: &Path, error: Error);\n\n\n\n fn emit_file_fingerprint_read_error(&self, path: &Path, error: Error);\n\n\n\n fn emit_file_checkpointed(&self, count: usize, duration: Duration);\n\n\n\n fn emit_file_checksum_failed(&self, path: &Path);\n\n\n\n fn emit_file_checkpoint_write_error(&self, error: Error);\n\n\n\n fn emit_files_open(&self, count: usize);\n\n\n\n fn emit_path_globbing_failed(&self, path: &Path, error: 
&Error);\n\n}\n", "file_path": "lib/file-source/src/internal_events.rs", "rank": 8, "score": 530033.072181168 }, { "content": "#[async_trait::async_trait]\n\npub trait HttpSink: Send + Sync + 'static {\n\n type Input;\n\n type Output;\n\n type Encoder: HttpEventEncoder<Self::Input>;\n\n\n\n fn build_encoder(&self) -> Self::Encoder;\n\n async fn build_request(&self, events: Self::Output) -> crate::Result<http::Request<Bytes>>;\n\n}\n\n\n\n/// Provides a simple wrapper around internal tower and\n\n/// batching sinks for http.\n\n///\n\n/// This type wraps some `HttpSink` and some `Batch` type\n\n/// and will apply request, batch and tls settings. Internally,\n\n/// it holds an Arc reference to the `HttpSink`. It then exposes\n\n/// a `Sink` interface that can be returned from `SinkConfig`.\n\n///\n\n/// Implementation details we require to buffer a single item due\n\n/// to how `Sink` works. This is because we must \"encode\" the type\n\n/// to be able to send it to the inner batch type and sink. 
Because of\n", "file_path": "src/sinks/util/http.rs", "rank": 9, "score": 526408.7983710358 }, { "content": "/// Check if a port is free on TCP\n\npub fn is_free_tcp(ip: IpAddr, port: Port) -> bool {\n\n test_bind_tcp(SocketAddr::new(ip, port)).is_some()\n\n}\n\n\n", "file_path": "lib/portpicker/src/lib.rs", "rank": 10, "score": 518887.83752192085 }, { "content": "pub trait ReduceValueMerger: std::fmt::Debug + Send + Sync {\n\n fn add(&mut self, v: Value) -> Result<(), String>;\n\n fn insert_into(self: Box<Self>, k: String, v: &mut LogEvent) -> Result<(), String>;\n\n}\n\n\n\nimpl From<Value> for Box<dyn ReduceValueMerger> {\n\n fn from(v: Value) -> Self {\n\n match v {\n\n Value::Integer(i) => Box::new(AddNumbersMerger::new(i.into())),\n\n Value::Float(f) => Box::new(AddNumbersMerger::new(f.into())),\n\n Value::Timestamp(ts) => Box::new(TimestampWindowMerger::new(ts)),\n\n Value::Object(_) => Box::new(DiscardMerger::new(v)),\n\n Value::Null => Box::new(DiscardMerger::new(v)),\n\n Value::Boolean(_) => Box::new(DiscardMerger::new(v)),\n\n Value::Bytes(_) => Box::new(DiscardMerger::new(v)),\n\n Value::Regex(_) => Box::new(DiscardMerger::new(v)),\n\n Value::Array(_) => Box::new(DiscardMerger::new(v)),\n\n }\n\n }\n\n}\n", "file_path": "src/transforms/reduce/merge_strategy.rs", "rank": 11, "score": 511867.6616968109 }, { "content": "pub fn build_uri(host: &str, path: &str) -> Result<Uri, http::uri::InvalidUri> {\n\n format!(\"{}{}\", host.trim_end_matches('/'), path).parse::<Uri>()\n\n}\n\n\n", "file_path": "src/sinks/splunk_hec/common/util.rs", "rank": 12, "score": 510749.5154761084 }, { "content": "#[typetag::serde(tag = \"type\")]\n\npub trait ConditionConfig: std::fmt::Debug + Send + Sync + dyn_clone::DynClone {\n\n fn build(&self, enrichment_tables: &enrichment::TableRegistry) -> crate::Result<Condition>;\n\n}\n\n\n\ndyn_clone::clone_trait_object!(ConditionConfig);\n\n\n", "file_path": "src/conditions/mod.rs", "rank": 13, "score": 506436.460211901 }, { "content": 
"pub fn next_addr_for_ip(ip: IpAddr) -> SocketAddr {\n\n let port = pick_unused_port(ip);\n\n SocketAddr::new(ip, port)\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 14, "score": 492441.3963886496 }, { "content": "fn handle_decode_error(encoding: &str, error: impl std::error::Error) -> ErrorMessage {\n\n emit!(HttpDecompressError {\n\n encoding,\n\n error: &error\n\n });\n\n ErrorMessage::new(\n\n StatusCode::UNPROCESSABLE_ENTITY,\n\n format!(\"Failed decompressing payload with {} decoder.\", encoding),\n\n )\n\n}\n\n\n\n// https://github.com/DataDog/datadog-agent/blob/a33248c2bc125920a9577af1e16f12298875a4ad/pkg/logs/processor/json.go#L23-L49\n", "file_path": "src/sources/datadog/agent/mod.rs", "rank": 15, "score": 492083.19867677963 }, { "content": "fn agent_health_address(port: Option<u16>) -> String {\n\n std::env::var(\"AGENT_HEALTH_ADDRESS\")\n\n .unwrap_or_else(|_| format!(\"http://0.0.0.0:{}\", port.unwrap_or(8182)))\n\n}\n\n\n\nconst AGENT_TIMEOUT: u64 = 60; // timeout in seconds\n\n\n\nasync fn wait_for_agent(port: Option<u16>) {\n\n let start = SystemTime::now();\n\n let address = agent_health_address(port);\n\n while start\n\n .elapsed()\n\n .map(|value| value.as_secs() < AGENT_TIMEOUT)\n\n .unwrap_or(false)\n\n {\n\n if reqwest::get(&address)\n\n .await\n\n .map(|res| res.status().is_success())\n\n .unwrap_or(false)\n\n {\n", "file_path": "src/sources/datadog/agent/integration_tests.rs", "rank": 16, "score": 489469.0784247115 }, { "content": "/// grpc doesn't like an address without a scheme, so we default to http or https if one isn't\n\n/// specified in the address.\n\npub fn with_default_scheme(address: &str, tls: bool) -> crate::Result<Uri> {\n\n let uri: Uri = address.parse()?;\n\n if uri.scheme().is_none() {\n\n // Default the scheme to http or https.\n\n let mut parts = uri.into_parts();\n\n\n\n parts.scheme = if tls {\n\n Some(\n\n \"https\"\n\n .parse()\n\n .unwrap_or_else(|_| unreachable!(\"https should be valid\")),\n\n )\n\n } 
else {\n\n Some(\n\n \"http\"\n\n .parse()\n\n .unwrap_or_else(|_| unreachable!(\"http should be valid\")),\n\n )\n\n };\n\n\n", "file_path": "src/sinks/vector/v2/config.rs", "rank": 17, "score": 485657.55808542314 }, { "content": "fn build_uri(host: &str, endpoint: &str) -> crate::Result<Uri> {\n\n let result = format!(\"{}{}\", host, endpoint)\n\n .parse::<Uri>()\n\n .context(UriParseSnafu)?;\n\n Ok(result)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn generate_config() {\n\n crate::test_util::test_generate_config::<DatadogMetricsConfig>();\n\n }\n\n}\n", "file_path": "src/sinks/datadog/metrics/config.rs", "rank": 18, "score": 478380.0177719198 }, { "content": "fn build_uri(host: &str, endpoint: &str) -> crate::Result<Uri> {\n\n let result = format!(\"{}{}\", host, endpoint)\n\n .parse::<Uri>()\n\n .context(UriParseSnafu)?;\n\n Ok(result)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::sinks::datadog::traces::DatadogTracesConfig;\n\n\n\n #[test]\n\n fn generate_config() {\n\n crate::test_util::test_generate_config::<DatadogTracesConfig>();\n\n }\n\n}\n", "file_path": "src/sinks/datadog/traces/config.rs", "rank": 19, "score": 478380.0177719198 }, { "content": "/// An error that occurred while producing byte frames from a byte stream / byte\n\n/// message.\n\n///\n\n/// It requires conformance to `TcpError` so that we can determine whether the\n\n/// error is recoverable or if trying to continue will lead to hanging up the\n\n/// TCP source indefinitely.\n\npub trait FramingError: std::error::Error + StreamDecodingError + Send + Sync {}\n\n\n\nimpl std::error::Error for BoxedFramingError {}\n\n\n\nimpl FramingError for std::io::Error {}\n\n\n\nimpl FramingError for LinesCodecError {}\n\n\n\nimpl From<std::io::Error> for BoxedFramingError {\n\n fn from(error: std::io::Error) -> Self {\n\n Box::new(error)\n\n }\n\n}\n\n\n\nimpl From<LinesCodecError> for BoxedFramingError {\n\n fn from(error: LinesCodecError) -> Self {\n\n 
Box::new(error)\n\n }\n\n}\n\n\n\n/// A `Box` containing a `FramingError`.\n\npub type BoxedFramingError = Box<dyn FramingError>;\n\n\n\nimpl StreamDecodingError for BoxedFramingError {\n\n fn can_continue(&self) -> bool {\n\n self.as_ref().can_continue()\n\n }\n\n}\n\n\n", "file_path": "lib/codecs/src/decoding/framing/mod.rs", "rank": 20, "score": 478195.6395111234 }, { "content": "pub fn host_key() -> String {\n\n crate::config::log_schema().host_key().to_string()\n\n}\n\n\n", "file_path": "src/sinks/splunk_hec/common/util.rs", "rank": 21, "score": 477504.242488528 }, { "content": "#[async_trait]\n\n#[typetag::serde(tag = \"type\")]\n\npub trait TransformConfig: core::fmt::Debug + Send + Sync + dyn_clone::DynClone {\n\n async fn build(&self, globals: &TransformContext)\n\n -> crate::Result<crate::transform::Transform>;\n\n\n\n fn input(&self) -> Input;\n\n\n\n /// Returns a list of outputs to which this transform can deliver events.\n\n ///\n\n /// The provided `merged_definition` can be used by transforms to understand the expected shape\n\n /// of events flowing through the transform.\n\n fn outputs(&self, merged_definition: &schema::Definition) -> Vec<Output>;\n\n\n\n /// Verifies that the provided outputs and the inner plumbing of the transform are valid.\n\n fn validate(&self, _merged_definition: &schema::Definition) -> Result<(), Vec<String>> {\n\n Ok(())\n\n }\n\n\n\n fn transform_type(&self) -> &'static str;\n\n\n\n /// Return true if the transform is able to be run across multiple tasks simultaneously with no\n", "file_path": "lib/vector-core/src/transform/config.rs", "rank": 22, "score": 471631.03806525574 }, { "content": "/// Picks an available port that is available on both TCP and UDP\n\n/// ```rust\n\n/// use portpicker::pick_unused_port;\n\n/// use std::net::{IpAddr, Ipv4Addr};\n\n/// let port: u16 = pick_unused_port(IpAddr::V4(Ipv4Addr::LOCALHOST));\n\n/// ```\n\npub fn pick_unused_port(ip: IpAddr) -> Port {\n\n let mut rng = thread_rng();\n\n\n\n 
loop {\n\n // Try random port first\n\n for _ in 0..10 {\n\n let port = rng.gen_range(15000..25000);\n\n if is_free(ip, port) {\n\n return port;\n\n }\n\n }\n\n\n\n // Ask the OS for a port\n\n for _ in 0..10 {\n\n if let Some(port) = ask_free_tcp_port(ip) {\n\n // Test that the udp port is free as well\n\n if is_free_udp(ip, port) {\n\n return port;\n\n }\n\n }\n", "file_path": "lib/portpicker/src/lib.rs", "rank": 23, "score": 470688.5272581784 }, { "content": "/// Check if a port is free on both TCP and UDP\n\npub fn is_free(ip: IpAddr, port: Port) -> bool {\n\n is_free_tcp(ip, port) && is_free_udp(ip, port)\n\n}\n\n\n", "file_path": "lib/portpicker/src/lib.rs", "rank": 24, "score": 468995.1382417675 }, { "content": "pub fn format_error(error: &mlua::Error) -> String {\n\n match error {\n\n mlua::Error::CallbackError { traceback, cause } => format_error(cause) + \"\\n\" + traceback,\n\n err => err.to_string(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::event::{Event, Value};\n\n\n\n #[test]\n\n fn lua_add_field() {\n\n crate::test_util::trace_init();\n\n let mut transform = Lua::new(\n\n r#\"\n\n event[\"hello\"] = \"goodbye\"\n\n \"#\n\n .to_string(),\n", "file_path": "src/transforms/lua/v1/mod.rs", "rank": 25, "score": 466577.9148096313 }, { "content": "/// Check if a port is free on UDP\n\npub fn is_free_udp(ip: IpAddr, port: Port) -> bool {\n\n test_bind_udp(SocketAddr::new(ip, port)).is_some()\n\n}\n\n\n", "file_path": "lib/portpicker/src/lib.rs", "rank": 26, "score": 464357.7893634206 }, { "content": "fn spawn_reader_thread<R: 'static + AsyncRead + Unpin + std::marker::Send>(\n\n reader: BufReader<R>,\n\n decoder: Decoder,\n\n origin: &'static str,\n\n sender: Sender<((SmallVec<[Event; 1]>, usize), &'static str)>,\n\n) {\n\n // Start the green background thread for collecting\n\n let _ = Box::pin(tokio::spawn(async move {\n\n debug!(\"Start capturing {} command output.\", origin);\n\n\n\n let mut stream = 
FramedRead::new(reader, decoder);\n\n while let Some(result) = stream.next().await {\n\n match result {\n\n Ok(next) => {\n\n if sender.send((next, origin)).await.is_err() {\n\n // If the receive half of the channel is closed, either due to close being\n\n // called or the Receiver handle dropping, the function returns an error.\n\n debug!(\"Receive channel closed, unable to send.\");\n\n break;\n\n }\n", "file_path": "src/sources/exec/mod.rs", "rank": 27, "score": 457257.6566821026 }, { "content": "#[async_trait]\n\n#[typetag::serde(tag = \"type\")]\n\npub trait SourceConfig: core::fmt::Debug + Send + Sync {\n\n async fn build(&self, cx: SourceContext) -> crate::Result<sources::Source>;\n\n\n\n fn outputs(&self) -> Vec<Output>;\n\n\n\n fn source_type(&self) -> &'static str;\n\n\n\n /// Resources that the source is using.\n\n fn resources(&self) -> Vec<Resource> {\n\n Vec::new()\n\n }\n\n\n\n fn can_acknowledge(&self) -> bool;\n\n}\n\n\n\npub struct SourceContext {\n\n pub key: ComponentKey,\n\n pub globals: GlobalOptions,\n\n pub shutdown: ShutdownSignal,\n\n pub out: SourceSender,\n", "file_path": "src/config/source.rs", "rank": 28, "score": 448964.586421579 }, { "content": "#[async_trait]\n\n#[typetag::serde(tag = \"type\")]\n\npub trait SinkConfig: core::fmt::Debug + Send + Sync {\n\n async fn build(\n\n &self,\n\n cx: SinkContext,\n\n ) -> crate::Result<(sinks::VectorSink, sinks::Healthcheck)>;\n\n\n\n fn input(&self) -> Input;\n\n\n\n fn sink_type(&self) -> &'static str;\n\n\n\n /// Resources that the sink is using.\n\n fn resources(&self) -> Vec<Resource> {\n\n Vec::new()\n\n }\n\n\n\n fn acknowledgements(&self) -> Option<&AcknowledgementsConfig>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SinkContext {\n", "file_path": "src/config/sink.rs", "rank": 29, "score": 448889.01449431945 }, { "content": "pub fn decode(header: &Option<String>, mut body: Bytes) -> Result<Bytes, ErrorMessage> {\n\n if let Some(encodings) = header {\n\n for encoding in 
encodings.rsplit(',').map(str::trim) {\n\n body = match encoding {\n\n \"identity\" => body,\n\n \"gzip\" => {\n\n let mut decoded = Vec::new();\n\n MultiGzDecoder::new(body.reader())\n\n .read_to_end(&mut decoded)\n\n .map_err(|error| handle_decode_error(encoding, error))?;\n\n decoded.into()\n\n }\n\n \"deflate\" => {\n\n let mut decoded = Vec::new();\n\n ZlibDecoder::new(body.reader())\n\n .read_to_end(&mut decoded)\n\n .map_err(|error| handle_decode_error(encoding, error))?;\n\n decoded.into()\n\n }\n\n \"snappy\" => SnappyDecoder::new()\n", "file_path": "src/sources/util/http/encoding.rs", "rank": 30, "score": 446802.54558760533 }, { "content": "pub fn to_json<T: Serialize>(model: &T) -> Result<Vec<u8>, NewRelicSinkError> {\n\n match serde_json::to_vec(model) {\n\n Ok(mut json) => {\n\n json.push(b'\\n');\n\n Ok(json)\n\n }\n\n Err(error) => Err(NewRelicSinkError::new(&format!(\n\n \"Failed generating JSON: {}\",\n\n error\n\n ))),\n\n }\n\n}\n", "file_path": "src/sinks/new_relic/encoding.rs", "rank": 31, "score": 443336.13534767146 }, { "content": "/// Parse structured events from bytes.\n\npub trait Deserializer: DynClone + Debug + Send + Sync {\n\n /// Parses structured events from bytes.\n\n ///\n\n /// It returns a `SmallVec` rather than an `Event` directly, since one byte\n\n /// frame can potentially hold multiple events, e.g. when parsing a JSON\n\n /// array. 
However, we optimize the most common case of emitting one event\n\n /// by not requiring heap allocations for it.\n\n fn parse(&self, bytes: Bytes) -> vector_core::Result<SmallVec<[Event; 1]>>;\n\n}\n\n\n\ndyn_clone::clone_trait_object!(Deserializer);\n\n\n\n/// A `Box` containing a `Deserializer`.\n\npub type BoxedDeserializer = Box<dyn Deserializer>;\n", "file_path": "lib/codecs/src/decoding/format/mod.rs", "rank": 32, "score": 432309.2886916891 }, { "content": "/// Serialize the input value map into a logfmt string.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an `EncodingError` if any of the keys are not strings.\n\npub fn encode_map<V: Serialize>(input: &BTreeMap<String, V>) -> Result<String, EncodingError> {\n\n encode_key_value(input, &[], \"=\", \" \", true)\n\n}\n\n\n", "file_path": "lib/vector-common/src/encode_logfmt.rs", "rank": 33, "score": 432019.7209386211 }, { "content": "/// Broader than the simple [`FunctionTransform`], this trait allows transforms to write to\n\n/// multiple outputs. Those outputs must be known in advanced and returned via\n\n/// `TransformConfig::outputs`. 
Attempting to send to any output not registered in advance is\n\n/// considered a bug and will cause a panic.\n\npub trait SyncTransform: Send + dyn_clone::DynClone + Sync {\n\n fn transform(&mut self, event: Event, output: &mut TransformOutputsBuf);\n\n\n\n fn transform_all(&mut self, events: EventArray, output: &mut TransformOutputsBuf) {\n\n for event in events.into_events() {\n\n self.transform(event, output);\n\n }\n\n }\n\n}\n\n\n\ndyn_clone::clone_trait_object!(SyncTransform);\n\n\n\nimpl<T> SyncTransform for T\n\nwhere\n\n T: FunctionTransform,\n\n{\n\n fn transform(&mut self, event: Event, output: &mut TransformOutputsBuf) {\n\n FunctionTransform::transform(\n\n self,\n\n output.primary_buffer.as_mut().expect(\"no default output\"),\n", "file_path": "lib/vector-core/src/transform/mod.rs", "rank": 34, "score": 429231.8530375129 }, { "content": "pub trait Expression: Send + Sync + fmt::Debug + DynClone {\n\n /// Resolve an expression to a concrete [`Value`].\n\n ///\n\n /// This method is executed at runtime.\n\n ///\n\n /// An expression is allowed to fail, which aborts the running program.\n\n fn resolve(&self, ctx: &mut Context) -> Resolved;\n\n\n\n /// Compile the expression to bytecode that can be interpreted by the VM.\n\n fn compile_to_vm(\n\n &self,\n\n _vm: &mut vm::Vm,\n\n _state: (&mut LocalEnv, &mut ExternalEnv),\n\n ) -> Result<(), String> {\n\n Ok(())\n\n }\n\n\n\n /// Resolve an expression to a value without any context, if possible.\n\n ///\n\n /// This returns `Some` for static expressions, or `None` for dynamic expressions.\n", "file_path": "lib/vrl/compiler/src/expression.rs", "rank": 37, "score": 425098.07844511216 }, { "content": "/// Asks the OS for a free port\n\nfn ask_free_tcp_port(ip: IpAddr) -> Option<Port> {\n\n test_bind_tcp(SocketAddr::new(ip, 0))\n\n}\n\n\n", "file_path": "lib/portpicker/src/lib.rs", "rank": 38, "score": 424966.3223436753 }, { "content": "#[derive(Debug, Snafu)]\n\nenum SinkBuildError {\n\n 
#[snafu(display(\"Missing host in address field\"))]\n\n MissingHost,\n\n #[snafu(display(\"Missing port in address field\"))]\n\n MissingPort,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EncodedEvent<I> {\n\n pub item: I,\n\n pub finalizers: EventFinalizers,\n\n pub byte_size: usize,\n\n}\n\n\n\nimpl<I> EncodedEvent<I> {\n\n /// Create a trivial input with no metadata. This method will be\n\n /// removed when all sinks are converted.\n\n pub fn new(item: I, byte_size: usize) -> Self {\n\n Self {\n\n item,\n", "file_path": "src/sinks/util/mod.rs", "rank": 39, "score": 422759.33053975535 }, { "content": "pub fn docker(host: Option<String>, tls: Option<DockerTlsConfig>) -> crate::Result<Docker> {\n\n let host = host.or_else(|| env::var(\"DOCKER_HOST\").ok());\n\n\n\n match host {\n\n None => Docker::connect_with_local_defaults().map_err(Into::into),\n\n Some(host) => {\n\n let scheme = host\n\n .parse::<Uri>()\n\n .ok()\n\n .and_then(|uri| uri.into_parts().scheme);\n\n\n\n match scheme.as_ref().map(|scheme| scheme.as_str()) {\n\n Some(\"http\") => {\n\n let host = get_authority(&host)?;\n\n Docker::connect_with_http(&host, DEFAULT_TIMEOUT, API_DEFAULT_VERSION)\n\n .map_err(Into::into)\n\n }\n\n Some(\"https\") => {\n\n let host = get_authority(&host)?;\n\n let tls = tls\n", "file_path": "src/docker.rs", "rank": 40, "score": 420377.8624863075 }, { "content": "pub fn load_sink<T>(config: &str) -> crate::Result<(T, SinkContext)>\n\nwhere\n\n for<'a> T: Deserialize<'a> + SinkConfig,\n\n{\n\n let sink_config: T = toml::from_str(config)?;\n\n let cx = SinkContext::new_test();\n\n\n\n Ok((sink_config, cx))\n\n}\n\n\n", "file_path": "src/sinks/util/test.rs", "rank": 41, "score": 420176.4316521468 }, { "content": "#[async_trait]\n\n#[typetag::serde(tag = \"type\")]\n\npub trait ProviderConfig: core::fmt::Debug + Send + Sync + dyn_clone::DynClone {\n\n /// Builds a provider, returning a string containing the config. 
It's passed a signals\n\n /// channel to control reloading and shutdown, as applicable.\n\n async fn build(&mut self, signal_handler: &mut signal::SignalHandler) -> providers::Result;\n\n fn provider_type(&self) -> &'static str;\n\n}\n\n\n\ndyn_clone::clone_trait_object!(ProviderConfig);\n\n\n\n/// Describes a provider plugin storing its type name and an optional example config.\n\npub struct ProviderDescription {\n\n pub type_str: &'static str,\n\n example_value: fn() -> Option<Value>,\n\n}\n\n\n\nimpl ProviderDescription\n\nwhere\n\n inventory::iter<ProviderDescription>:\n\n std::iter::IntoIterator<Item = &'static ProviderDescription>,\n\n{\n", "file_path": "src/config/provider.rs", "rank": 42, "score": 418639.2232941048 }, { "content": "#[async_trait]\n\n#[typetag::serde(tag = \"type\")]\n\npub trait EnrichmentTableConfig: core::fmt::Debug + Send + Sync + dyn_clone::DynClone {\n\n async fn build(\n\n &self,\n\n globals: &GlobalOptions,\n\n ) -> crate::Result<Box<dyn enrichment::Table + Send + Sync>>;\n\n}\n\n\n\npub type EnrichmentTableDescription = ComponentDescription<Box<dyn EnrichmentTableConfig>>;\n\n\n\ninventory::collect!(EnrichmentTableDescription);\n\n\n\n/// Unique thing, like port, of which only one owner can be.\n\n#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]\n\npub enum Resource {\n\n Port(SocketAddr, Protocol),\n\n SystemFdOffset(usize),\n\n Stdin,\n\n DiskBuffer(String),\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 43, "score": 414272.9393594458 }, { "content": "#[typetag::serde(tag = \"type\")]\n\npub trait SecretBackend: core::fmt::Debug + Send + Sync + dyn_clone::DynClone {\n\n fn retrieve(\n\n &mut self,\n\n secret_keys: Vec<String>,\n\n signal_rx: &mut signal::SignalRx,\n\n ) -> crate::Result<HashMap<String, String>>;\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Default)]\n\npub struct SecretBackendLoader {\n\n backends: IndexMap<ComponentKey, Box<dyn SecretBackend>>,\n\n pub(crate) secret_keys: 
HashMap<String, Vec<String>>,\n\n}\n\n\n\nimpl SecretBackendLoader {\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n backends: IndexMap::new(),\n\n secret_keys: HashMap::new(),\n\n }\n", "file_path": "src/config/loading/secret.rs", "rank": 44, "score": 414265.88022299553 }, { "content": "/// A `Matcher` is a type that contains a \"run\" method which returns true/false if value `V`\n\n/// matches a filter.\n\npub trait Matcher<V>: DynClone + fmt::Debug + Send + Sync {\n\n fn run(&self, value: &V) -> bool;\n\n}\n\n\n\nclone_trait_object!(<V>Matcher<V>);\n\n\n\n/// Implementing `Matcher` for bool allows a `Box::new(true|false)` convenience.\n\nimpl<V> Matcher<V> for bool {\n\n fn run(&self, _value: &V) -> bool {\n\n *self\n\n }\n\n}\n\n\n\n/// Container for holding a thread-safe function type that can receive a `V` value and\n\n/// return true/false for whether the value matches some internal expectation.\n\n#[derive(Clone)]\n\npub struct Run<V, T>\n\nwhere\n\n V: fmt::Debug + Send + Sync + Clone,\n\n T: Fn(&V) -> bool + Send + Sync + Clone,\n", "file_path": "lib/datadog/filter/src/matcher.rs", "rank": 45, "score": 413684.3272678431 }, { "content": "/// Transforms that are simple, and don't require attention to coordination.\n\n/// You can run them as simple functions over events in any order.\n\n///\n\n/// # Invariants\n\n///\n\n/// * It is an illegal invariant to implement `FunctionTransform` for a\n\n/// `TaskTransform` or vice versa.\n\npub trait FunctionTransform: Send + dyn_clone::DynClone + Sync {\n\n fn transform(&mut self, output: &mut OutputBuffer, event: Event);\n\n}\n\n\n\ndyn_clone::clone_trait_object!(FunctionTransform);\n\n\n", "file_path": "lib/vector-core/src/transform/mod.rs", "rank": 46, "score": 413558.5988729094 }, { "content": "pub fn random_lines(len: usize) -> impl Iterator<Item = String> {\n\n iter::repeat_with(move || random_string(len))\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 47, "score": 413164.9385635037 }, { "content": 
"pub fn build_framestream_unix_source(\n\n frame_handler: impl FrameHandler + Send + Sync + Clone + 'static,\n\n shutdown: ShutdownSignal,\n\n out: SourceSender,\n\n) -> crate::Result<Source> {\n\n let path = frame_handler.socket_path();\n\n\n\n //check if the path already exists (and try to delete it)\n\n match fs::metadata(&path) {\n\n Ok(_) => {\n\n //exists, so try to delete it\n\n info!(message = \"Deleting file.\", ?path);\n\n fs::remove_file(&path)?;\n\n }\n\n Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => {} //doesn't exist, do nothing\n\n Err(e) => {\n\n error!(\"Unable to get socket information; error = {:?}.\", e);\n\n return Err(Box::new(e));\n\n }\n\n };\n", "file_path": "src/sources/util/framestream.rs", "rank": 48, "score": 398624.1057357027 }, { "content": "pub fn sink_failing_healthcheck(\n\n channel_size: usize,\n\n) -> (impl Stream<Item = EventArray>, MockSinkConfig) {\n\n let (tx, rx) = SourceSender::new_with_buffer(channel_size);\n\n let sink = MockSinkConfig::new(tx, false);\n\n (rx.into_stream(), sink)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 49, "score": 396587.15589216026 }, { "content": "/// Iterates over all paths in form `a.b[0].c[1]` in alphabetical order.\n\n/// It is implemented as a wrapper around `all_fields` to reduce code\n\n/// duplication.\n\npub fn keys(fields: &BTreeMap<String, Value>) -> impl Iterator<Item = String> + '_ {\n\n all_fields(fields).map(|(k, _)| k)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use serde_json::json;\n\n\n\n use super::{super::test::fields_from_json, *};\n\n\n\n #[test]\n\n fn keys_simple() {\n\n let fields = fields_from_json(json!({\n\n \"field2\": 3,\n\n \"field1\": 4,\n\n \"field3\": 5\n\n }));\n\n let expected: Vec<_> = vec![\"field1\", \"field2\", \"field3\"]\n\n .into_iter()\n\n .map(String::from)\n", "file_path": "lib/vector-core/src/event/util/log/keys.rs", "rank": 50, "score": 395898.2344056784 }, { "content": "/// Serialize the input value into a logfmt string. 
If the value is not an object,\n\n/// it is treated as the value of an object where the key is \"message\".\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an `EncodingError` if any of the keys are not strings.\n\npub fn encode_value(input: &Value) -> Result<String, EncodingError> {\n\n if let Some(map) = input.as_object() {\n\n encode_map(map)\n\n } else {\n\n let mut map = BTreeMap::new();\n\n map.insert(\"message\".to_owned(), &input);\n\n encode_map(&map)\n\n }\n\n}\n", "file_path": "lib/vector-common/src/encode_logfmt.rs", "rank": 51, "score": 395078.50166684086 }, { "content": "pub fn next_addr() -> SocketAddr {\n\n next_addr_for_ip(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)))\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 52, "score": 394884.1853208103 }, { "content": "fn emit_received() -> impl Filter<Extract = (), Error = warp::reject::Rejection> + Clone {\n\n warp::any()\n\n .and(warp::header::optional(\"X-Amz-Firehose-Request-Id\"))\n\n .and(warp::header::optional(\"X-Amz-Firehose-Source-Arn\"))\n\n .map(|request_id: Option<String>, source_arn: Option<String>| {\n\n emit!(AwsKinesisFirehoseRequestReceived {\n\n request_id: request_id.as_deref(),\n\n source_arn: source_arn.as_deref(),\n\n });\n\n })\n\n .untuple_one()\n\n}\n\n\n", "file_path": "src/sources/aws_kinesis_firehose/filters.rs", "rank": 53, "score": 394116.0512409039 }, { "content": "/// Returns a `Source` object corresponding to a Unix domain datagram socket.\n\n/// Passing in different functions for `decoder` and `handle_events` can allow\n\n/// for different source-specific logic (such as decoding syslog messages in the\n\n/// syslog source).\n\npub fn build_unix_datagram_source(\n\n listen_path: PathBuf,\n\n socket_file_mode: Option<u32>,\n\n max_length: usize,\n\n decoder: Decoder,\n\n handle_events: impl Fn(&mut [Event], Option<Bytes>) + Clone + Send + Sync + 'static,\n\n shutdown: ShutdownSignal,\n\n out: SourceSender,\n\n) -> crate::Result<Source> {\n\n let socket = 
UnixDatagram::bind(&listen_path).expect(\"Failed to bind to datagram socket\");\n\n info!(message = \"Listening.\", path = ?listen_path, r#type = \"unix_datagram\");\n\n\n\n change_socket_permissions(&listen_path, socket_file_mode)?;\n\n\n\n Ok(Box::pin(async move {\n\n let result = listen(socket, max_length, decoder, shutdown, handle_events, out).await;\n\n\n\n // Delete socket file.\n\n if let Err(error) = remove_file(&listen_path) {\n\n emit!(UnixSocketFileDeleteError {\n", "file_path": "src/sources/util/unix_datagram.rs", "rank": 54, "score": 393311.5057296776 }, { "content": "/// Returns a `Source` object corresponding to a Unix domain stream socket.\n\n/// Passing in different functions for `decoder` and `handle_events` can allow\n\n/// for different source-specific logic (such as decoding syslog messages in the\n\n/// syslog source).\n\npub fn build_unix_stream_source(\n\n listen_path: PathBuf,\n\n socket_file_mode: Option<u32>,\n\n decoder: Decoder,\n\n handle_events: impl Fn(&mut [Event], Option<Bytes>) + Clone + Send + Sync + 'static,\n\n shutdown: ShutdownSignal,\n\n out: SourceSender,\n\n) -> crate::Result<Source> {\n\n let listener = UnixListener::bind(&listen_path).expect(\"Failed to bind to listener socket\");\n\n info!(message = \"Listening.\", path = ?listen_path, r#type = \"unix\");\n\n\n\n change_socket_permissions(&listen_path, socket_file_mode)?;\n\n\n\n Ok(Box::pin(async move {\n\n let connection_open = OpenGauge::new();\n\n let stream = UnixListenerStream::new(listener).take_until(shutdown.clone());\n\n tokio::pin!(stream);\n\n while let Some(socket) = stream.next().await {\n\n let socket = match socket {\n\n Err(error) => {\n", "file_path": "src/sources/util/unix_stream.rs", "rank": 55, "score": 393311.5057296776 }, { "content": "/// Joins namespace with name via delimiter if namespace is present.\n\npub fn encode_namespace<'a>(\n\n namespace: Option<&str>,\n\n delimiter: char,\n\n name: impl Into<Cow<'a, str>>,\n\n) -> String {\n\n let name 
= name.into();\n\n namespace\n\n .map(|namespace| format!(\"{}{}{}\", namespace, delimiter, name))\n\n .unwrap_or_else(|| name.into_owned())\n\n}\n\n\n", "file_path": "src/sinks/util/mod.rs", "rank": 56, "score": 390895.5851737648 }, { "content": "pub fn build_healthcheck(\n\n bucket: String,\n\n client: HttpClient,\n\n base_url: String,\n\n creds: Option<GcpCredentials>,\n\n) -> crate::Result<Healthcheck> {\n\n let healthcheck = async move {\n\n let uri = base_url.parse::<Uri>()?;\n\n let mut request = http::Request::head(uri).body(Body::empty())?;\n\n\n\n if let Some(creds) = creds.as_ref() {\n\n creds.apply(&mut request);\n\n }\n\n\n\n let not_found_error = GcsError::BucketNotFound { bucket }.into();\n\n\n\n let response = client.send(request).await?;\n\n healthcheck_response(response, creds, not_found_error)\n\n };\n\n\n\n Ok(healthcheck.boxed())\n\n}\n\n\n", "file_path": "src/sinks/gcs_common/config.rs", "rank": 57, "score": 390525.0805792755 }, { "content": "pub fn build_healthcheck(\n\n container_name: String,\n\n client: Arc<ContainerClient>,\n\n) -> crate::Result<Healthcheck> {\n\n let healthcheck = async move {\n\n let request = client.get_properties().execute().await;\n\n\n\n match request {\n\n Ok(_) => Ok(()),\n\n Err(reason) => Err(match reason.downcast_ref::<HttpError>() {\n\n Some(HttpError::StatusCode { status, .. 
}) => match *status {\n\n StatusCode::FORBIDDEN => HealthcheckError::InvalidCredentials.into(),\n\n StatusCode::NOT_FOUND => HealthcheckError::UnknownContainer {\n\n container: container_name,\n\n }\n\n .into(),\n\n status => HealthcheckError::Unknown { status }.into(),\n\n },\n\n _ => reason,\n\n }),\n\n }\n\n };\n\n\n\n Ok(healthcheck.boxed())\n\n}\n\n\n", "file_path": "src/sinks/azure_common/config.rs", "rank": 58, "score": 390525.0805792755 }, { "content": "pub fn build_test_server(\n\n addr: SocketAddr,\n\n) -> (\n\n mpsc::Receiver<(http::request::Parts, Bytes)>,\n\n Trigger,\n\n impl std::future::Future<Output = Result<(), ()>>,\n\n) {\n\n build_test_server_generic(addr, || Response::new(Body::empty()))\n\n}\n\n\n", "file_path": "src/sinks/util/test.rs", "rank": 59, "score": 390126.52506853663 }, { "content": "/// Check if `except_fields` and `only_fields` items are mutually exclusive.\n\n///\n\n/// If an error is returned, the entire encoding configuration should be considered inoperable.\n\npub fn validate_fields(\n\n only_fields: Option<&[OwnedPath]>,\n\n except_fields: Option<&[String]>,\n\n) -> Result<()> {\n\n if let (Some(only_fields), Some(except_fields)) = (only_fields, except_fields) {\n\n if except_fields.iter().any(|f| {\n\n let path_iter = parse_path(f);\n\n only_fields.iter().any(|v| v == &path_iter)\n\n }) {\n\n return Err(\"`except_fields` and `only_fields` should be mutually exclusive.\".into());\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sinks/util/encoding/mod.rs", "rank": 60, "score": 390010.29520055826 }, { "content": "pub fn vector_version() -> impl std::fmt::Display {\n\n #[cfg(feature = \"nightly\")]\n\n let pkg_version = format!(\"{}-nightly\", built_info::PKG_VERSION);\n\n\n\n #[cfg(not(feature = \"nightly\"))]\n\n let pkg_version = built_info::PKG_VERSION;\n\n\n\n pkg_version\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 61, "score": 389819.37931915716 }, { "content": "pub fn next_addr_v6() -> SocketAddr {\n\n 
next_addr_for_ip(IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)))\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 62, "score": 389540.40967027924 }, { "content": "pub fn get_hostname() -> std::io::Result<String> {\n\n Ok(hostname::get()?.to_string_lossy().into())\n\n}\n\n\n\n#[track_caller]\n\npub(crate) fn spawn_named<T>(\n\n task: impl std::future::Future<Output = T> + Send + 'static,\n\n _name: &str,\n\n) -> tokio::task::JoinHandle<T>\n\nwhere\n\n T: Send + 'static,\n\n{\n\n #[cfg(tokio_unstable)]\n\n return tokio::task::Builder::new().name(_name).spawn(task);\n\n\n\n #[cfg(not(tokio_unstable))]\n\n tokio::spawn(task)\n\n}\n", "file_path": "src/lib.rs", "rank": 63, "score": 389340.66842154274 }, { "content": "pub fn open_fixture(path: impl AsRef<Path>) -> crate::Result<serde_json::Value> {\n\n let test_file = match File::open(path) {\n\n Ok(file) => file,\n\n Err(e) => return Err(e.into()),\n\n };\n\n let value: serde_json::Value = serde_json::from_reader(test_file)?;\n\n Ok(value)\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 64, "score": 388439.53876600094 }, { "content": "pub trait Function: Send + Sync + fmt::Debug {\n\n /// The identifier by which the function can be called.\n\n fn identifier(&self) -> &'static str;\n\n\n\n /// A brief single-line description explaining what this function does.\n\n fn summary(&self) -> &'static str {\n\n \"TODO\"\n\n }\n\n\n\n /// A more elaborate multi-paragraph description on how to use the function.\n\n fn usage(&self) -> &'static str {\n\n \"TODO\"\n\n }\n\n\n\n /// One or more examples demonstrating usage of the function in VRL source\n\n /// code.\n\n fn examples(&self) -> &'static [Example];\n\n // fn examples(&self) -> &'static [Example] {\n\n // &[/* ODO */]\n\n // }\n", "file_path": "lib/vrl/compiler/src/function.rs", "rank": 65, "score": 385929.99230386526 }, { "content": "pub fn build_test_server_status(\n\n addr: SocketAddr,\n\n status: StatusCode,\n\n) -> (\n\n 
mpsc::Receiver<(http::request::Parts, Bytes)>,\n\n Trigger,\n\n impl std::future::Future<Output = Result<(), ()>>,\n\n) {\n\n build_test_server_generic(addr, move || {\n\n Response::builder()\n\n .status(status)\n\n .body(Body::empty())\n\n .unwrap_or_else(|_| unreachable!())\n\n })\n\n}\n\n\n", "file_path": "src/sinks/util/test.rs", "rank": 66, "score": 384339.9986501852 }, { "content": "#[cfg(test)]\n\nfn format_error(error: &mlua::Error) -> String {\n\n match error {\n\n mlua::Error::CallbackError { traceback, cause } => format_error(cause) + \"\\n\" + traceback,\n\n err => err.to_string(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use futures::{stream, StreamExt};\n\n\n\n use super::*;\n\n use crate::{\n\n event::{\n\n metric::{Metric, MetricKind, MetricValue},\n\n Event, Value,\n\n },\n\n test_util::trace_init,\n\n transforms::TaskTransform,\n\n };\n", "file_path": "src/transforms/lua/v2/mod.rs", "rank": 67, "score": 381193.9405967832 }, { "content": "/// Decode (if needed) and parse request body\n\n///\n\n/// Firehose can be configured to gzip compress messages so we handle this here\n\nfn parse_body() -> impl Filter<Extract = (FirehoseRequest,), Error = warp::reject::Rejection> + Clone\n\n{\n\n warp::any()\n\n .and(warp::header::optional::<String>(\"Content-Encoding\"))\n\n .and(warp::header(\"X-Amz-Firehose-Request-Id\"))\n\n .and(warp::body::bytes())\n\n .and_then(\n\n |encoding: Option<String>, request_id: String, body: Bytes| async move {\n\n match encoding {\n\n Some(s) if s == \"gzip\" => {\n\n Ok(Box::new(MultiGzDecoder::new(body.reader())) as Box<dyn io::Read>)\n\n }\n\n Some(s) => Err(warp::reject::Rejection::from(\n\n RequestError::UnsupportedEncoding {\n\n encoding: s,\n\n request_id: request_id.clone(),\n\n },\n\n )),\n\n None => Ok(Box::new(body.reader()) as Box<dyn io::Read>),\n\n }\n", "file_path": "src/sources/aws_kinesis_firehose/filters.rs", "rank": 68, "score": 380163.2322777304 }, { "content": "fn result_status<R: Response + 
Send>(result: crate::Result<R>) -> EventStatus {\n\n match result {\n\n Ok(response) => {\n\n if response.is_successful() {\n\n trace!(message = \"Response successful.\", ?response);\n\n EventStatus::Delivered\n\n } else if response.is_transient() {\n\n error!(message = \"Response wasn't successful.\", ?response);\n\n EventStatus::Errored\n\n } else {\n\n error!(message = \"Response failed.\", ?response);\n\n EventStatus::Rejected\n\n }\n\n }\n\n Err(error) => {\n\n error!(message = \"Request failed.\", %error);\n\n EventStatus::Errored\n\n }\n\n }\n\n}\n\n\n\n// === Response ===\n\n\n", "file_path": "src/sinks/util/sink.rs", "rank": 69, "score": 378534.09966169484 }, { "content": "pub fn err_serde_failed(context: Ctxt) -> darling::Error {\n\n context\n\n .check()\n\n .map_err(|errs| darling::Error::multiple(errs.into_iter().map(Into::into).collect()))\n\n .expect_err(\"serde error context should not be empty\")\n\n}\n\n\n", "file_path": "lib/vector-config-macros/src/ast/util.rs", "rank": 70, "score": 377406.3343949098 }, { "content": "pub fn build_test_server_generic<B>(\n\n addr: SocketAddr,\n\n responder: impl Fn() -> Response<B> + Clone + Send + Sync + 'static,\n\n) -> (\n\n mpsc::Receiver<(http::request::Parts, Bytes)>,\n\n Trigger,\n\n impl std::future::Future<Output = Result<(), ()>>,\n\n)\n\nwhere\n\n B: HttpBody + Send + 'static,\n\n <B as HttpBody>::Data: Send + Sync,\n\n <B as HttpBody>::Error: snafu::Error + Send + Sync,\n\n{\n\n let (tx, rx) = mpsc::channel(100);\n\n let service = make_service_fn(move |_| {\n\n let responder = responder.clone();\n\n let tx = tx.clone();\n\n async move {\n\n let responder = responder.clone();\n\n Ok::<_, Error>(service_fn(move |req: Request<Body>| {\n", "file_path": "src/sinks/util/test.rs", "rank": 71, "score": 373668.83541122667 }, { "content": "pub fn render_template_string<'a>(\n\n template: &Template,\n\n event: impl Into<EventRef<'a>>,\n\n field_name: &str,\n\n) -> Option<String> {\n\n template\n\n 
.render_string(event)\n\n .map_err(|error| {\n\n emit!(TemplateRenderingError {\n\n error,\n\n field: Some(field_name),\n\n drop_event: false\n\n });\n\n })\n\n .ok()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use bytes::Bytes;\n", "file_path": "src/sinks/splunk_hec/common/util.rs", "rank": 72, "score": 373598.8578774605 }, { "content": "pub fn build_http_batch_service(\n\n client: HttpClient,\n\n http_request_builder: Arc<HttpRequestBuilder>,\n\n) -> HttpBatchService<BoxFuture<'static, Result<Request<Bytes>, crate::Error>>, HecRequest> {\n\n HttpBatchService::new(client, move |req: HecRequest| {\n\n let request_builder = Arc::clone(&http_request_builder);\n\n let future: BoxFuture<'static, Result<http::Request<Bytes>, crate::Error>> =\n\n Box::pin(async move {\n\n request_builder.build_request(\n\n req.body,\n\n \"/services/collector/event\",\n\n req.passthrough_token,\n\n )\n\n });\n\n future\n\n })\n\n}\n\n\n\npub async fn build_healthcheck(\n\n endpoint: String,\n", "file_path": "src/sinks/splunk_hec/common/util.rs", "rank": 73, "score": 373417.05898952135 }, { "content": "/// Transforms that tend to be more complicated runtime style components.\n\n///\n\n/// These require coordination and map a stream of some `T` to some `U`.\n\n///\n\n/// # Invariants\n\n///\n\n/// * It is an illegal invariant to implement `FunctionTransform` for a\n\n/// `TaskTransform` or vice versa.\n\npub trait TaskTransform<T: EventContainer + 'static>: Send + 'static {\n\n fn transform(\n\n self: Box<Self>,\n\n task: Pin<Box<dyn Stream<Item = T> + Send>>,\n\n ) -> Pin<Box<dyn Stream<Item = T> + Send>>;\n\n\n\n /// Wrap the transform task to process and emit individual\n\n /// events. 
This is used to simplify testing task transforms.\n\n fn transform_events(\n\n self: Box<Self>,\n\n task: Pin<Box<dyn Stream<Item = Event> + Send>>,\n\n ) -> Pin<Box<dyn Stream<Item = Event> + Send>>\n\n where\n\n T: From<Event>,\n\n T::IntoIter: Send,\n\n {\n\n self.transform(task.map(Into::into).boxed())\n\n .flat_map(into_event_stream)\n\n .boxed()\n\n }\n\n}\n\n\n", "file_path": "lib/vector-core/src/transform/mod.rs", "rank": 74, "score": 368554.69192228187 }, { "content": "/// converts Joda time format to strptime format\n\npub fn convert_time_format(format: &str) -> std::result::Result<String, String> {\n\n let mut time_format = String::new();\n\n let mut chars = format.chars().peekable();\n\n while let Some(&c) = chars.peek() {\n\n if ('A'..='Z').contains(&c) || ('a'..='z').contains(&c) {\n\n let token: String = chars.by_ref().peeking_take_while(|&cn| cn == c).collect();\n\n match token.chars().next().unwrap() {\n\n // hour of day (number, 1..12)\n\n 'h' => time_format.push_str(\"%I\"),\n\n // hour of day (number, 0..23)\n\n 'H' => time_format.push_str(\"%H\"),\n\n // minute of hour\n\n 'm' => time_format.push_str(\"%M\"),\n\n // second of minute\n\n 's' => time_format.push_str(\"%S\"),\n\n // fraction of second\n\n 'S' => {\n\n if time_format.ends_with('.') {\n\n time_format.pop(); // drop .\n\n time_format.push_str(\"%.f\");\n", "file_path": "lib/datadog/grok/src/matchers/date.rs", "rank": 75, "score": 367595.3456518636 }, { "content": "pub fn default_namespace() -> String {\n\n \"apache\".to_string()\n\n}\n\n\n\ninventory::submit! 
{\n\n SourceDescription::new::<ApacheMetricsConfig>(\"apache_metrics\")\n\n}\n\n\n\nimpl GenerateConfig for ApacheMetricsConfig {\n\n fn generate_config() -> toml::Value {\n\n toml::Value::try_from(Self {\n\n endpoints: vec![\"http://localhost:8080/server-status/?auto\".to_owned()],\n\n scrape_interval_secs: default_scrape_interval_secs(),\n\n namespace: default_namespace(),\n\n })\n\n .unwrap()\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n", "file_path": "src/sources/apache_metrics/mod.rs", "rank": 76, "score": 367108.40300632385 }, { "content": "pub fn default_endpoint() -> String {\n\n \"https://localhost:2113/stats\".to_string()\n\n}\n\n\n\ninventory::submit! {\n\n SourceDescription::new::<EventStoreDbConfig>(\"eventstoredb_metrics\")\n\n}\n\n\n\nimpl_generate_config_from_default!(EventStoreDbConfig);\n\n\n\n#[async_trait::async_trait]\n\n#[typetag::serde(name = \"eventstoredb_metrics\")]\n\nimpl SourceConfig for EventStoreDbConfig {\n\n async fn build(&self, cx: SourceContext) -> crate::Result<super::Source> {\n\n eventstoredb(\n\n self.endpoint.clone(),\n\n self.scrape_interval_secs,\n\n self.default_namespace.clone(),\n\n cx,\n\n )\n", "file_path": "src/sources/eventstoredb_metrics/mod.rs", "rank": 77, "score": 367108.40300632385 }, { "content": "pub fn default_namespace() -> String {\n\n \"nginx\".to_string()\n\n}\n\n\n\ninventory::submit! 
{\n\n SourceDescription::new::<NginxMetricsConfig>(\"nginx_metrics\")\n\n}\n\n\n\nimpl_generate_config_from_default!(NginxMetricsConfig);\n\n\n\n#[async_trait::async_trait]\n\n#[typetag::serde(name = \"nginx_metrics\")]\n\nimpl SourceConfig for NginxMetricsConfig {\n\n async fn build(&self, mut cx: SourceContext) -> crate::Result<super::Source> {\n\n let tls = TlsSettings::from_options(&self.tls)?;\n\n let http_client = HttpClient::new(tls, &cx.proxy)?;\n\n\n\n let namespace = Some(self.namespace.clone()).filter(|namespace| !namespace.is_empty());\n\n let mut sources = Vec::with_capacity(self.endpoints.len());\n\n for endpoint in self.endpoints.iter() {\n", "file_path": "src/sources/nginx_metrics/mod.rs", "rank": 78, "score": 367108.40300632385 }, { "content": "pub fn default_namespace() -> String {\n\n \"mongodb\".to_string()\n\n}\n\n\n\ninventory::submit! {\n\n SourceDescription::new::<MongoDbMetricsConfig>(\"mongodb_metrics\")\n\n}\n\n\n\nimpl_generate_config_from_default!(MongoDbMetricsConfig);\n\n\n\n#[async_trait::async_trait]\n\n#[typetag::serde(name = \"mongodb_metrics\")]\n\nimpl SourceConfig for MongoDbMetricsConfig {\n\n async fn build(&self, mut cx: SourceContext) -> crate::Result<super::Source> {\n\n let namespace = Some(self.namespace.clone()).filter(|namespace| !namespace.is_empty());\n\n\n\n let sources = try_join_all(\n\n self.endpoints\n\n .iter()\n\n .map(|endpoint| MongoDbMetrics::new(endpoint, namespace.clone())),\n", "file_path": "src/sources/mongodb_metrics/mod.rs", "rank": 79, "score": 367108.40300632385 }, { "content": "/// Encodes input to key value format with specified\n\n/// delimiters in field order where unspecified fields\n\n/// will follow after them. 
`Flattens_boolean` values\n\n/// to only a key if true.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an `EncodingError` if the input contains non-`String` map keys.\n\npub fn to_string<V: Serialize>(\n\n input: &BTreeMap<String, V>,\n\n fields_order: &[String],\n\n key_value_delimiter: &str,\n\n field_delimiter: &str,\n\n flatten_boolean: bool,\n\n) -> Result<String, EncodingError> {\n\n let mut output = String::new();\n\n\n\n let mut input = flatten(input, '.')?;\n\n\n\n for field in fields_order.iter() {\n\n match (input.remove(field), flatten_boolean) {\n\n (Some(Data::Boolean(false)), true) | (None, _) => (),\n\n (Some(Data::Boolean(true)), true) => {\n\n encode_string(&mut output, field);\n\n output.push_str(field_delimiter);\n\n }\n\n (Some(value), _) => {\n\n encode_field(&mut output, field, &value.to_string(), key_value_delimiter);\n", "file_path": "lib/vector-common/src/encode_key_value.rs", "rank": 80, "score": 365000.1331271884 }, { "content": "pub fn random_string(len: usize) -> String {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(len)\n\n .map(char::from)\n\n .collect::<String>()\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 81, "score": 364741.7284347362 }, { "content": "pub fn apache_error_log_line() -> String {\n\n // Example log line:\n\n // [Sat Oct 31 19:27:55 2020] [deleniti:crit] [pid 879:tid 9607] [client 169.198.228.174:1364] Something bad happened\n\n format!(\n\n \"[{}] [{}:{}] [pid {}:tid] [client {}:{}] {}\",\n\n timestamp_apache_error(),\n\n username(),\n\n error_level(),\n\n pid(),\n\n ipv4_address(),\n\n port(),\n\n error_message(),\n\n )\n\n}\n\n\n", "file_path": "lib/fakedata/src/logs.rs", "rank": 82, "score": 363617.3110121008 }, { "content": "pub fn default_namespace() -> String {\n\n \"awsecs\".to_string()\n\n}\n\n\n\ninventory::submit! 
{\n\n SourceDescription::new::<AwsEcsMetricsSourceConfig>(\"aws_ecs_metrics\")\n\n}\n\n\n\nimpl AwsEcsMetricsSourceConfig {\n\n fn stats_endpoint(&self) -> String {\n\n match self.version {\n\n Version::V2 => format!(\"{}/stats\", self.endpoint),\n\n _ => format!(\"{}/task/stats\", self.endpoint),\n\n }\n\n }\n\n}\n\n\n\nimpl GenerateConfig for AwsEcsMetricsSourceConfig {\n\n fn generate_config() -> toml::Value {\n\n toml::Value::try_from(Self {\n", "file_path": "src/sources/aws_ecs_metrics/mod.rs", "rank": 83, "score": 362657.2258801641 }, { "content": "pub fn default_endpoint() -> String {\n\n env::var(METADATA_URI_V4)\n\n .or_else(|_| env::var(METADATA_URI_V3))\n\n .unwrap_or_else(|_| \"http://169.254.170.2/v2\".into())\n\n}\n\n\n", "file_path": "src/sources/aws_ecs_metrics/mod.rs", "rank": 84, "score": 362657.2258801641 }, { "content": "pub fn change_socket_permissions(path: &Path, perms: Option<u32>) -> crate::Result<()> {\n\n if let Some(mode) = perms {\n\n match fs::set_permissions(path, fs::Permissions::from_mode(mode)) {\n\n Ok(_) => debug!(message = \"Socket permissions updated.\", permission = mode),\n\n Err(e) => {\n\n if let Err(error) = remove_file(path) {\n\n emit!(UnixSocketFileDeleteError { path, error });\n\n }\n\n return Err(Box::new(e));\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/sources/util/unix.rs", "rank": 85, "score": 362269.6617655519 }, { "content": "/// The behavior of a encoding configuration.\n\npub trait EncodingConfiguration {\n\n type Codec;\n\n // Required Accessors\n\n\n\n fn codec(&self) -> &Self::Codec;\n\n fn schema(&self) -> &Option<String>;\n\n fn only_fields(&self) -> &Option<Vec<OwnedPath>>;\n\n fn except_fields(&self) -> &Option<Vec<String>>;\n\n fn timestamp_format(&self) -> &Option<TimestampFormat>;\n\n\n\n fn apply_only_fields(&self, log: &mut LogEvent) {\n\n if let Some(only_fields) = &self.only_fields() {\n\n let mut to_remove = match log.keys() {\n\n Some(keys) => keys\n\n .filter(|field| {\n\n let 
field_path = parse_path(field);\n\n !only_fields\n\n .iter()\n\n .any(|only| field_path.segments.starts_with(&only.segments[..]))\n\n })\n", "file_path": "src/sinks/util/encoding/mod.rs", "rank": 86, "score": 360390.66035523766 }, { "content": "pub fn sink(channel_size: usize) -> (impl Stream<Item = EventArray>, MockSinkConfig) {\n\n let (tx, rx) = SourceSender::new_with_buffer(channel_size);\n\n let sink = MockSinkConfig::new(tx, true);\n\n (rx.into_stream(), sink)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 87, "score": 356985.55265869875 }, { "content": "#[derive(Debug, Snafu)]\n\nenum BuildError {\n\n #[snafu(display(\n\n \"Missing authentication key, must provide either 'license_key' or 'insert_key'\"\n\n ))]\n\n MissingAuthParam,\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug, Eq, PartialEq, Clone, Derivative)]\n\n#[serde(rename_all = \"snake_case\")]\n\n#[derivative(Default)]\n\npub enum NewRelicLogsRegion {\n\n #[derivative(Default)]\n\n Us,\n\n Eu,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Default)]\n\npub struct NewRelicLogsDefaultBatchSettings;\n\n\n\nimpl SinkBatchSettings for NewRelicLogsDefaultBatchSettings {\n", "file_path": "src/sinks/new_relic_logs.rs", "rank": 88, "score": 356806.6210066003 }, { "content": "pub fn timestamp_nanos_key() -> Option<String> {\n\n Some(\"@timestamp.nanos\".to_string())\n\n}\n\n\n\nimpl GenerateConfig for HumioLogsConfig {\n\n fn generate_config() -> toml::Value {\n\n toml::Value::try_from(Self {\n\n token: \"${HUMIO_TOKEN}\".to_owned(),\n\n endpoint: None,\n\n source: None,\n\n encoding: Encoding::Json.into(),\n\n event_type: None,\n\n indexed_fields: vec![],\n\n index: None,\n\n host_key: host_key(),\n\n compression: Compression::default(),\n\n request: TowerRequestConfig::default(),\n\n batch: BatchConfig::default(),\n\n tls: None,\n\n timestamp_nanos_key: None,\n", "file_path": "src/sinks/humio/logs.rs", "rank": 89, "score": 355607.48931498796 }, { "content": "fn serialize_int64_value<S>(value: 
&Option<i64>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: serde::Serializer,\n\n{\n\n serializer.serialize_str(value.as_ref().expect(\"always defined\").to_string().as_str())\n\n}\n\n\n", "file_path": "src/sinks/gcp/mod.rs", "rank": 90, "score": 355237.3187130814 }, { "content": "#[derive(Debug, Clone, Default, PartialEq, Deserialize, Serialize)]\n\nstruct TransformerInner {\n\n #[serde(default, skip_serializing_if = \"skip_serializing_if_default\")]\n\n only_fields: Option<Vec<OwnedPath>>,\n\n #[serde(default, skip_serializing_if = \"skip_serializing_if_default\")]\n\n except_fields: Option<Vec<String>>,\n\n #[serde(default, skip_serializing_if = \"skip_serializing_if_default\")]\n\n timestamp_format: Option<TimestampFormat>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use codecs::encoding::CharacterDelimitedEncoderOptions;\n\n use lookup::lookup_v2::parse_path;\n\n\n\n use super::*;\n\n use crate::sinks::util::encoding::EncodingConfig;\n\n\n\n #[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize)]\n\n #[serde(rename_all = \"snake_case\")]\n\n enum FooLegacyEncoding {\n", "file_path": "src/sinks/util/encoding/adapter.rs", "rank": 91, "score": 354884.1022507927 }, { "content": "pub fn validate_quantiles(quantiles: &[f64]) -> Result<(), ValidationError> {\n\n if quantiles\n\n .iter()\n\n .all(|&quantile| (0.0..=1.0).contains(&quantile))\n\n {\n\n Ok(())\n\n } else {\n\n Err(ValidationError::QuantileOutOfRange)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n impl PartialEq<Self> for DistributionStatistic {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.min == other.min\n\n && self.max == other.max\n\n && self.median == other.median\n", "file_path": "src/sinks/util/statistic.rs", "rank": 92, "score": 350671.9655702081 }, { "content": "pub fn parse(input: impl AsRef<str>) -> Result<Program, Error> {\n\n let lexer = lex::Lexer::new(input.as_ref());\n\n\n\n parser::ProgramParser::new()\n\n .parse(input.as_ref(), 
lexer)\n\n .map_err(|source| Error::ParseError {\n\n span: Span::new(0, input.as_ref().len()),\n\n source: source\n\n .map_token(|t| t.map(|s| s.to_owned()))\n\n .map_error(|err| err.to_string()),\n\n dropped_tokens: vec![],\n\n })\n\n}\n\n\n", "file_path": "lib/vrl/parser/src/lib.rs", "rank": 93, "score": 349626.7736619827 }, { "content": "pub fn generate_const_string_schema(value: String) -> SchemaObject {\n\n SchemaObject {\n\n const_value: Some(Value::String(value)),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "lib/vector-config/src/schema.rs", "rank": 94, "score": 349610.5220854216 }, { "content": "// Try to bind to a socket using TCP\n\nfn test_bind_tcp<A: ToSocketAddrs>(addr: A) -> Option<Port> {\n\n Some(TcpListener::bind(addr).ok()?.local_addr().ok()?.port())\n\n}\n\n\n", "file_path": "lib/portpicker/src/lib.rs", "rank": 95, "score": 347898.04460393055 }, { "content": "#[derive(Clone, Copy, Debug, Snafu)]\n\nenum Error {\n\n Deferred,\n\n}\n\n\n", "file_path": "src/sinks/util/adaptive_concurrency/tests.rs", "rank": 96, "score": 347826.62573198276 }, { "content": "pub fn encode_log(mut event: Event, encoding: &EncodingConfig<Encoding>) -> Option<Bytes> {\n\n encoding.apply_rules(&mut event);\n\n let log = event.into_log();\n\n\n\n let b = match encoding.codec() {\n\n Encoding::Json => serde_json::to_vec(&log),\n\n Encoding::Text => {\n\n let bytes = log\n\n .get(crate::config::log_schema().message_key())\n\n .map(|v| v.coerce_to_bytes().to_vec())\n\n .unwrap_or_default();\n\n Ok(bytes)\n\n }\n\n };\n\n\n\n b.map(|mut b| {\n\n b.push(b'\\n');\n\n Bytes::from(b)\n\n })\n\n .map_err(|error| error!(message = \"Unable to encode.\", %error))\n\n .ok()\n\n}\n\n\n", "file_path": "src/sinks/util/mod.rs", "rank": 97, "score": 347672.1730062712 }, { "content": "pub fn generate_lines_with_stream<Gen: FnMut(usize) -> String>(\n\n generator: Gen,\n\n count: usize,\n\n batch: Option<Arc<BatchNotifier>>,\n\n) -> (Vec<String>, impl Stream<Item = EventArray>) 
{\n\n let lines = (0..count).map(generator).collect::<Vec<_>>();\n\n let stream = map_batch_stream(stream::iter(lines.clone()).map(LogEvent::from), batch);\n\n (lines, stream)\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 98, "score": 347103.59565667465 }, { "content": "pub fn prepare_input<R: std::io::Read>(mut input: R) -> Result<(String, Vec<String>), Vec<String>> {\n\n let mut source_string = String::new();\n\n input\n\n .read_to_string(&mut source_string)\n\n .map_err(|e| vec![e.to_string()])?;\n\n\n\n let mut vars = std::env::vars().collect::<HashMap<_, _>>();\n\n if !vars.contains_key(\"HOSTNAME\") {\n\n if let Ok(hostname) = crate::get_hostname() {\n\n vars.insert(\"HOSTNAME\".into(), hostname);\n\n }\n\n }\n\n vars::interpolate(&source_string, &vars)\n\n}\n\n\n", "file_path": "src/config/loading/mod.rs", "rank": 99, "score": 346754.9209967102 } ]
Rust
delsum-lib/src/fletcher/mod.rs
8051Enthusiast/delsum
5605cd8343cb8ba3133eea31610968cd3c6444d4
mod rev; use crate::bitnum::{BitNum, Modnum}; use crate::checksum::{CheckBuilderErr, Digest, LinearCheck}; use crate::endian::{Endian, WordSpec}; use crate::keyval::KeyValIter; use num_traits::{One, Zero}; pub use rev::reverse_fletcher; #[cfg(feature = "parallel")] pub use rev::reverse_fletcher_para; use std::fmt::Display; use std::str::FromStr; #[derive(Clone, Debug)] pub struct FletcherBuilder<Sum: Modnum> { width: Option<usize>, module: Option<Sum>, init: Option<Sum>, addout: Option<Sum::Double>, swap: Option<bool>, input_endian: Option<Endian>, output_endian: Option<Endian>, wordsize: Option<usize>, check: Option<Sum::Double>, name: Option<String>, } impl<S: Modnum> FletcherBuilder<S> { pub fn width(&mut self, w: usize) -> &mut Self { self.width = Some(w); self } pub fn module(&mut self, m: S) -> &mut Self { self.module = Some(m); self } pub fn init(&mut self, i: S) -> &mut Self { self.init = Some(i); self } pub fn addout(&mut self, o: S::Double) -> &mut Self { self.addout = Some(o); self } pub fn swap(&mut self, s: bool) -> &mut Self { self.swap = Some(s); self } pub fn inendian(&mut self, e: Endian) -> &mut Self { self.input_endian = Some(e); self } pub fn wordsize(&mut self, n: usize) -> &mut Self { self.wordsize = Some(n); self } pub fn outendian(&mut self, e: Endian) -> &mut Self { self.output_endian = Some(e); self } pub fn check(&mut self, c: S::Double) -> &mut Self { self.check = Some(c); self } pub fn name(&mut self, n: &str) -> &mut Self { self.name = Some(String::from(n)); self } pub fn build(&self) -> Result<Fletcher<S>, CheckBuilderErr> { let init = self.init.unwrap_or_else(S::zero); let addout = self.addout.unwrap_or_else(S::Double::zero); let hwidth = match self.width { None => return Err(CheckBuilderErr::MissingParameter("width")), Some(w) => { if w % 2 != 0 || w > addout.bits() { return Err(CheckBuilderErr::ValueOutOfRange("width")); } else { w / 2 } } }; let mask = (S::Double::one() << hwidth) - S::Double::one(); let module = 
self.module.unwrap_or_else(S::zero); let wordsize = self.wordsize.unwrap_or(8); if wordsize == 0 || wordsize % 8 != 0 || wordsize > 64 { return Err(CheckBuilderErr::ValueOutOfRange("wordsize")); } let wordspec = WordSpec { input_endian: self.input_endian.unwrap_or(Endian::Big), wordsize, output_endian: self.output_endian.unwrap_or(Endian::Big), }; let mut fletch = Fletcher { hwidth, module, init, addout, swap: self.swap.unwrap_or(false), wordspec, mask, name: self.name.clone(), }; let (mut s, mut c) = fletch.from_compact(addout); if !module.is_zero() { s = s % module; c = c % module; fletch.init = init % module; } else { fletch.init = init; }; fletch.addout = fletch.to_compact((s, c)); match self.check { Some(chk) => { if fletch.digest(&b"123456789"[..]).unwrap() != chk { println!("{:x?}", fletch.digest(&b"123456789"[..]).unwrap()); Err(CheckBuilderErr::CheckFail) } else { Ok(fletch) } } None => Ok(fletch), } } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Fletcher<Sum: Modnum> { hwidth: usize, module: Sum, init: Sum, addout: Sum::Double, swap: bool, wordspec: WordSpec, mask: Sum::Double, name: Option<String>, } impl<Sum: Modnum> Display for Fletcher<Sum> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.name { Some(n) => write!(f, "{}", n), None => { write!( f, "fletcher width={} module={:#x} init={:#x} addout={:#x} swap={}", 2 * self.hwidth, self.module, self.init, self.addout, self.swap )?; if self.wordspec.word_bytes() != 1 { write!( f, " in_endian={} wordsize={}", self.wordspec.input_endian, self.wordspec.wordsize )?; }; if self.hwidth * 2 > 8 { write!(f, " out_endian={}", self.wordspec.output_endian)?; }; Ok(()) } } } } impl<Sum: Modnum> Fletcher<Sum> { pub fn with_options() -> FletcherBuilder<Sum> { FletcherBuilder { width: None, module: None, init: None, addout: None, swap: None, input_endian: None, output_endian: None, wordsize: None, check: None, name: None, } } fn from_compact(&self, x: Sum::Double) -> (Sum, Sum) { 
let l = Sum::from_double(x & self.mask); let h = Sum::from_double((x >> self.hwidth) & self.mask); if self.swap { (h, l) } else { (l, h) } } fn to_compact(&self, (s, c): (Sum, Sum)) -> Sum::Double { let (l, h) = if self.swap { (c, s) } else { (s, c) }; (Sum::Double::from(l) & self.mask) ^ (Sum::Double::from(h) & self.mask) << self.hwidth } } impl<Sum: Modnum> FromStr for FletcherBuilder<Sum> { fn from_str(s: &str) -> Result<FletcherBuilder<Sum>, CheckBuilderErr> { let mut fletch = Fletcher::<Sum>::with_options(); for x in KeyValIter::new(s) { let (current_key, current_val) = match x { Err(key) => return Err(CheckBuilderErr::MalformedString(key)), Ok(s) => s, }; let fletch_op = match current_key.as_str() { "width" => usize::from_str(&current_val).ok().map(|x| fletch.width(x)), "module" => Sum::from_hex(&current_val).ok().map(|x| fletch.module(x)), "init" => Sum::from_hex(&current_val).ok().map(|x| fletch.init(x)), "addout" => Sum::Double::from_hex(&current_val) .ok() .map(|x| fletch.addout(x)), "swap" => bool::from_str(&current_val).ok().map(|x| fletch.swap(x)), "in_endian" => Endian::from_str(&current_val) .ok() .map(|x| fletch.inendian(x)), "wordsize" => usize::from_str(&current_val) .ok() .map(|x| fletch.wordsize(x)), "out_endian" => Endian::from_str(&current_val) .ok() .map(|x| fletch.outendian(x)), "check" => Sum::Double::from_hex(&current_val) .ok() .map(|x| fletch.check(x)), "name" => Some(fletch.name(&current_val)), _ => return Err(CheckBuilderErr::UnknownKey(current_key)), }; match fletch_op { Some(f) => fletch = f.clone(), None => return Err(CheckBuilderErr::MalformedString(current_key)), } } Ok(fletch) } type Err = CheckBuilderErr; } impl<Sum: Modnum> FromStr for Fletcher<Sum> { fn from_str(s: &str) -> Result<Fletcher<Sum>, CheckBuilderErr> { FletcherBuilder::<Sum>::from_str(s)?.build() } type Err = CheckBuilderErr; } impl<S: Modnum> Digest for Fletcher<S> { type Sum = S::Double; fn init(&self) -> Self::Sum { self.to_compact((self.init, S::zero())) } fn 
dig_word(&self, sum: Self::Sum, word: u64) -> Self::Sum { let (mut s, mut c) = self.from_compact(sum); let modword = S::mod_from(word, &self.module); s = S::add_mod(s, &modword, &self.module); c = S::add_mod(c, &s, &self.module); self.to_compact((s, c)) } fn finalize(&self, sum: Self::Sum) -> Self::Sum { self.add(sum, &self.addout) } fn to_bytes(&self, s: Self::Sum) -> Vec<u8> { self.wordspec.output_to_bytes(s, 2 * self.hwidth) } fn wordspec(&self) -> WordSpec { self.wordspec } } impl<S: Modnum> LinearCheck for Fletcher<S> { type Shift = S; fn init_shift(&self) -> Self::Shift { S::zero() } fn inc_shift(&self, shift: Self::Shift) -> Self::Shift { S::add_mod(shift, &S::one(), &self.module) } fn shift(&self, sum: Self::Sum, shift: &Self::Shift) -> Self::Sum { let (s, mut c) = self.from_compact(sum); let shift_diff = S::mul_mod(s, shift, &self.module); c = S::add_mod(c, &shift_diff, &self.module); self.to_compact((s, c)) } fn add(&self, sum_a: Self::Sum, sum_b: &Self::Sum) -> Self::Sum { let (sa, ca) = self.from_compact(sum_a); let (sb, cb) = self.from_compact(*sum_b); let sum_s = sa.add_mod(&sb, &self.module); let sum_c = ca.add_mod(&cb, &self.module); self.to_compact((sum_s, sum_c)) } fn negate(&self, sum: Self::Sum) -> Self::Sum { let (s, c) = self.from_compact(sum); self.to_compact((s.neg_mod(&self.module), c.neg_mod(&self.module))) } } #[cfg(test)] mod tests { use super::*; use crate::checksum::tests::{check_example, test_find, test_prop, test_shifts}; use std::str::FromStr; #[test] fn adler32() { let adel = Fletcher::<u16>::with_options() .width(32) .init(1) .module(65521) .check(0x091e01de) .build() .unwrap(); test_shifts(&adel); test_find(&adel); test_prop(&adel); check_example(&adel, 0x81bfd25f); let nobel = Fletcher::with_options() .width(32) .init(1u32) .module(65521) .check(0x091e01de) .build() .unwrap(); test_shifts(&nobel); test_find(&nobel); test_prop(&adel); check_example(&nobel, 0x81bfd25f); } #[test] fn fletcher16() { let f16 = 
Fletcher::with_options() .width(16) .module(0xffu8) .check(0x1ede) .build() .unwrap(); test_shifts(&f16); test_find(&f16); test_prop(&f16); check_example(&f16, 0x7815); } #[test] fn fletcher8() { let f8 = Fletcher::<u8>::from_str("width=8 module=f init=0 addout=0 swap=false check=0xc") .unwrap(); test_shifts(&f8); test_prop(&f8); check_example(&f8, 0x6); } }
mod rev; use crate::bitnum::{BitNum, Modnum}; use crate::checksum::{CheckBuilderErr, Digest, LinearCheck}; use crate::endian::{Endian, WordSpec}; use crate::keyval::KeyValIter; use num_traits::{One, Zero}; pub use rev::reverse_fletcher; #[cfg(feature = "parallel")] pub use rev::reverse_fletcher_para; use std::fmt::Display; use std::str::FromStr; #[derive(Clone, Debug)] pub struct FletcherBuilder<Sum: Modnum> { width: Option<usize>, module: Option<Sum>, init: Option<Sum>, addout: Option<Sum::Double>, swap: Option<bool>, input_endian: Option<Endian>, output_endian: Option<Endian>, wordsize: Option<usize>, check: Option<Sum::Double>, name: Option<String>, } impl<S: Modnum> FletcherBuilder<S> { pub fn width(&mut self, w: usize) -> &mut Self { self.width = Some(w); self } pub fn module(&mut self, m: S) -> &mut Self { self.module = Some(m); self } pub fn init(&mut self, i: S) -> &mut Self { self.init = Some(i); self } pub fn addout(&mut self, o: S::Double) -> &mut Self { self.addout = Some(o); self } pub fn swap(&mut self, s: bool) -> &mut Self { self.swap = Some(s); self } pub fn inendian(&mut self, e: Endian) -> &mut Self { self.input_endian = Some(e); self } pub fn wordsize(&mut self, n: usize) -> &mut Self { self.wordsize = Some(n); self } pub fn outendian(&mut self, e: Endian) -> &mut Self { self.output_endian = Some(e); self } pub fn check(&mut self, c: S::Double) -> &mut Self { self.check = Some(c); self } pub fn name(&mut self, n: &str) -> &mut Self { self.name = Some(String::from(n)); self } pub fn build(&self) -> Result<Fletcher<S>, CheckBuilderErr> { let init = self.init.unwrap_or_else(S::zero); let addout = self.addout.unwrap_or_else(S::Double::zero); let hwidth = match self.width { None => return Err(CheckBuilderErr::MissingParameter("width")), Some(w) => { if w % 2 != 0 || w > addout.bits() { return Err(CheckBuilderErr::ValueOutOfRange("width")); } else { w / 2 } } }; let mask = (S::Double::one() << hwidth) - S::Double::one(); let module = 
self.module.unwrap_or_else(S::zero); let wordsize = self.wordsize.unwrap_or(8); if wordsize == 0 || wordsize % 8 != 0 || wordsize > 64 { return Err(CheckBuilderErr::ValueOutOfRange("wordsize")); } let wordspec = WordSpec { input_endian: self.input_endian.unwrap_or(Endian::Big), wordsize, output_endian: self.output_endian.unwrap_or(Endian::Big), }; let mut fletch = Fletcher { hwidth, module, init, addout, swap: self.swap.unwrap_or(false), wordspec, mask, name: self.name.clone(), }; let (mut s, mut c) = fletch.from_compact(addout); if !module.is_zero() { s = s % module; c = c % module; fletch.init = init % module; } else { fletch.init = init; }; fletch.addout = fletch.to_compact((s, c)); match self.check { Some(chk) => { if fletch.digest(&b"123456789"[..]).unwrap() != chk { println!("{:x?}", fletch.digest(&b"123456789"[..]).unwrap()); Err(CheckBuilderErr::CheckFail) } else { Ok(fletch) } } None => Ok(fletch), } } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct Fletcher<Sum: Modnum> { hwidth: usize, module: Sum, init: Sum, addout: Sum::Double, swap: bool, wordspec: WordSpec, mask: Sum::Double, name: Option<String>, } impl<Sum: Modnum> Display for Fletcher<Sum> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.name { Some(n) => write!(f, "{}", n), None => { write!( f, "fletcher width={} module={:#x} init={:#x} addout={:#x} swap={}", 2 * self.hwidth, self.module, self.init, self.addout, self.swap )?; if self.wordspec.word_bytes() != 1 { write!( f, " in_endian={} wordsize={}", self.wordspec.input_endian, self.wordspec.wordsize )?; }; if self.hwidth * 2 > 8 { write!(f, " out_endian={}", self.wordspec.output_endian)?; }; Ok(()) } } } } impl<Sum: Modnum> Fletcher<Sum> { pub fn with_options() -> FletcherBuilder<Sum> { FletcherBuilder { width: None, module: None, init: None, addout: None, swap: None, input_endian: None, output_endian: None, wordsize: None, check: None, name: None, } } fn from_compact(&self, x: Sum::Double) -> (Sum, Sum) { 
let l = Sum::from_double(x & self.mask); let h = Sum::from_double((x >> self.hwidth) & self.mask); if self.swap { (h, l) } else { (l, h) } } fn to_compact(&self, (s, c): (Sum, Sum)) -> Sum::Double { let (l, h) = if self.swap { (c, s) } else { (s, c) }; (Sum::Double::from(l) & self.mask) ^ (Sum::Double::from(h) & self.mask) << self.hwidth } } impl<Sum: Modnum> FromStr for FletcherBuilder<Sum> { fn from_str(s: &str) -> Result<FletcherBuilder<Sum>, CheckBuilderErr> { let mut fletch = Fletcher::<Sum>::with_options(); for x in KeyValIter::new(s) { let (current_key, current_val) =
; let fletch_op = match current_key.as_str() { "width" => usize::from_str(&current_val).ok().map(|x| fletch.width(x)), "module" => Sum::from_hex(&current_val).ok().map(|x| fletch.module(x)), "init" => Sum::from_hex(&current_val).ok().map(|x| fletch.init(x)), "addout" => Sum::Double::from_hex(&current_val) .ok() .map(|x| fletch.addout(x)), "swap" => bool::from_str(&current_val).ok().map(|x| fletch.swap(x)), "in_endian" => Endian::from_str(&current_val) .ok() .map(|x| fletch.inendian(x)), "wordsize" => usize::from_str(&current_val) .ok() .map(|x| fletch.wordsize(x)), "out_endian" => Endian::from_str(&current_val) .ok() .map(|x| fletch.outendian(x)), "check" => Sum::Double::from_hex(&current_val) .ok() .map(|x| fletch.check(x)), "name" => Some(fletch.name(&current_val)), _ => return Err(CheckBuilderErr::UnknownKey(current_key)), }; match fletch_op { Some(f) => fletch = f.clone(), None => return Err(CheckBuilderErr::MalformedString(current_key)), } } Ok(fletch) } type Err = CheckBuilderErr; } impl<Sum: Modnum> FromStr for Fletcher<Sum> { fn from_str(s: &str) -> Result<Fletcher<Sum>, CheckBuilderErr> { FletcherBuilder::<Sum>::from_str(s)?.build() } type Err = CheckBuilderErr; } impl<S: Modnum> Digest for Fletcher<S> { type Sum = S::Double; fn init(&self) -> Self::Sum { self.to_compact((self.init, S::zero())) } fn dig_word(&self, sum: Self::Sum, word: u64) -> Self::Sum { let (mut s, mut c) = self.from_compact(sum); let modword = S::mod_from(word, &self.module); s = S::add_mod(s, &modword, &self.module); c = S::add_mod(c, &s, &self.module); self.to_compact((s, c)) } fn finalize(&self, sum: Self::Sum) -> Self::Sum { self.add(sum, &self.addout) } fn to_bytes(&self, s: Self::Sum) -> Vec<u8> { self.wordspec.output_to_bytes(s, 2 * self.hwidth) } fn wordspec(&self) -> WordSpec { self.wordspec } } impl<S: Modnum> LinearCheck for Fletcher<S> { type Shift = S; fn init_shift(&self) -> Self::Shift { S::zero() } fn inc_shift(&self, shift: Self::Shift) -> Self::Shift { 
S::add_mod(shift, &S::one(), &self.module) } fn shift(&self, sum: Self::Sum, shift: &Self::Shift) -> Self::Sum { let (s, mut c) = self.from_compact(sum); let shift_diff = S::mul_mod(s, shift, &self.module); c = S::add_mod(c, &shift_diff, &self.module); self.to_compact((s, c)) } fn add(&self, sum_a: Self::Sum, sum_b: &Self::Sum) -> Self::Sum { let (sa, ca) = self.from_compact(sum_a); let (sb, cb) = self.from_compact(*sum_b); let sum_s = sa.add_mod(&sb, &self.module); let sum_c = ca.add_mod(&cb, &self.module); self.to_compact((sum_s, sum_c)) } fn negate(&self, sum: Self::Sum) -> Self::Sum { let (s, c) = self.from_compact(sum); self.to_compact((s.neg_mod(&self.module), c.neg_mod(&self.module))) } } #[cfg(test)] mod tests { use super::*; use crate::checksum::tests::{check_example, test_find, test_prop, test_shifts}; use std::str::FromStr; #[test] fn adler32() { let adel = Fletcher::<u16>::with_options() .width(32) .init(1) .module(65521) .check(0x091e01de) .build() .unwrap(); test_shifts(&adel); test_find(&adel); test_prop(&adel); check_example(&adel, 0x81bfd25f); let nobel = Fletcher::with_options() .width(32) .init(1u32) .module(65521) .check(0x091e01de) .build() .unwrap(); test_shifts(&nobel); test_find(&nobel); test_prop(&adel); check_example(&nobel, 0x81bfd25f); } #[test] fn fletcher16() { let f16 = Fletcher::with_options() .width(16) .module(0xffu8) .check(0x1ede) .build() .unwrap(); test_shifts(&f16); test_find(&f16); test_prop(&f16); check_example(&f16, 0x7815); } #[test] fn fletcher8() { let f8 = Fletcher::<u8>::from_str("width=8 module=f init=0 addout=0 swap=false check=0xc") .unwrap(); test_shifts(&f8); test_prop(&f8); check_example(&f8, 0x6); } }
match x { Err(key) => return Err(CheckBuilderErr::MalformedString(key)), Ok(s) => s, }
if_condition
[ { "content": "fn glue_sum(mut s1: u64, mut s2: u64, width: usize, swap: bool) -> u128 {\n\n if swap {\n\n std::mem::swap(&mut s1, &mut s2);\n\n }\n\n (s1 as u128) | ((s2 as u128) << (width / 2))\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 0, "score": 242015.56974271697 }, { "content": "fn split_sum(sum: u128, width: usize, swap: bool) -> (u64, u64) {\n\n let mut lower = sum & ((1 << (width / 2)) - 1);\n\n let mut upper = sum >> (width / 2);\n\n if swap {\n\n std::mem::swap(&mut lower, &mut upper);\n\n }\n\n (lower as u64, upper as u64)\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 1, "score": 241547.31277238642 }, { "content": "fn remove_init(sums: &mut Vec<(BigInt, usize)>, init: &BigInt) {\n\n for (s, l) in sums.iter_mut() {\n\n *s -= init * BigInt::from(*l);\n\n *l = 0;\n\n }\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 2, "score": 190764.25367489242 }, { "content": "// binary gcd algorithm\n\npub fn gcd<N: BitNum>(mut a: N, mut b: N) -> N {\n\n if a.is_zero() {\n\n return b;\n\n }\n\n if b.is_zero() {\n\n return a;\n\n }\n\n let a_shift = a.trail_zeros() as usize;\n\n let b_shift = b.trail_zeros() as usize;\n\n a = a >> a_shift;\n\n b = b >> b_shift;\n\n let common_shift = a_shift.min(b_shift);\n\n loop {\n\n if b > a {\n\n std::mem::swap(&mut a, &mut b);\n\n }\n\n a = a - b;\n\n if a.is_zero() {\n\n return b << common_shift;\n\n }\n\n a = a >> a.trail_zeros() as usize;\n\n }\n\n}\n", "file_path": "delsum-lib/src/factor.rs", "rank": 3, "score": 184427.48690115177 }, { "content": "/// For figuring out what type of integer to use, we need to parse the width from the\n\n/// model string, but to parse the model string, we need to know the integer type,\n\n/// so it is done here separately.\n\n/// We also need the prefix to find out what algorithm to use\n\nfn find_prefix_width(s: &str) -> Result<(&str, usize, &str), CheckBuilderErr> {\n\n let stripped = s.trim_start();\n\n // it is done like 
this to ensure that no non-whitespace (blackspace?) is left at the end of the prefix\n\n let pref = stripped.split_whitespace().next();\n\n let (prefix, rest) = match PREFIXES.iter().find(|x| Some(**x) == pref) {\n\n Some(p) => (*p, &stripped[p.len()..]),\n\n None => return Err(CheckBuilderErr::MalformedString(\"algorithm\".to_owned())),\n\n };\n\n for x in keyval::KeyValIter::new(rest) {\n\n match x {\n\n Err(k) => return Err(CheckBuilderErr::MalformedString(k)),\n\n Ok((k, v)) => {\n\n if &k == \"width\" {\n\n return v\n\n .parse()\n\n .map_err(|_| CheckBuilderErr::MalformedString(k))\n\n .map(|width| (prefix, width, rest));\n\n }\n\n }\n\n }\n\n }\n\n Err(CheckBuilderErr::MissingParameter(\"width\"))\n\n}\n\n\n", "file_path": "delsum-lib/src/lib.rs", "rank": 4, "score": 174267.0218579738 }, { "content": "fn refine_module(module: &mut BigInt, sums: Vec<(BigInt, usize)>) -> Vec<(BigInt, usize)> {\n\n let mut non_zero = Vec::new();\n\n for (s, l) in sums {\n\n if l != 0 {\n\n non_zero.push((s, l));\n\n continue;\n\n }\n\n // if we have l == 0, they don't contain init, and because they also don't contain\n\n // addout, they have to be divisible by module\n\n *module = gcd(module, &s);\n\n }\n\n for ((sa, la), (sb, lb)) in non_zero.iter().zip(non_zero.iter().skip(1)) {\n\n // for x = a*init mod m, y = b*init mod m we can get 0 mod m by calculating\n\n // (b * x + a * y)/gcd(a, b)\n\n let bla = BigInt::from(*la);\n\n let blb = BigInt::from(*lb);\n\n let common = gcd(&bla, &blb);\n\n let mul_sa = sa * blb;\n\n let mul_sb = sb * bla;\n\n *module = gcd(module, &((mul_sa - mul_sb) / common));\n\n }\n\n non_zero\n\n .iter()\n\n .map(|(s, l)| (mod_red(s, module), *l))\n\n .collect()\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 5, "score": 165505.2006127587 }, { "content": "#[derive(Debug)]\n\nstruct PresumSet<Sum: Clone + Eq + Ord + Debug> {\n\n idx: Vec<u32>,\n\n presum: Vec<Vec<Sum>>,\n\n}\n\n\n\nimpl<Sum: Clone + Eq + Ord + Debug + Send + Sync> 
PresumSet<Sum> {\n\n /// Gets a new PresumSet. Gets sorted on construction.\n\n fn new(presum: Vec<Vec<Sum>>) -> Self {\n\n let firstlen = presum[0].len();\n\n // check that all sum arrays are of the same length\n\n for x in presum.iter() {\n\n assert_eq!(firstlen, x.len());\n\n }\n\n // vector of all indices\n\n let mut idxvec: Vec<_> = (0..firstlen as u32).collect();\n\n // get a permutation vector representing the sort of the presum arrays first by value and then by index\n\n\n\n #[cfg(feature = \"parallel\")]\n\n idxvec.par_sort_unstable_by(|a, b| Self::cmp_idx(&presum, *a, &presum, *b).then(a.cmp(&b)));\n\n #[cfg(not(feature = \"parallel\"))]\n", "file_path": "delsum-lib/src/checksum.rs", "rank": 6, "score": 163712.0339077245 }, { "content": "fn find_regular_sum(spec: &RevSpec, sums: &[i128], mut module: u128) -> (u128, i128) {\n\n let width = spec.width;\n\n // init is here actually addout1 + init, which we can only know if we have both values\n\n let maybe_init = spec\n\n .addout\n\n .map(|x| {\n\n spec.init\n\n .map(|y| y as i128 + split_sum(x, width, spec.swap).0 as i128)\n\n })\n\n .flatten();\n\n // delegate to the corresponding modsum function\n\n let sum1_addout = super::super::modsum::find_largest_mod(&sums, maybe_init, &mut module);\n\n (module, sum1_addout)\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 7, "score": 154732.76555769326 }, { "content": "/// Find the parameters of a fletcher algorithm.\n\n///\n\n/// `spec` contains the known parameters of the algorithm (by setting the corresponding values in the builder).\n\n/// `chk_bytes` are pairs of files and their checksums.\n\n/// `verbosity` makes the function output what it is doing.\n\n///\n\n/// The `width` parameter of the builder has to be set.\n\npub fn reverse_fletcher<'a>(\n\n spec: &FletcherBuilder<u64>,\n\n chk_bytes: &[(&'a [u8], Vec<u8>)],\n\n verbosity: u64,\n\n extended_search: bool,\n\n) -> impl Iterator<Item = Result<Fletcher<u64>, CheckReverserError>> + 'a 
{\n\n let spec = spec.clone();\n\n let mut files = chk_bytes.to_owned();\n\n files.sort_unstable_by(|a, b| a.0.len().cmp(&b.0.len()).reverse());\n\n discrete_combos(spec.clone(), extended_search)\n\n .into_iter()\n\n .map(move |x| {\n\n unresult_iter(\n\n reverse_discrete(spec.clone(), files.clone(), x, verbosity).map(|y| y.iter()),\n\n )\n\n })\n\n .flatten()\n\n}\n\n\n\n/// Parallel version of reverse_fletcher.\n\n///\n\n/// It is parallel in the sense that there are two threads, for swap=false and swap=true, if it is not given,\n\n/// so don't expect too much speedup.\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 8, "score": 154349.35737290175 }, { "content": "fn remove_inits(init: &Poly, polys: &mut [InitPoly]) {\n\n for (p, l) in polys {\n\n match l {\n\n InitPlace::Single(d) => {\n\n p.pin_mut().add_to(&shift(init, 8 * *d as i64));\n\n *l = InitPlace::None;\n\n }\n\n // note: this branch shouldn't happen, but it is also no problem if it happens\n\n InitPlace::None => (),\n\n // this is not really a problem either, but I will not bother implementing it\n\n InitPlace::Pair(_, _) => {\n\n panic!(\"Internal Error: remove_inits should not receive Pair Inits\")\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 9, "score": 151125.76380786888 }, { "content": "#[cfg(feature = \"parallel\")]\n\npub fn reverse_fletcher_para<'a>(\n\n spec: &FletcherBuilder<u64>,\n\n chk_bytes: &[(&'a [u8], Vec<u8>)],\n\n verbosity: u64,\n\n extended_search: bool,\n\n) -> impl ParallelIterator<Item = Result<Fletcher<u64>, CheckReverserError>> + 'a {\n\n let spec = spec.clone();\n\n let mut files = chk_bytes.to_owned();\n\n files.sort_unstable_by(|a, b| a.0.len().cmp(&b.0.len()).reverse());\n\n discrete_combos(spec.clone(), extended_search)\n\n .into_par_iter()\n\n .map(move |x| {\n\n unresult_iter(\n\n reverse_discrete(spec.clone(), files.clone(), x, verbosity).map(|y| y.iter()),\n\n )\n\n .par_bridge()\n\n })\n\n .flatten()\n\n}\n\n\n", 
"file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 10, "score": 150230.16820378436 }, { "content": "// implementation of probabilistic rabin-miller primality test\n\nfn is_prob_prime<N: FactorNum>(n: N) -> bool {\n\n if (n & N::one()).is_zero() {\n\n return false;\n\n }\n\n if n < N::from(128u8) {\n\n // note: match instead of unwrap because of some debug trait missing\n\n let nu8 = match n.try_into() {\n\n Ok(x) => x,\n\n Err(_) => unreachable!(),\n\n };\n\n return PRIMES.binary_search(&nu8).is_ok();\n\n }\n\n let n1 = n - N::one();\n\n let mon = MonContext::new(n);\n\n let trail_zero = n1.trail_zeros() as usize;\n\n let d = n1 >> trail_zero;\n\n let mut rng = thread_rng();\n\n let minus_one = n.mod_neg(mon.one);\n\n // 32 rounds for a 2^-64 probability of false positive\n\n 'a: for _ in 0..32 {\n", "file_path": "delsum-lib/src/factor.rs", "rank": 11, "score": 142109.03328893706 }, { "content": "fn discrete_combos(spec: FletcherBuilder<u64>, extended_search: bool) -> Vec<(bool, WordSpec)> {\n\n let swap = spec\n\n .swap\n\n .map(|x| vec![x])\n\n .unwrap_or_else(|| vec![false, true]);\n\n let wordspecs = wordspec_combos(\n\n spec.wordsize,\n\n spec.input_endian,\n\n spec.output_endian,\n\n spec.width.unwrap(),\n\n extended_search,\n\n );\n\n cart_prod(&swap, &wordspecs)\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 12, "score": 140047.01638195044 }, { "content": "/// made this function thinking i'd need this,\n\n/// turns out i didn't\n\n/// i still have an emotional bond to it, so i won't delete it\n\nfn perfect_power<N: FactorNum>(mut n: N) -> (N, u8) {\n\n let mut current_power = 1;\n\n // PRIMES is in P or something like that, idk im no number theorist\n\n for p in PRIMES.iter() {\n\n if (n >> *p as usize).is_zero() {\n\n break;\n\n }\n\n while let Ok(b) = get_exact_root(n, *p) {\n\n current_power *= p;\n\n n = b;\n\n }\n\n }\n\n (n, current_power)\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 13, "score": 
139404.3740463946 }, { "content": "#[derive(Clone, Debug)]\n\nstruct PrefactorMod {\n\n unknown: BigInt,\n\n possible: BigInt,\n\n module: BigInt,\n\n}\n\n\n\nimpl PrefactorMod {\n\n fn empty() -> PrefactorMod {\n\n PrefactorMod {\n\n module: one(),\n\n unknown: one(),\n\n possible: zero(),\n\n }\n\n }\n\n fn from_sum(sum: &BigInt, power: usize, module: &mut BigInt) -> Option<PrefactorMod> {\n\n let bpower = BigInt::from(power);\n\n // this basically calculates sum*power^-1, but adjusting module if there are no solutions\n\n // and keeping in mind that there can be multiple solutions (which the unknown var keeps track of)\n\n let (possible, unknown) = partial_mod_div(sum, &bpower, module);\n\n if module.is_one() {\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 14, "score": 135939.04924131642 }, { "content": "// modular reduction, because % is just wrong\n\nfn mod_red(n: &BigInt, module: &BigInt) -> BigInt {\n\n if module.is_zero() {\n\n // yes, n modulo 0 is n and i will die on this hill\n\n n.clone()\n\n } else {\n\n let k = n % module;\n\n if k < zero() {\n\n module + k\n\n } else {\n\n k\n\n }\n\n }\n\n}\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 15, "score": 135656.30555029184 }, { "content": "fn find_init(\n\n maybe_init: &Option<BigInt>,\n\n module: &mut BigInt,\n\n sums: Vec<(BigInt, usize)>,\n\n) -> PrefactorMod {\n\n if module.is_one() {\n\n return PrefactorMod::empty();\n\n };\n\n let mut ret = PrefactorMod::new_init(maybe_init, module);\n\n for (p, l) in sums {\n\n // get the set of inits that solve l*init ≡ p mod module\n\n let file_solutions = PrefactorMod::from_sum(&p, l, module);\n\n // merge the solutions with the other solutions\n\n ret = match file_solutions.map(|f| ret.merge(f)) {\n\n Some(valid) => valid,\n\n None => return PrefactorMod::empty(),\n\n }\n\n }\n\n ret\n\n}\n\n// describes a set of solutions for unknown*possible % module\n\n// the `unknown` parameter divides module and captures the fact that there\n\n// 
can be multiple solutions for unknown*possible mod `module` because we only\n\n// know possible modulo (module / unknown)\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 16, "score": 132123.84404092148 }, { "content": "/// A checksum that also has some notion of linearity.\n\n///\n\n/// What does linearity mean here? In a mathematically pure world, it would mean\n\n/// that you could add the texts in some way (XOR for crc) and that would be the\n\n/// same as adding (XORing) both checksums.\n\n/// However, we live in a world that needs practical considerations, so it's not as clean.\n\n/// Mostly, this is skewed by `init` and `finalize`.\n\n///\n\n/// This trait adds another type, the `Shift` type.\n\n/// This acts, when applied to an unfinalized sum in the `shift` function, as if appending\n\n/// `n` 0s to the summed text. For example, in a Fletcher sum, this would simply be an integer\n\n/// containing `n` and applying the shift corresponds to adding the first sum `n` times to the second one, possible in\n\n/// constant time. However, in the crc case, this is not possible in constant time just using\n\n/// the integer containing `n`. 
In this case, the value of of `x^{8n}` reduced by the generator is stored\n\n/// and the shift is applied using polynomial multiplication modulo the generator.\n\n///\n\n/// The assumptions are here (the `self`s are omitted for clarity):\n\n/// * `add(a,b)` forms an abeliean group with `negate(a)` as inverse (hereafter, the sum value 0 will be equal to `add(init(), negate(init()))`)\n\n/// * `shift(s, shift_n(1)) == dig(s, 0u8)`\n\n/// * `shift(s, shift_n(1))` is bijective in the set of all valid `Sum` values\n\n/// * `shift(shift(s, shift_n(a)), shift_n(b)) == shift(s, shift_n(a+b))`\n\n/// * `add(dig_word(s, 0), dig_word(r, 0)) == dig_word(add(s, r), 0)`\n\n/// * `dig_word(s, k) == dig_word(0, k) + dig_word(s, 0)` (consequently, `dig_word(0, 0) == 0`)\n\n/// * for all sums `s`, `add(finalize(s), negate(s))` is constant (finalize adds a constant value to the sum)\n\n/// * all methods without default implementations (including those from `Digest`) should run in constant time (assuming constant `Shift`, `Sum` types)\n\n///\n\n/// Basically, it is a graded ring or something idk.\n\npub trait LinearCheck: Digest + Send + Sync {\n\n /// The Shift type (see trait documentation for more).\n\n type Shift: Clone;\n\n /// The initial shift corresponding to the identity shift of 0 (see trait documentation for more).\n\n fn init_shift(&self) -> Self::Shift;\n\n /// Increments the shift by one (see trait documentation for more).\n\n fn inc_shift(&self, shift: Self::Shift) -> Self::Shift;\n\n /// Applies a shift to a sum (see trait documentation for more).\n\n fn shift(&self, sum: Self::Sum, shift: &Self::Shift) -> Self::Sum;\n\n /// Adds two sums together (see trait documentation for more).\n\n fn add(&self, sum_a: Self::Sum, sum_b: &Self::Sum) -> Self::Sum;\n\n /// Gets inverse in the abelian group of `add` (see trait documentation for more).\n\n fn negate(&self, sum: Self::Sum) -> Self::Sum;\n\n /// Acts as if applying `dig_word(s, 0)` `n` times to to `s` (see trait 
documentation for more).\n\n ///\n\n /// Please implement more efficient (equivalent) implementation for each type if possible.\n\n fn shift_n(&self, n: usize) -> Self::Shift {\n\n let mut shift = self.init_shift();\n\n for _ in 0..n {\n\n shift = self.inc_shift(shift);\n", "file_path": "delsum-lib/src/checksum.rs", "rank": 17, "score": 129454.24285667556 }, { "content": "// from b*x ≡ a mod m, try to calculate x mod m/y where y is the second return value\n\nfn partial_mod_div(a: &BigInt, b: &BigInt, module: &mut BigInt) -> (BigInt, BigInt) {\n\n let common = gcd(&b, &module);\n\n // if we want b*x ≡ a mod m, and c divides both b and m,\n\n // then a must be divisible by c as well\n\n // if that is not the case, we determine the maximal module where this is true\n\n if !(a % &common).is_zero() {\n\n // assume for simplicity that module is a prime power p^k\n\n // then we have b = d*p^n, a = e*p^m with d, e not divisible by p\n\n // then gcd(b, p^k) = p^n (because n has to be smaller than k)\n\n // if m < n, then b doesn't divide a and we try to adjust k so that it does\n\n // this can be done by simply setting m = k so that we now have 0*x ≡ 0 mod p^m\n\n let mut x = common.clone() / gcd(a, &common);\n\n // this loop tries to calculate\n\n // if m < n { k = m }\n\n // without having to factor the number to obtain the prime powers\n\n // this works by first determining p^m by squaring and gcd'ing the product of all p's where\n\n // m < n, so that we have the maximum powers that divide module\n\n loop {\n\n let new_x = gcd(&(&x * &x), &module);\n\n if new_x == x {\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 18, "score": 128828.02611602921 }, { "content": "// note that pollard's p-1 algorithm may return composite factors, so we still have to fall back\n\n// to trial division if we detect that the number is not prime, but hopefully on a smaller number\n\nfn trial_div<N: FactorNum>(mut n: N, sieve: &mut PrimeSieve, mut bound: u64) -> Vec<(u128, u8)> {\n\n 
let mut ret = Vec::new();\n\n for p in sieve.iter() {\n\n if bound < p {\n\n break;\n\n }\n\n let mut mult = 0u8;\n\n while (n % p.into()).is_zero() {\n\n n = n / p.into();\n\n mult += 1;\n\n }\n\n if mult > 0 {\n\n let sqrt = get_exact_root(n, 2).unwrap_or_else(|x| x).as_u64();\n\n bound = sqrt.min(bound);\n\n ret.push((p as u128, mult));\n\n }\n\n }\n\n if !n.is_one() {\n\n ret.push((n.into(), 1));\n\n }\n\n ret\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 19, "score": 127397.91217028257 }, { "content": "fn cond_reverse(width: u8, value: u128, refout: bool) -> u128 {\n\n if refout {\n\n value.reverse_bits() >> (128 - width)\n\n } else {\n\n value\n\n }\n\n}\n\n\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 20, "score": 126056.42509444754 }, { "content": "/// A helper function for calling the find_segments function with strings arguments\n\nfn find_segment_str<L>(\n\n spec: &str,\n\n bytes: &[Vec<u8>],\n\n sum: &[Vec<u8>],\n\n start_range: SignedInclRange,\n\n end_range: SignedInclRange,\n\n) -> Result<Vec<RangePair>, CheckBuilderErr>\n\nwhere\n\n L: LinearCheck + FromStr<Err = CheckBuilderErr>,\n\n L::Sum: BitNum,\n\n{\n\n let spec = L::from_str(spec)?;\n\n let sum_array: Vec<_> = sum\n\n .iter()\n\n .map(|x| spec.wordspec().bytes_to_output(x))\n\n .collect();\n\n Ok(spec.find_segments_range(bytes, &sum_array, start_range, end_range))\n\n}\n\n\n\n/// The available checksum types\n\nstatic PREFIXES: &[&str] = &[\"fletcher\", \"crc\", \"modsum\"];\n\n\n", "file_path": "delsum-lib/src/lib.rs", "rank": 21, "score": 125247.03163768683 }, { "content": "fn reorder_poly_bytes(bytes: &[u8], refin: bool, wordspec: WordSpec) -> Vec<u8> {\n\n wordspec\n\n .iter_words(bytes)\n\n .rev()\n\n .map(|n| {\n\n let n_ref = if refin {\n\n n.reverse_bits() >> (64 - wordspec.wordsize)\n\n } else {\n\n n\n\n };\n\n int_to_bytes(n_ref, Endian::Little, wordspec.wordsize)\n\n })\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "delsum-lib/src/crc/rev.rs", 
"rank": 22, "score": 123517.47171392848 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\nstruct Check {\n\n /// Print some messages indicating progress\n\n #[structopt(short, long, parse(from_occurrences))]\n\n verbose: u64,\n\n /// Do more parallelism, in turn using more memory\n\n #[structopt(short, long)]\n\n parallel: bool,\n\n /// Use the checksum algorithm given by the model string\n\n #[structopt(short, long)]\n\n model: Option<String>,\n\n /// The hexadecimal offset of the first byte to be checksummed (can be negative to indicate offset from end)\n\n #[structopt(short = \"S\", long, parse(try_from_str = read_signed_maybe_hex))]\n\n start: Option<isize>,\n\n /// The hexadecimal offset of the last byte to be checksummed (can be negative to indicate offset from end)\n\n #[structopt(short = \"E\", long, parse(try_from_str = read_signed_maybe_hex))]\n\n end: Option<isize>,\n\n /// Read model strings line-by-line from given file\n\n #[structopt(short = \"M\", long)]\n\n model_file: Option<OsString>,\n\n /// The files of which to find checksummed parts\n\n files: Vec<OsString>,\n\n}\n", "file_path": "src/bin.rs", "rank": 23, "score": 122590.07628677628 }, { "content": "pub fn find_checksum(strspec: &str, bytes: &[&[u8]]) -> Result<Vec<Vec<u8>>, CheckBuilderErr> {\n\n let (prefix, width, rest) = find_prefix_width(strspec)?;\n\n // look, it's not really useful to it in this case, but i really like how this looks\n\n match (width, prefix) {\n\n (1..=8, \"crc\") => get_checksums::<CRC<u8>>(rest, bytes, width),\n\n (9..=16, \"crc\") => get_checksums::<CRC<u16>>(rest, bytes, width),\n\n (17..=32, \"crc\") => get_checksums::<CRC<u32>>(rest, bytes, width),\n\n (33..=64, \"crc\") => get_checksums::<CRC<u64>>(rest, bytes, width),\n\n (65..=128, \"crc\") => get_checksums::<CRC<u128>>(rest, bytes, width),\n\n (1..=8, \"modsum\") => get_checksums::<ModSum<u8>>(rest, bytes, width),\n\n (9..=16, \"modsum\") => 
get_checksums::<ModSum<u16>>(rest, bytes, width),\n\n (17..=32, \"modsum\") => get_checksums::<ModSum<u32>>(rest, bytes, width),\n\n (33..=64, \"modsum\") => get_checksums::<ModSum<u64>>(rest, bytes, width),\n\n (1..=16, \"fletcher\") => get_checksums::<Fletcher<u8>>(rest, bytes, width),\n\n (17..=32, \"fletcher\") => get_checksums::<Fletcher<u16>>(rest, bytes, width),\n\n (33..=64, \"fletcher\") => get_checksums::<Fletcher<u32>>(rest, bytes, width),\n\n (65..=128, \"fletcher\") => get_checksums::<Fletcher<u64>>(rest, bytes, width),\n\n _ => Err(CheckBuilderErr::ValueOutOfRange(\"width\")),\n\n }\n\n}\n\n\n", "file_path": "delsum-lib/src/lib.rs", "rank": 24, "score": 122288.59655874922 }, { "content": "fn check(opts: &Check) {\n\n let files = read_files(&opts.files);\n\n let start = opts.start.unwrap_or(0);\n\n let end = opts.end.unwrap_or(-1);\n\n let ranged_files: Vec<_> = apply_range_to_file(&files, start, end);\n\n let models = read_models(&opts.model, &opts.model_file);\n\n let is_single = models.len() <= 1;\n\n #[cfg(feature = \"parallel\")]\n\n let parallel = opts.parallel;\n\n #[cfg(not(feature = \"parallel\"))]\n\n let parallel = false;\n\n let print_sums = |model| {\n\n let checksums = find_checksum(model, &ranged_files).unwrap_or_else(|err| {\n\n eprintln!(\"Could not process model '{}': {}\", model, err);\n\n exit(1);\n\n });\n\n if is_single {\n\n println!(\n\n \"{}\",\n\n checksums\n", "file_path": "src/bin.rs", "rank": 25, "score": 119197.3497346405 }, { "content": "#[derive(Debug, Clone)]\n\nstruct RevResult {\n\n inits: PrefactorMod,\n\n addout1: BigInt,\n\n addout2: (BigInt, usize),\n\n modules: Vec<BigInt>,\n\n width: usize,\n\n swap: bool,\n\n wordspec: WordSpec,\n\n}\n\n\n\nimpl RevResult {\n\n fn iter(self) -> impl Iterator<Item = Fletcher<u64>> {\n\n let RevResult {\n\n inits,\n\n addout1,\n\n addout2,\n\n modules,\n\n width,\n\n swap,\n\n wordspec,\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 26, "score": 118288.80342260315 }, 
{ "content": "struct RevSpec {\n\n width: usize,\n\n addout: Option<u128>,\n\n init: Option<u64>,\n\n module: Option<u64>,\n\n swap: bool,\n\n wordspec: WordSpec,\n\n}\n\n\n\n// contains the information needed for iterating over the possible algorithms\n\n// `inits` is a solution set of the form `a*x ≡ y mod m`, and init still has to\n\n// be subtracted from addout1 and addout2\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 27, "score": 118284.61403993233 }, { "content": "/// Find the parameters of a CRC algorithm.\n\n///\n\n/// `spec` contains the known parameters of the algorithm (by setting the corresponding values in the builder).\n\n/// `chk_bytes` are pairs of files and their checksums.\n\n/// `verbosity` makes the function output what it is doing.\n\n///\n\n/// The `width` parameter of the builder has to be set.\n\npub fn reverse_crc<'a>(\n\n spec: &CRCBuilder<u128>,\n\n chk_bytes: &'a [(&[u8], Vec<u8>)],\n\n verbosity: u64,\n\n extended_search: bool,\n\n) -> impl Iterator<Item = Result<CRC<u128>, CheckReverserError>> + 'a {\n\n let spec = spec.clone();\n\n let mut files = chk_bytes.to_owned();\n\n files.sort_unstable_by(|a, b| a.0.len().cmp(&b.0.len()).reverse());\n\n let ref_combinations: Vec<_> = discrete_combos(&spec, extended_search);\n\n ref_combinations\n\n .into_iter()\n\n .map(move |((refin, refout), wordspec)| {\n\n unresult_iter(reverse(\n\n &spec,\n\n files.clone(),\n\n verbosity,\n\n refin,\n\n refout,\n\n wordspec,\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 28, "score": 118236.16612917889 }, { "content": "/// Find the parameters of a modsum algorithm.\n\n///\n\n/// `spec` contains the known parameters of the algorithm (by setting the corresponding values in the builder).\n\n/// `chk_bytes` are pairs of files and their checksums.\n\n/// `verbosity` makes the function output what it is doing\n\n///\n\n/// The `width` parameter of the builder has to be set.\n\npub fn reverse_modsum<'a>(\n\n spec: &ModSumBuilder<u64>,\n\n 
chk_bytes: &'a [(&[u8], Vec<u8>)],\n\n verbosity: u64,\n\n extended_search: bool,\n\n) -> impl Iterator<Item = Result<ModSum<u64>, CheckReverserError>> + 'a {\n\n let spec = spec.clone();\n\n wordspec_combos(\n\n spec.wordsize,\n\n spec.input_endian,\n\n spec.output_endian,\n\n spec.width.unwrap(),\n\n extended_search,\n\n )\n\n .into_iter()\n\n .map(move |wordspec| {\n\n let rev = match spec.width {\n\n None => Err(CheckReverserError::MissingParameter(\"width\")),\n\n Some(width) => {\n\n let chk_words: Vec<_> = chk_bytes\n", "file_path": "delsum-lib/src/modsum/rev.rs", "rank": 29, "score": 118236.16612917889 }, { "content": "// finds x in x^e = n, returns Ok if exact, else Err\n\nfn get_exact_root<N: FactorNum>(n: N, e: u8) -> Result<N, N> {\n\n let mut n_shift = n;\n\n let mut shift_amount = 0u8;\n\n while !n_shift.is_zero() {\n\n n_shift = n_shift >> e as usize;\n\n shift_amount += 1;\n\n }\n\n let mut estimate = N::zero();\n\n for s in (0..shift_amount).rev() {\n\n estimate = estimate << 1;\n\n match (estimate + N::one()).checked_pow(e) {\n\n None => continue,\n\n Some(x) => {\n\n if x <= (n >> (s * e) as usize) {\n\n estimate = estimate + N::one()\n\n }\n\n }\n\n }\n\n }\n\n let p = estimate.checked_pow(e).unwrap();\n\n if p == n {\n\n Ok(estimate)\n\n } else {\n\n Err(estimate)\n\n }\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 30, "score": 115853.46271254579 }, { "content": "#[cfg(feature = \"parallel\")]\n\npub fn reverse_crc_para<'a>(\n\n spec: &CRCBuilder<u128>,\n\n chk_bytes: &'a [(&[u8], Vec<u8>)],\n\n verbosity: u64,\n\n extended_search: bool,\n\n) -> impl ParallelIterator<Item = Result<CRC<u128>, CheckReverserError>> + 'a {\n\n let spec = spec.clone();\n\n let mut files = chk_bytes.to_owned();\n\n files.sort_unstable_by(|a, b| a.0.len().cmp(&b.0.len()).reverse());\n\n let ref_combinations: Vec<_> = discrete_combos(&spec, extended_search);\n\n ref_combinations\n\n .into_par_iter()\n\n .map(move |((refin, refout), wordspec)| {\n\n 
unresult_iter(reverse(\n\n &spec,\n\n files.clone(),\n\n verbosity,\n\n refin,\n\n refout,\n\n wordspec,\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 31, "score": 115121.74813207609 }, { "content": "// describes a set of solutions for unknown*possible % hull\n\nstruct PrefactorMod {\n\n unknown: PolyPtr,\n\n possible: PolyPtr,\n\n hull: PolyPtr,\n\n}\n\n\n\nimpl PrefactorMod {\n\n fn empty() -> Self {\n\n PrefactorMod {\n\n unknown: new_poly(&[1]),\n\n possible: new_zero(),\n\n hull: new_poly(&[1]),\n\n }\n\n }\n\n fn new_init(maybe_init: &Option<PolyPtr>, hull: &Poly) -> Self {\n\n // if we already have init, we can use that for our solution here, otherwise use the\n\n // set of all possible solutions\n\n let (unknown, possible) = match maybe_init {\n\n None => (copy_poly(hull), new_zero()),\n\n Some(init) => (new_poly(&[1]), copy_poly(init)),\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 32, "score": 108610.0569370414 }, { "content": "// For understanding the reversing process, one must first look what the sum looks like.\n\n// If one has a file [a, b, c, d, e] of 5 bytes, then the regular checksum will be in the form\n\n// (init + a + b + c + d + e + addout1) mod m\n\n// and the cumulative one will be\n\n// (5*init + 5*a + 4*b + 3*c + 2*d * e + addout2) mod m\n\n// Because we also know the file, we can subtract a + b + c + d + e or 5a + 4b + 3c + 2d + 1e and get\n\n// (init + addout1) mod m\n\n// (5*init + addout2) mod m\n\n// Note that the notation `mod` here does not mean that the result is 0 <= x < m, just that the difference\n\n// to the unreduced form is a multiple of `m`.\n\n// So we can just subtract these values without knowing the value of m.\n\n//\n\n// We will ignore the regular sum for now because that's the easy part and is the same as in modsum with addout = addout1 + init.\n\n//\n\n// Now assume that we do not know addout2 or init, but have three files of lengths 2, 3 and 5.\n\n// We can therefore get the values (5*init + addout2) 
mod m, (3*init + addout2) mod m and (2*init + addout2) mod m.\n\n// If we take the differences of the first two and the last two, we get x = 2*init mod m and y = init mod m.\n\n//\n\n// So now we can calculate x - 2*y to get a result that is (0 mod m), which means that the result is divisible by m.\n\n// We can just assume for now that we actually found m and adjust m later if we found that to be wrong.\n\n// If m is zero, that is bad luck and we return an error that the files were likely too similar.\n\n//\n\n// Now that we have a candidate for m, we can start looking for init and addout.\n\n// Say we found m = 4 and x = 2*init = 2 mod 4.\n\n// From that, we can infer that init = 1 mod 4 or init = 3 mod 4 (see PrefactorMod for how exactly that is done).\n\n// Finding out addout2 is now as easy as subtracting 5*init from (5*init + addout2) mod m.\n\nfn reverse(\n\n spec: RevSpec,\n\n chk_bytes: Vec<(impl Iterator<Item = u64>, u128)>,\n\n verbosity: u64,\n\n) -> Result<RevResult, CheckReverserError> {\n\n let log = |s| {\n\n if verbosity > 0 {\n\n eprintln!(\"<fletcher> {}\", s);\n\n }\n\n };\n\n let width = spec.width;\n\n let swap = spec.swap;\n\n let wordspec = spec.wordspec;\n\n let (min, max, mut cumusums, regsums) = summarize(chk_bytes, width, swap);\n\n log(\"finding parameters of lower sum\");\n\n // finding the parameters of the lower sum is pretty much a separate problem already\n\n // handled in modsum, so we delegate to that\n\n let module = spec.module.unwrap_or(0) as u128;\n\n let (module, addout1) = find_regular_sum(&spec, &regsums, module);\n\n let mut module = BigInt::from(module);\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 33, "score": 107668.79911578163 }, { "content": "fn summarize(\n\n chks: Vec<(impl Iterator<Item = u64>, u128)>,\n\n width: usize,\n\n swap: bool,\n\n) -> (u128, u128, Vec<(BigInt, usize)>, Vec<i128>) {\n\n let mut regsums = Vec::new();\n\n let mut cumusums = Vec::new();\n\n let mut min = 2;\n\n for (words, chk) 
in chks {\n\n let (s1, s2) = split_sum(chk, width, swap);\n\n min = min.max(s1 as u128 + 1);\n\n min = min.max(s2 as u128 + 1);\n\n let mut current_sum: BigInt = zero();\n\n let mut cumusum: BigInt = zero();\n\n let mut size = 0usize;\n\n for word in words {\n\n size += 1;\n\n current_sum += BigInt::from(word);\n\n cumusum += &current_sum;\n\n }\n\n let (check1, check2) = split_sum(chk, width, swap);\n\n regsums.push(\n\n i128::try_from(current_sum).expect(\"Unexpected overflow in sum\") - check1 as i128,\n\n );\n\n cumusums.push((BigInt::from(check2) - cumusum, size));\n\n }\n\n let max = 1 << (width / 2);\n\n (min, max, cumusums, regsums)\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 34, "score": 107654.84066053215 }, { "content": "/// A basic trait for a checksum where\n\n/// * init gives an initial state\n\n/// * dig_word processes a single word\n\n/// * finalize is applied to get the final sum after all words are processed.\n\n///\n\n/// They should be implemented in a way such that the digest default implementation\n\n/// corresponds to calculating the checksum.\n\n///\n\n/// Unlike LinearCheck, it is not really required to be linear yet, but in\n\n/// context of this application, there is really no use only implementing this.\n\npub trait Digest {\n\n /// A type that holds the checksum.\n\n ///\n\n /// Note that in this application, a separate state type that holds the interal state\n\n /// and gets converted to a Sum by finalize\n\n /// is not really feasable because of the operations LinearCheck would need to do\n\n /// both on Sums and interal States, so a single Sum type must be enough.\n\n type Sum: Clone + Eq + Ord + Debug + Send + Sync + Checksum;\n\n /// Gets an initial sum before the words are processed through the sum.\n\n ///\n\n /// For instance in the case of crc, the sum type is some integer and the returned value from\n\n /// this function could be 0xffffffff (e.g. 
in the bzip2 crc).\n\n fn init(&self) -> Self::Sum;\n\n /// Processes a single word from the text.\n\n ///\n\n /// For a crc, this corresponds to shifting, adding the word and reducing.\n\n fn dig_word(&self, sum: Self::Sum, word: u64) -> Self::Sum;\n\n /// After all words are read, this function is called to do some finalization.\n\n ///\n\n /// In the case of crc, this corresponds to adding a constant at the end\n", "file_path": "delsum-lib/src/checksum.rs", "rank": 35, "score": 107316.37902049487 }, { "content": "pub fn read_signed_maybe_hex(s: &str) -> Result<isize, ParseIntError> {\n\n s.strip_prefix(\"0x\")\n\n .map(|s0x| isize::from_str_radix(s0x, 16))\n\n .unwrap_or_else(|| s.parse())\n\n}\n\n\n\nimpl FromStr for SignedInclRange {\n\n // should be enough for now\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let from_maybe_hex = |s: &str| match s {\n\n \"\" => Ok(None),\n\n otherwise => read_signed_maybe_hex(otherwise)\n\n .map(Some),\n\n };\n\n let split = s\n\n .split(':')\n\n .map(from_maybe_hex)\n\n .collect::<Result<Vec<_>, _>>()\n", "file_path": "delsum-lib/src/utils.rs", "rank": 36, "score": 106241.9833178458 }, { "content": "fn find_init(\n\n maybe_init: &Option<PolyPtr>,\n\n mut hull: Pin<&mut Poly>,\n\n polys: Vec<InitPoly>,\n\n) -> PrefactorMod {\n\n if deg(&hull) <= 0 {\n\n return PrefactorMod::empty();\n\n }\n\n let mut ret = PrefactorMod::new_init(maybe_init, &hull);\n\n let mut power = MemoPower::new(&hull);\n\n for (p, l) in polys {\n\n power.update_init_fac(&l);\n\n let file_solutions = PrefactorMod::new_file(p, &mut power, hull.as_mut());\n\n ret = match file_solutions\n\n .map(|f| ret.merge(f, hull.as_mut()))\n\n .flatten()\n\n {\n\n Some(valid) => valid,\n\n None => return PrefactorMod::empty(),\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 37, "score": 104799.04111931726 }, { "content": "fn reverse_discrete(\n\n spec: FletcherBuilder<u64>,\n\n chk_bytes: 
Vec<(&[u8], Vec<u8>)>,\n\n loop_element: (bool, WordSpec),\n\n verbosity: u64,\n\n) -> Result<RevResult, CheckReverserError> {\n\n let width = spec\n\n .width\n\n .ok_or(CheckReverserError::MissingParameter(\"width\"))?;\n\n let wordspec = loop_element.1;\n\n let chk_words: Vec<_> = chk_bytes\n\n .iter()\n\n .map(|(f, c)| {\n\n (\n\n wordspec.iter_words(*f),\n\n bytes_to_int::<u128>(c, wordspec.output_endian),\n\n )\n\n })\n\n .collect();\n\n let rev = RevSpec {\n\n width,\n\n addout: spec.addout,\n\n init: spec.init,\n\n module: spec.module,\n\n swap: loop_element.0,\n\n wordspec: loop_element.1,\n\n };\n\n reverse(rev, chk_words, verbosity)\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 38, "score": 103997.5849503318 }, { "content": "fn remove_addout2(\n\n spec: &RevSpec,\n\n mut sums: Vec<(BigInt, usize)>,\n\n module: &BigInt,\n\n) -> (Vec<(BigInt, usize)>, (BigInt, usize)) {\n\n let width = spec.width;\n\n let swap = spec.swap;\n\n let maybe_addout = spec\n\n .addout\n\n .map(|x| BigInt::from(split_sum(x, width, swap).1));\n\n let mut ret_vec = Vec::new();\n\n let mut prev = sums\n\n .pop()\n\n .expect(\"Internal Error: Zero-length vector given to remove_addout2\");\n\n // note: this variable is actually (x,y) where x = addout2 + y*init because we do not know\n\n // init yet\n\n let addout2 = match &maybe_addout {\n\n Some(addout) => {\n\n // if we already know addout, we can use the first file for determining\n\n // the module or init better\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 39, "score": 103997.5849503318 }, { "content": "/// For the modsum, we need a wider type for temporary reduction modulo some number,\n\n/// so this is implemented in this type (and there's probably no need for an u128 ModSum anyway)\n\npub trait Modnum: BitNum {\n\n type Double: BitNum + ops::Rem<Output = Self::Double> + From<Self> + TryInto<Self>;\n\n /// cuts Self::Double in half (ignores the upper half of bits)\n\n fn from_double(n: 
Self::Double) -> Self {\n\n let masked_n = n % (Self::Double::one() << (n.bits() / 2));\n\n match masked_n.try_into() {\n\n Ok(k) => k,\n\n Err(_) => panic!(\"Half of Double does not fit into original type!\"),\n\n }\n\n }\n\n /// add numbers modulo some other number (if 0 then the modulo is 2^n where n is the number of bits)\n\n fn add_mod(self, rhs: &Self, modulo: &Self) -> Self {\n\n let dself = Self::Double::from(self);\n\n let drhs = Self::Double::from(*rhs);\n\n Self::from_double(if modulo.is_zero() {\n\n dself + drhs\n\n } else {\n\n (dself + drhs) % Self::Double::from(*modulo)\n\n })\n\n }\n", "file_path": "delsum-lib/src/bitnum.rs", "rank": 40, "score": 97627.44463142855 }, { "content": "/// A stringy function for determining which segments of a file have a given checksum.\n\n///\n\n/// It is given\n\n/// * a string that models a checksum algorithm\n\n/// * a vector of bytes slices (each slice containing the bytes of a file)\n\n/// * a comma-separated string (without whitespace) containing target checksums for each file\n\n/// * a parameter indicating whether the ends of the segments are relative to the start or the end of the file\n\n///\n\n/// # The Model String\n\n/// A model string is generally of the form\n\n/// ```text\n\n/// [algorithm] width=[number] {more parameters}\n\n/// ```\n\n/// The `algorithm` parameter is either `fletcher`, `crc` or `modsum`.\n\n/// Parameters depend solely on what kind of algorithm is used and more information is available\n\n/// at the respective Builders.\n\npub fn find_checksum_segments(\n\n strspec: &str,\n\n bytes: &[Vec<u8>],\n\n sum: &[Vec<u8>],\n\n start_range: SignedInclRange,\n\n end_range: SignedInclRange,\n\n) -> Result<Vec<RangePair>, CheckBuilderErr> {\n\n let (prefix, width, rest) = find_prefix_width(strspec)?;\n\n match (width, prefix) {\n\n (1..=8, \"crc\") => find_segment_str::<CRC<u8>>(rest, bytes, sum, start_range, end_range),\n\n (9..=16, \"crc\") => find_segment_str::<CRC<u16>>(rest, bytes, sum, 
start_range, end_range),\n\n (17..=32, \"crc\") => find_segment_str::<CRC<u32>>(rest, bytes, sum, start_range, end_range),\n\n (33..=64, \"crc\") => find_segment_str::<CRC<u64>>(rest, bytes, sum, start_range, end_range),\n\n (65..=128, \"crc\") => {\n\n find_segment_str::<CRC<u128>>(rest, bytes, sum, start_range, end_range)\n\n }\n\n (1..=8, \"modsum\") => {\n\n find_segment_str::<ModSum<u8>>(rest, bytes, sum, start_range, end_range)\n\n }\n\n (9..=16, \"modsum\") => {\n", "file_path": "delsum-lib/src/lib.rs", "rank": 41, "score": 97372.7649833898 }, { "content": "pub fn find_algorithm<'a>(\n\n strspec: &str,\n\n bytes: &'a [&[u8]],\n\n sums: &[Vec<u8>],\n\n verbosity: u64,\n\n extended_search: bool,\n\n) -> Result<AlgorithmFinder<'a>, CheckBuilderErr> {\n\n let (prefix, _, rest) = find_prefix_width(strspec)?;\n\n let prefix = prefix.to_ascii_lowercase();\n\n let spec = match prefix.as_str() {\n\n \"crc\" => BuilderEnum::CRC(CRCBuilder::<u128>::from_str(rest)?),\n\n \"modsum\" => BuilderEnum::ModSum(ModSumBuilder::<u64>::from_str(rest)?),\n\n \"fletcher\" => BuilderEnum::Fletcher(FletcherBuilder::<u64>::from_str(rest)?),\n\n _ => unimplemented!(),\n\n };\n\n match sums.len().cmp(&bytes.len()) {\n\n Ordering::Greater => {\n\n return Err(CheckBuilderErr::MissingParameter(\n\n \"not enough files for checksums given\",\n\n ))\n", "file_path": "delsum-lib/src/lib.rs", "rank": 42, "score": 95201.97950946877 }, { "content": "pub fn div_checked(a: &Poly, b: &Poly) -> Option<PolyPtr> {\n\n let mut r = copy_poly(a);\n\n let succ = r.pin_mut().div_to_checked(b);\n\n if succ {\n\n Some(r)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl ops::Add for &Poly {\n\n type Output = PolyPtr;\n\n\n\n fn add(self, rhs: Self) -> Self::Output {\n\n crate::add(self, rhs)\n\n }\n\n}\n\nimpl ops::AddAssign<&Poly> for PolyPtr {\n\n fn add_assign(&mut self, rhs: &Poly) {\n\n self.pin_mut().add_to(rhs);\n", "file_path": "delsum-poly/src/lib.rs", "rank": 43, "score": 95155.39300508637 }, { 
"content": "fn p1fac<N: FactorNum>(\n\n n: &mut N,\n\n start_power: N,\n\n prime: &mut u64,\n\n sieve: &mut PrimeSieve,\n\n bound: u64,\n\n lower_n: u64,\n\n) -> (N, Vec<N>) {\n\n let mut ret = Vec::new();\n\n let mut mon = MonContext::new(*n);\n\n let mut power = mon.to_mon(start_power);\n\n while mon.n > N::from(lower_n) {\n\n // check first if the current number is prime so we don't do something unneccessary\n\n if is_prob_prime(mon.n) {\n\n *n = N::one();\n\n ret.push(mon.n);\n\n return (N::zero(), ret);\n\n }\n\n let fac_op = next_factor(&mut mon, prime, &mut power, sieve, bound);\n\n let fac = match fac_op {\n\n Some(f) => f,\n\n None => break,\n\n };\n\n ret.push(fac);\n\n }\n\n *n = mon.n;\n\n (mon.from_mon(power), ret)\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 44, "score": 93399.03420252628 }, { "content": "// a single step in pollard's p-1 factorization algorithm\n\nfn next_factor<N: FactorNum>(\n\n mon: &mut MonContext<N>,\n\n current_p: &mut u64,\n\n current_power: &mut N,\n\n sieve: &mut PrimeSieve,\n\n bound: u64,\n\n) -> Option<N> {\n\n let neg_one = mon.n.mod_neg(mon.one);\n\n for p in sieve.iter_from(*current_p).take_while(|&x| x <= bound) {\n\n *current_p = p;\n\n let q = maximum_power(bound, p);\n\n *current_power = mon.mon_powermod(*current_power, q as u128);\n\n let p_minus_one = mon.n.mod_add(*current_power, neg_one);\n\n let g = gcd(p_minus_one, mon.n);\n\n if !g.is_one() {\n\n let n = mon.n / g;\n\n *current_power = *current_power % n;\n\n mon.update(n);\n\n return Some(g);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 45, "score": 91287.80648777966 }, { "content": "struct RevInfo {\n\n width: usize,\n\n init: Option<PolyPtr>,\n\n xorout: Option<PolyPtr>,\n\n poly: Option<PolyPtr>,\n\n refin: bool,\n\n refout: bool,\n\n wordspec: WordSpec,\n\n}\n\n\n\nimpl RevInfo {\n\n // this is responsible for converting integer values to polynomial values\n\n // and returning a RevInfo that can be 
used for further reversing\n\n fn from_builder(\n\n spec: &CRCBuilder<u128>,\n\n refin: bool,\n\n refout: bool,\n\n wordspec: WordSpec,\n\n ) -> Self {\n\n let width = spec.width.unwrap();\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 46, "score": 90959.81111832814 }, { "content": "struct RevResult {\n\n polys: Vec<PolyPtr>,\n\n inits: PrefactorMod,\n\n xorout: InitPoly,\n\n width: usize,\n\n refin: bool,\n\n refout: bool,\n\n wordspec: WordSpec,\n\n}\n\n\n\nimpl RevResult {\n\n // iterate over all possible parameters\n\n fn iter(self) -> impl Iterator<Item = CRC<u128>> {\n\n let RevResult {\n\n polys,\n\n inits,\n\n xorout,\n\n width,\n\n refin,\n\n refout,\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 47, "score": 90959.81111832814 }, { "content": "struct RevSpec {\n\n width: usize,\n\n init: Option<u64>,\n\n module: Option<u64>,\n\n wordspec: WordSpec,\n\n}\n\n\n", "file_path": "delsum-lib/src/modsum/rev.rs", "rank": 48, "score": 90959.81111832814 }, { "content": "struct RevResult {\n\n modlist: Vec<u128>,\n\n init: i128,\n\n width: usize,\n\n wordspec: WordSpec,\n\n}\n\n\n\nimpl RevResult {\n\n // iterate over all possible modules and calculate the corresponding init values\n\n fn iter(self) -> impl Iterator<Item = ModSum<u64>> {\n\n let Self {\n\n modlist,\n\n init,\n\n width,\n\n wordspec,\n\n } = self;\n\n modlist.into_iter().map(move |module| {\n\n let init_negative = init < 0;\n\n let mut init = init.abs() as u128 % module;\n\n if init_negative {\n", "file_path": "delsum-lib/src/modsum/rev.rs", "rank": 49, "score": 90959.81111832814 }, { "content": "fn presums<S: LinearCheck + ?Sized>(\n\n summer: &S,\n\n bytes: &[u8],\n\n sum: &S::Sum,\n\n start_range: UnsignedInclRange,\n\n end_range: UnsignedInclRange,\n\n) -> (Vec<S::Sum>, Vec<S::Sum>) {\n\n if start_range.start() > start_range.end() || end_range.start() > end_range.end() {\n\n return (Vec::new(), Vec::new());\n\n }\n\n if start_range.start() >= bytes.len() {\n\n return (Vec::new(), 
Vec::new());\n\n }\n\n // we calculate two presum arrays, one for the starting values and one for the end values\n\n let step = summer.wordspec().word_bytes();\n\n let mut state = summer.init();\n\n let mut start_presums = Vec::with_capacity(start_range.len() / step);\n\n let mut end_presums = Vec::with_capacity(end_range.len() / step);\n\n let neg_init = summer.negate(summer.init());\n\n let iter_range = start_range.start()..=end_range.end();\n", "file_path": "delsum-lib/src/checksum.rs", "rank": 50, "score": 87779.39912143545 }, { "content": "// The parameter reversing for CRC is quite similar and it may be easier to try to understand that implementation first,\n\n// since it uses integers instead of 𝔽₂[X].\n\n//\n\n// If f is a file of length l (in bits) interpreted as a polynomial in 𝔽₂[X], then the crc is just\n\n// (init*X^l + f*X^width + xorout) % poly\n\n//\n\n// If we have a file with a crc checksum, we can calculate\n\n// checksum - f*X^width ≡ init*X^l + xorout mod poly\n\n// Note that poly is not yet known, so we can't reduce by poly yet and have a giant degree l polynomial,\n\n// with a file that is a few MB, this is a polynomial whose degree is a few millions, so each operation\n\n// can be expensive.\n\n//\n\n// By using multiple files, we can also cancel xorout and init:\n\n// Given three files of len l₁, l₂, l₃, we have calculated init*X^lₐ + xorout mod poly before, so by subtracting\n\n// the first two, we get `a = init*(X^l₁ + X^l₂) mod poly`. 
Doing the 2nd and 3rd, we get similarly get `b = init*(X^l₂ + X^l₃) mod poly`.\n\n// For simplicity, let's assume l₁ < l₂ < l₃ (if there are two of the same length, init is already cancelled).\n\n// If we multiply a by (X^(l₃ - l₂) + 1), we get init*(X^(l₃ + l₁ - l₂) + X^l₃ + X^l₂ + X^l₁) mod poly.\n\n// When we multiply b by (X^(l₂ - l₁) + 1), we also get that, so by subtracting both, we get 0 mod poly, meaning that\n\n// poly divides the result, which we can use to determine poly later.\n\n//\n\n// If we have more than three files, we can also get more results, but since poly has to divide all of them, we can gcd them\n\n// together to get a smaller polynomial that is divided by poly.\n\n// If we don't have that, we still know that the highest prime factor of poly that we care about has degree width,\n\n// which we can use to construct a polynomial that only has factors of degree <= width and gcd with that.\n\n//\n\n// One could think that doing a gcd between million-degree polynomials could be very slow.\n\n// And if a naive implementation of multiplication and gcd were used, that would be correct.\n\n// However this program uses two excellent libraries, NTL and gf2x, with which the gcd can be calculated in\n\n// around O(n*log^2(n)) time, thanks to the FFT-based Schönhage-Strassen multiplication and a clever\n\n// gcd implementation called half-gcd.\n\n//\n\n// Now we just assume that the result we got in the previous step is already our poly.\n\n// We can just adjust it to be a divisor of that if we found it to be wrong later.\n\n// With that, we can solve init*(X^l₂ + X^l₁) ≡ x mod poly for init using number theory®\n\n// and from that, we get xorout by subtracting e.g. 
init*X^l₁.\n\n//\n\n// If our poly is still of degree higher than width, we can then factorize it.\n\n// Note that factoring 𝔽₂[X] polynoials is suprisingly feasable (people have factored\n\n// such polynomials in the degree of millions) and because the factors all have degree <= width,\n\n// due to the way distinct degree factorization works, it should still work quite fast.\n\n// However, by this point poly should be very close in degree to width, so it's not a very big issue anyway.\n\n//\n\n// Using the factorization, we can then iterate over all divisors of degree width.\n\nfn rev_from_polys(\n\n spec: &RevInfo,\n\n arg_polys: &[(PolyPtr, usize)],\n\n verbosity: u64,\n\n) -> Result<RevResult, Option<CheckReverserError>> {\n\n let log = |s| {\n\n if verbosity > 0 {\n\n eprintln!(\n\n \"<crc, refin = {}, refout = {}> {}\",\n\n spec.refin, spec.refout, s\n\n );\n\n }\n\n };\n\n // InitPlace is essentially a sparse polynomial with at most 2 coefficients being 1\n\n // note that it has an implied factor of 8, because it uses the byte position instead of bit position\n\n let mut polys: Vec<_> = arg_polys\n\n .iter()\n\n .rev()\n\n .map(|(p, l)| (copy_poly(p), InitPlace::Single(*l)))\n\n .collect();\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 51, "score": 86209.1949209863 }, { "content": "fn find_segments_aligned<S: LinearCheck + ?Sized>(\n\n summer: &S,\n\n bytes: &[Vec<u8>],\n\n sum: &[<S as Digest>::Sum],\n\n start_range: SignedInclRange,\n\n end_range: SignedInclRange,\n\n) -> Option<Vec<RangePair>> {\n\n let min_len = bytes.iter().map(|x| x.len()).min().unwrap();\n\n let (start_range, end_range) = normalize_range(\n\n start_range,\n\n end_range,\n\n summer.wordspec().word_bytes(),\n\n min_len,\n\n )?;\n\n #[cfg(feature = \"parallel\")]\n\n let (start_presums, end_presums) = bytes\n\n .par_iter()\n\n .zip(sum.par_iter())\n\n .map(|(b, s)| {\n\n presums(\n", "file_path": "delsum-lib/src/checksum.rs", "rank": 52, "score": 84024.2817464819 }, { "content": 
"// we don't actually ever convert the factors represented by a\n\n// InitPlaces struct into a full polynomial, we just evaluate it modulo the hull\n\n// to do this faster, we save X^k mod hull and evaluate them from smallest to largest\n\n// so we can reuse it later\n\nstruct MemoPower {\n\n prev_power: usize,\n\n prev_ppoly: PolyRemPtr,\n\n init_fac: PolyPtr,\n\n hull: PolyPtr,\n\n}\n\nimpl MemoPower {\n\n fn new(hull: &Poly) -> Self {\n\n MemoPower {\n\n prev_power: 0,\n\n prev_ppoly: new_polyrem(&new_poly(&[1]), &hull),\n\n init_fac: new_zero(),\n\n hull: copy_poly(hull),\n\n }\n\n }\n\n fn update_init_fac(&mut self, place: &InitPlace) -> &Poly {\n\n let mut update_power = |&new_level: &usize| {\n\n if new_level < self.prev_power {\n\n panic!(\"Internal Error: Polynomials non-ascending\");\n\n }\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 53, "score": 81453.88222832396 }, { "content": "// note: this can be replaced with a more efficient implementations, like the one in factor.rs, but\n\n// i'm not feeling like doing it right now tbh\n\nfn gcd(a: &BigInt, b: &BigInt) -> BigInt {\n\n xgcd(a, b).0\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 54, "score": 80745.80438242802 }, { "content": "// ntl's modular division doesn't account for common factors between\n\n// the arguments, so this is a version which does\n\nfn inverse_fixed(mut a: PolyPtr, b: &Poly, common: &Poly, hull: &Poly) -> PolyPtr {\n\n a /= common;\n\n let mut b = copy_poly(b);\n\n b /= common;\n\n let module = div(hull, common);\n\n if module.eq(&new_poly(&[1])) {\n\n return new_zero();\n\n }\n\n let mut ma = new_polyrem(&a, &module);\n\n let mb = new_polyrem(&b, &module);\n\n ma /= &mb;\n\n ma.rep()\n\n}\n\n\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 55, "score": 79319.92574914492 }, { "content": "// If we have a file with the bytes [a, b, c, d] we have a checksum of the form (init + a + b + c + d) mod m.\n\n// By subtracting a + b + c + d from the checksum 
(without mod'ing by m because we don't know m yet), we get\n\n// init mod m.\n\n// If we have two files, we can take their difference and have a number that is 0 mod m, which means m divides this number.\n\n// The solutions are then the divisors m in the appropiate range.\n\nfn reverse(\n\n spec: RevSpec,\n\n chk_bytes: Vec<(impl Iterator<Item = u64>, u128)>,\n\n verbosity: u64,\n\n) -> Result<RevResult, CheckReverserError> {\n\n let log = |s| {\n\n if verbosity > 0 {\n\n eprintln!(\"<modsum> {}\", s);\n\n }\n\n };\n\n let width = spec.width;\n\n let mut sums = Vec::<i128>::new();\n\n let max_sum = 1u128 << width;\n\n let mut min_sum = 0;\n\n log(\"summing files up\");\n\n for (f, chk) in chk_bytes {\n\n min_sum = min_sum.max(chk);\n\n // here we calculate (init mod m)\n\n sums.push(f.map(i128::from).sum::<i128>() - chk as i128);\n\n }\n", "file_path": "delsum-lib/src/modsum/rev.rs", "rank": 56, "score": 79104.3511074498 }, { "content": "#[inline]\n\nfn make_u128(x: &[u64; 2]) -> u128 {\n\n x[0] as u128 + ((x[1] as u128) << 64)\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 57, "score": 78334.89664905168 }, { "content": "#[inline]\n\nfn split_u128(x: u128) -> [u64; 2] {\n\n [x as u64, (x >> 64) as u64]\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 58, "score": 78334.89664905168 }, { "content": "// find all combinations of refin, refout and wordspecs using all values when a parameter is not given\n\nfn discrete_combos(\n\n spec: &CRCBuilder<u128>,\n\n extended_search: bool,\n\n) -> Vec<((bool, bool), WordSpec)> {\n\n let width = spec.width.expect(\"Missing width argument\");\n\n let refins = spec\n\n .refin\n\n .map(|x| vec![x])\n\n .unwrap_or_else(|| vec![false, true]);\n\n let mut ret = Vec::new();\n\n for refin in refins {\n\n let refouts = spec.refout.map(|x| vec![x]).unwrap_or_else(|| {\n\n if extended_search {\n\n vec![false, true]\n\n } else {\n\n vec![refin]\n\n }\n\n });\n\n let input_endian = spec.input_endian.or_else(|| 
{\n\n Some(match refin {\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 59, "score": 76680.12353863871 }, { "content": "fn find_polyhull(\n\n spec: &RevInfo,\n\n polys: Vec<InitPoly>,\n\n verbosity: u64,\n\n) -> Result<(Vec<InitPoly>, PolyPtr), Option<CheckReverserError>> {\n\n let log = |s| {\n\n if verbosity > 1 {\n\n eprintln!(\n\n \"<crc poly, refin = {}, refout = {}> {}\",\n\n spec.refin, spec.refout, s\n\n );\n\n }\n\n };\n\n let mut contain_init_vec = Vec::new();\n\n let mut hull = spec\n\n .poly\n\n .as_ref()\n\n .map(|x| copy_poly(x))\n\n .unwrap_or_else(new_zero);\n\n log(\"gcd'ing same length files together\");\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 60, "score": 76672.7820287276 }, { "content": "fn remove_xorouts(\n\n maybe_xorout: &Option<PolyPtr>,\n\n mut polys: Vec<InitPoly>,\n\n) -> (Vec<InitPoly>, InitPoly) {\n\n let mut ret_vec = Vec::new();\n\n let mut prev = polys\n\n .pop()\n\n .expect(\"Internal Error: Zero-length vector given to remove_xorouts\");\n\n let xor_ret = match maybe_xorout {\n\n Some(xorout) => {\n\n // if we already have xorout, we can subtract it from the files themselves so\n\n // that we have one more to get parameters from\n\n ret_vec.push((add(&prev.0, xorout), prev.1));\n\n (copy_poly(&xorout), InitPlace::None)\n\n }\n\n None => (copy_poly(&prev.0), prev.1),\n\n };\n\n for (p, l) in polys.into_iter().rev() {\n\n let appendix = match (maybe_xorout, l != InitPlace::None && l == prev.1) {\n\n (None, _) | (_, true) => {\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 61, "score": 76672.7820287276 }, { "content": "fn bytes_to_poly(\n\n bytes: &[u8],\n\n checksum: &[u8],\n\n width: u8,\n\n refin: bool,\n\n refout: bool,\n\n wordspec: WordSpec,\n\n) -> Option<PolyPtr> {\n\n let new_bytes = reorder_poly_bytes(bytes, refin, wordspec);\n\n let mut poly = new_poly_shifted(&new_bytes, width as i64);\n\n let sum = bytes_to_int(&checksum, wordspec.output_endian);\n\n let check_mask = 1u128.checked_shl(width as 
u32).unwrap_or(0).wrapping_sub(1);\n\n if (!check_mask & sum) != 0 {\n\n return None;\n\n }\n\n let check = cond_reverse(width, sum, refout);\n\n poly += &new_poly(&check.to_le_bytes());\n\n Some(poly)\n\n}\n\n\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 62, "score": 76672.7820287276 }, { "content": "// wrapper to call rev_from_polys with polynomial arguments\n\nfn reverse<'a>(\n\n spec: &CRCBuilder<u128>,\n\n chk_bytes: Vec<(&'a [u8], Vec<u8>)>,\n\n verbosity: u64,\n\n refin: bool,\n\n refout: bool,\n\n wordspec: WordSpec,\n\n) -> Result<impl Iterator<Item = CRC<u128>> + 'a, Option<CheckReverserError>> {\n\n let width = match spec.width {\n\n Some(x) => x,\n\n None => return Err(Some(CheckReverserError::MissingParameter(\"width\"))),\n\n };\n\n // check for errors in the parameters\n\n if 3 > chk_bytes.len()\n\n + spec.init.is_some() as usize\n\n + spec.xorout.is_some() as usize\n\n + spec.poly.is_some() as usize\n\n {\n\n return Err(Some(CheckReverserError::MissingParameter(\n\n \"at least 3 parameters/files\",\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 63, "score": 76537.01586877869 }, { "content": "fn find_prod_comb(\n\n width: usize,\n\n // (degree, multiplicity)\n\n gens: &[(PolyPtr, i64)],\n\n) -> Vec<PolyPtr> {\n\n // there's no reason i implemented it like this in particular; the problem is NP complete\n\n // and i've got no clue how to efficiently solve it anyway and this seemed like a simple solution\n\n let mut ret: Vec<Vec<PolyPtr>> = (0..=width).map(|_| Vec::new()).collect();\n\n for (p, l) in gens.iter() {\n\n // since Poly doesn't implement clone, this will have to do for now\n\n let retcopy: Vec<Vec<_>> = ret\n\n .iter()\n\n .map(|v| v.iter().map(|q| copy_poly(q)).collect())\n\n .collect();\n\n let mut q = copy_poly(p);\n\n for _ in 1..=*l {\n\n let inc_deg = deg(&q) as usize;\n\n if inc_deg > width {\n\n break;\n\n }\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 64, "score": 74449.70766537062 }, { "content": "/// reads 
a bunch of checksums in hex separated by ','\n\nfn read_checksums(s: &str) -> Result<Vec<Vec<u8>>, FromHexError> {\n\n s.split(',').map(|x| x.trim()).map(Vec::from_hex).collect()\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 65, "score": 74192.52644982388 }, { "content": "// fun fact: this entire function gets loop-unrolled by llvm when optimizing\n\nfn maximum_power_iter(bound: u64, mut base: u64) -> u64 {\n\n let mut stack = [0u64; 6];\n\n let mut stack_idx = 0;\n\n loop {\n\n stack[stack_idx] = base;\n\n base = match base.checked_mul(base).map(|x| (x, x.cmp(&bound))) {\n\n Some((x, Ordering::Equal)) => return x,\n\n Some((x, Ordering::Less)) => x,\n\n _ => break,\n\n };\n\n stack_idx += 1;\n\n }\n\n loop {\n\n let current_square = stack[stack_idx];\n\n base = match base.checked_mul(current_square).map(|x| (x, x.cmp(&bound))) {\n\n Some((x, Ordering::Equal)) => return x,\n\n Some((x, Ordering::Less)) => x,\n\n _ => base,\n\n };\n\n if stack_idx == 0 {\n\n return base;\n\n }\n\n stack_idx -= 1;\n\n }\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 66, "score": 72547.10148840018 }, { "content": "fn xgcd(a: &BigInt, b: &BigInt) -> (BigInt, (BigInt, BigInt)) {\n\n let mut a = a.abs();\n\n let mut b = b.abs();\n\n if a.is_zero() {\n\n return (b, (zero(), one()));\n\n }\n\n if b.is_zero() {\n\n return (a, (one(), zero()));\n\n }\n\n let mut a_fac = (one(), zero());\n\n let mut b_fac = (zero(), one());\n\n if a < b {\n\n std::mem::swap(&mut a, &mut b);\n\n std::mem::swap(&mut a_fac, &mut b_fac);\n\n }\n\n while !b.is_zero() {\n\n std::mem::swap(&mut a, &mut b);\n\n std::mem::swap(&mut a_fac, &mut b_fac);\n\n let fac = &b / &a;\n\n let rem = &b % &a;\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 67, "score": 70569.97767069555 }, { "content": "fn factor(mut num: u128, bound: u128) -> Vec<(u128, u8)> {\n\n let mut prime_factors = Vec::new();\n\n if num == 0 {\n\n panic!(\"refusing to factor 0\");\n\n }\n\n let tz = num.trailing_zeros();\n\n if 
tz > 0 {\n\n prime_factors.push((2u128, tz as u8));\n\n num >>= tz;\n\n }\n\n let mut sieve = PrimeSieve::new();\n\n let mut prime = 1;\n\n let sqrt = get_exact_root(num, 2).unwrap_or_else(|x| x);\n\n let bound = bound.min(sqrt) as u64;\n\n // use u128 as long as the numbers are too big for u64\n\n let (power, maybe_prime128) = p1fac(&mut num, 2u128, &mut prime, &mut sieve, bound, u64::MAX);\n\n let maybe_prime64 = if let Ok(mut x) = u64::try_from(num) {\n\n // then fall back to u64\n\n let new_power = power as u64 % x;\n\n let (_, maybe_prime64) = p1fac(&mut x, new_power, &mut prime, &mut sieve, bound, 1);\n", "file_path": "delsum-lib/src/factor.rs", "rank": 68, "score": 68962.91508991332 }, { "content": "/// Finds all divisors of number with low <= d <= high\n\npub fn divisors_range(number: u128, low: u128, high: u128) -> Vec<u128> {\n\n let switch_div = number / low > high;\n\n let (new_high, new_low) = if switch_div {\n\n (number / low, number / high)\n\n } else {\n\n (high, low)\n\n };\n\n let facs = factor(number, new_high);\n\n let mut divs = div_combs(1, &facs, new_low, new_high);\n\n if switch_div {\n\n divs = divs\n\n .iter()\n\n .map(|x| number / x)\n\n .filter(|&x| x <= high && x >= low)\n\n .collect();\n\n }\n\n divs.sort_unstable();\n\n divs\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 69, "score": 64532.20880458315 }, { "content": "fn poly_to_u128(poly: &Poly) -> u128 {\n\n u128::from_be_bytes(\n\n poly.to_bytes(16)\n\n .as_ref()\n\n .unwrap()\n\n .as_slice()\n\n .try_into()\n\n .unwrap(),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::checksum::tests::ReverseFileSet;\n\n use crate::crc::{CRCBuilder, CRC};\n\n use quickcheck::{Arbitrary, Gen, TestResult};\n\n impl Arbitrary for CRCBuilder<u128> {\n\n fn arbitrary(g: &mut Gen) -> Self {\n\n let width = (u8::arbitrary(g) % 128) + 1;\n", "file_path": "delsum-lib/src/crc/rev.rs", "rank": 70, "score": 64225.93215876898 }, { "content": "/// calculates the 
inverse of x modulo 2^64\n\n/// assumes x is not divisible by 2\n\nfn word_inverse(x: u64) -> u64 {\n\n // ancient motorola magic\n\n let mut y = 1;\n\n let mut prod = x;\n\n while prod != 1 {\n\n prod &= !1u64;\n\n prod &= prod.wrapping_neg();\n\n y = prod.wrapping_add(y);\n\n prod = x.wrapping_mul(y);\n\n }\n\n y\n\n}\n\n\n", "file_path": "delsum-lib/src/factor.rs", "rank": 71, "score": 62951.45458777691 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\nstruct Part {\n\n /// Print some messages indicating progress\n\n #[structopt(short, long, parse(from_occurrences))]\n\n verbose: u64,\n\n /// Sets the end of the checksum segments to be relative to the start of the file\n\n #[structopt(short, long)]\n\n start: bool,\n\n /// Sets the end of the checksum segments to be relative to the end of the file (default)\n\n #[structopt(short, long)]\n\n end: bool,\n\n /// The inclusive range of numbers where a checksum may start in format [number]:[number] where [number]\n\n /// is a signed hexadecimal and negative numbers indicate offsets relative from the end\n\n #[structopt(short = \"S\", long)]\n\n start_range: Option<delsum_lib::utils::SignedInclRange>,\n\n /// The inclusive range of numbers where a checksum may end in format [number]:[number] where [number]\n\n /// is a signed hexadecimal and negative numbers indicate offsets relative from the end\n\n #[structopt(short = \"E\", long)]\n\n end_range: Option<delsum_lib::utils::SignedInclRange>,\n\n /// Do more parallelism, in turn using more memory\n\n #[structopt(short, long)]\n", "file_path": "src/bin.rs", "rank": 72, "score": 60888.42364619851 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\nstruct Reverse {\n\n /// Print some messages indicating progress\n\n #[structopt(short, long, parse(from_occurrences))]\n\n verbose: u64,\n\n /// Do more parallelism, in turn using more memory\n\n #[structopt(short, long)]\n\n parallel: 
bool,\n\n /// Use the checksum algorithm given by the model string\n\n #[structopt(short, long)]\n\n model: Option<String>,\n\n /// The hexadecimal offset of the first byte to be checksummed (can be negative to indicate offset from end)\n\n #[structopt(short = \"S\", long, parse(try_from_str = read_signed_maybe_hex))]\n\n start: Option<isize>,\n\n /// The hexadecimal offset of the last byte to be checksummed (can be negative to indicate offset from end)\n\n #[structopt(short = \"E\", long, parse(try_from_str = read_signed_maybe_hex))]\n\n end: Option<isize>,\n\n /// Extend the search to parameter combinations that are unlikely\n\n #[structopt(short, long)]\n\n extended_search: bool,\n\n /// Read model strings line-by-line from given file\n\n #[structopt(short = \"M\", long)]\n\n model_file: Option<OsString>,\n\n /// A comma separated list of checksums, each corresponding to a file\n\n #[structopt(short, long)]\n\n checksums: String,\n\n /// The files of which to find checksummed parts\n\n files: Vec<OsString>,\n\n}\n\n\n\n/// From given files and algorithms, find out the checksums\n", "file_path": "src/bin.rs", "rank": 73, "score": 60888.42364619851 }, { "content": " module %= 1 << (width / 2);\n\n }\n\n }\n\n new_fletcher.module(module);\n\n let init = u64::arbitrary(g) % module;\n\n new_fletcher.init(init);\n\n let swap = bool::arbitrary(g);\n\n new_fletcher.swap(swap);\n\n let addout1 = u64::arbitrary(g) as u64 % module;\n\n let addout2 = u64::arbitrary(g) as u64 % module;\n\n let addout = glue_sum(addout1, addout2, width, swap);\n\n new_fletcher.addout(addout);\n\n let wordspec = WordSpec::arbitrary(g);\n\n let max_word_width = ((width + 15) / 16).next_power_of_two() * 8;\n\n new_fletcher.wordsize(max_word_width.min(wordspec.wordsize) as usize);\n\n new_fletcher.inendian(wordspec.input_endian);\n\n new_fletcher.outendian(wordspec.output_endian);\n\n new_fletcher\n\n }\n\n }\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 96, "score": 
58848.44479660888 }, { "content": " } = self;\n\n modules\n\n .into_iter()\n\n .map(move |m| {\n\n let module = if m.is_zero() {\n\n 0u64\n\n } else {\n\n (&m).try_into().unwrap()\n\n };\n\n inits.iter(&addout1, &addout2, &m).map(move |(i, s1, s2)| {\n\n let addout = glue_sum(s1, s2, width, swap);\n\n Fletcher::with_options()\n\n .addout(addout)\n\n .init(i as u64)\n\n .module(module)\n\n .width(width)\n\n .swap(swap)\n\n .inendian(wordspec.input_endian)\n\n .outendian(wordspec.output_endian)\n\n .wordsize(wordspec.wordsize)\n\n .build()\n\n .unwrap()\n\n })\n\n })\n\n .flatten()\n\n }\n\n}\n\n\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 97, "score": 58847.15929101384 }, { "content": " naive.swap(fletch_build.swap.unwrap());\n\n }\n\n if wordspec_known.0 {\n\n naive.wordsize(fletch_build.wordsize.unwrap());\n\n }\n\n if wordspec_known.1 {\n\n naive.inendian(fletch_build.input_endian.unwrap());\n\n }\n\n if wordspec_known.2 {\n\n naive.outendian(fletch_build.output_endian.unwrap());\n\n }\n\n let chk_files: Vec<_> = files.with_checksums(&fletcher);\n\n let reverser = reverse_fletcher(&naive, &chk_files, 0, false);\n\n files.check_matching(&fletcher, reverser)\n\n }\n\n #[test]\n\n fn error1() {\n\n let f16 = Fletcher::with_options()\n\n .width(32)\n\n .module(0x4d)\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 98, "score": 58844.914665698474 }, { "content": " vec![87, 21],\n\n vec![],\n\n ]);\n\n let chk_files = f.with_checksums(&f16);\n\n let mut naive = Fletcher::<u64>::with_options();\n\n naive.width(126);\n\n let reverser = reverse_fletcher(&naive, &chk_files, 0, false);\n\n assert!(!f.check_matching(&f16, reverser).is_failure());\n\n }\n\n #[test]\n\n fn error5() {\n\n let f16 = Fletcher::with_options()\n\n .width(42)\n\n .module(6u64)\n\n .addout(0x000000)\n\n .init(0)\n\n .swap(false)\n\n .inendian(Endian::Big)\n\n .outendian(Endian::Little)\n\n .wordsize(64)\n", "file_path": "delsum-lib/src/fletcher/rev.rs", "rank": 99, "score": 
58844.6329386891 } ]
Rust
src/r3_port_riscv/src/timer/cfg.rs
yvt/r3
cafe6078fa8a649a6e1c5969c625c2a127a91027
use r3::kernel::InterruptNum; #[macro_export] macro_rules! use_timer { (unsafe impl PortTimer for $ty:ty) => { const _: () = { use $crate::r3::{ kernel::{cfg::CfgBuilder, PortTimer, UTicks}, utils::Init, }; use $crate::r3_portkit::tickless; use $crate::{timer, Timer, TimerOptions}; impl PortTimer for $ty { const MAX_TICK_COUNT: UTicks = u32::MAX; const MAX_TIMEOUT: UTicks = u32::MAX; unsafe fn tick_count() -> UTicks { unsafe { timer::imp::tick_count::<Self>() } } unsafe fn pend_tick() { unsafe { timer::imp::pend_tick::<Self>() } } unsafe fn pend_tick_after(tick_count_delta: UTicks) { unsafe { timer::imp::pend_tick_after::<Self>(tick_count_delta) } } } impl Timer for $ty { unsafe fn init() { unsafe { timer::imp::init::<Self>() } } } const TICKLESS_CFG: tickless::TicklessCfg = match tickless::TicklessCfg::new(tickless::TicklessOptions { hw_freq_num: <$ty as TimerOptions>::FREQUENCY, hw_freq_denom: <$ty as TimerOptions>::FREQUENCY_DENOMINATOR, hw_headroom_ticks: <$ty as TimerOptions>::HEADROOM, force_full_hw_period: true, resettable: !<$ty as TimerOptions>::RESET_MTIME, }) { Ok(x) => x, Err(e) => e.panic(), }; static mut TIMER_STATE: tickless::TicklessState<TICKLESS_CFG> = Init::INIT; unsafe impl timer::imp::TimerInstance for $ty { const TICKLESS_CFG: tickless::TicklessCfg = TICKLESS_CFG; type TicklessState = tickless::TicklessState<TICKLESS_CFG>; fn tickless_state() -> *mut Self::TicklessState { unsafe { core::ptr::addr_of_mut!(TIMER_STATE) } } } impl $ty { pub const fn configure_timer(b: &mut CfgBuilder<Self>) { timer::imp::configure(b); } } }; }; } pub trait TimerOptions { const MTIME_PTR: usize; const MTIMECMP_PTR: usize; const RESET_MTIME: bool = true; const FREQUENCY: u64; const FREQUENCY_DENOMINATOR: u64 = 1; const HEADROOM: u32 = min128( Self::FREQUENCY as u128 * 60 / Self::FREQUENCY_DENOMINATOR as u128, 0x40000000, ) as u32; const INTERRUPT_NUM: InterruptNum = crate::INTERRUPT_TIMER; } const fn min128(x: u128, y: u128) -> u128 { if x < y { x } else { y } }
use r3::kernel::InterruptNum; #[macro_export] macro_rules! use_timer { (unsafe impl PortTimer for $ty:ty) => { const _: () = { use $crate::r3::{ kernel::{cfg::CfgBuilder, PortTimer, UTicks}, utils::Init, }; use $crate::r3_portkit::tickless; use $crate::{timer, Timer, TimerOptions}; impl PortTimer for $ty { const MAX_TICK_COUNT: UTicks = u32::MAX; const MAX_TIMEOUT: UTicks = u32::MAX; unsafe fn tick_count() -> UTicks { unsafe { timer::imp::tick_count::<Self>() } } unsafe fn pend_tick() { unsafe { timer::imp::pend_tick::<Self>() } } unsafe fn pend_tick_after(tick_count_delta:
const RESET_MTIME: bool = true; const FREQUENCY: u64; const FREQUENCY_DENOMINATOR: u64 = 1; const HEADROOM: u32 = min128( Self::FREQUENCY as u128 * 60 / Self::FREQUENCY_DENOMINATOR as u128, 0x40000000, ) as u32; const INTERRUPT_NUM: InterruptNum = crate::INTERRUPT_TIMER; } const fn min128(x: u128, y: u128) -> u128 { if x < y { x } else { y } }
UTicks) { unsafe { timer::imp::pend_tick_after::<Self>(tick_count_delta) } } } impl Timer for $ty { unsafe fn init() { unsafe { timer::imp::init::<Self>() } } } const TICKLESS_CFG: tickless::TicklessCfg = match tickless::TicklessCfg::new(tickless::TicklessOptions { hw_freq_num: <$ty as TimerOptions>::FREQUENCY, hw_freq_denom: <$ty as TimerOptions>::FREQUENCY_DENOMINATOR, hw_headroom_ticks: <$ty as TimerOptions>::HEADROOM, force_full_hw_period: true, resettable: !<$ty as TimerOptions>::RESET_MTIME, }) { Ok(x) => x, Err(e) => e.panic(), }; static mut TIMER_STATE: tickless::TicklessState<TICKLESS_CFG> = Init::INIT; unsafe impl timer::imp::TimerInstance for $ty { const TICKLESS_CFG: tickless::TicklessCfg = TICKLESS_CFG; type TicklessState = tickless::TicklessState<TICKLESS_CFG>; fn tickless_state() -> *mut Self::TicklessState { unsafe { core::ptr::addr_of_mut!(TIMER_STATE) } } } impl $ty { pub const fn configure_timer(b: &mut CfgBuilder<Self>) { timer::imp::configure(b); } } }; }; } pub trait TimerOptions { const MTIME_PTR: usize; const MTIMECMP_PTR: usize;
random
[ { "content": "#[cfg(not(arm))]\n\nfn interrupt_free<R>(_: impl FnOnce() -> R) -> R {\n\n unreachable!();\n\n}\n\n\n", "file_path": "src/arm_semihosting/src/export.rs", "rank": 0, "score": 194965.27888413682 }, { "content": "#[cfg(not(target_arch = \"arm\"))]\n\nfn interrupt_free<T>(_: impl FnOnce() -> T) -> T {\n\n panic!(\"this crate is not supported on this platform\")\n\n}\n\n\n", "file_path": "src/r3_support_rza1/src/stdout.rs", "rank": 1, "score": 193430.4536702907 }, { "content": "#[inline]\n\nfn with_uart(f: impl FnOnce(fn(u8))) {\n\n interrupt::free(\n\n #[inline]\n\n |_| unsafe {\n\n if UART.is_none() {\n\n init();\n\n }\n\n f(UART.unwrap_or_else(|| core::hint::unreachable_unchecked()));\n\n },\n\n );\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_k210.rs", "rank": 2, "score": 176457.7977411031 }, { "content": "fn all_test_runs(test_source: &TestSource) -> impl Iterator<Item = TestRun<'_>> {\n\n let cases = r3_test_suite::kernel_tests::TEST_NAMES\n\n .iter()\n\n .cloned()\n\n .map(TestCase::KernelTest);\n\n\n\n let cases = cases.chain(\n\n r3_test_suite::kernel_benchmarks::TEST_NAMES\n\n .iter()\n\n .cloned()\n\n .map(TestCase::KernelBenchmark),\n\n );\n\n\n\n let cases = cases.chain(\n\n test_source\n\n .driver_kernel_tests\n\n .iter()\n\n .map(|s| TestCase::DriverKernelTest(&**s)),\n\n );\n\n\n", "file_path": "src/r3_test_runner/src/selection.rs", "rank": 3, "score": 171462.5659219101 }, { "content": "/// Map random bytes to operations on `BinaryHeap`.\n\nfn interpret(bytecode: &[u8], max_len: usize) -> impl Iterator<Item = Cmd> + '_ {\n\n let mut i = 0;\n\n let mut len = 0;\n\n std::iter::from_fn(move || {\n\n if let Some(instr) = bytecode.get(i..i + 5) {\n\n i += 5;\n\n\n\n let value = u32::from_le_bytes([instr[1], instr[2], instr[3], instr[4]]) as usize;\n\n\n\n if (instr[0] % 2 == 0 && len != max_len) || len == 0 {\n\n len += 1;\n\n Some(Cmd::Insert(value))\n\n } else {\n\n len -= 1;\n\n Some(Cmd::Remove(value % (len + 1)))\n\n 
}\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/r3/src/utils/binary_heap/tests.rs", "rank": 4, "score": 165242.79189799586 }, { "content": "#[inline]\n\nfn handle_tick<System: TimerInstance>(_: usize) {\n\n let tcfg = &System::TICKLESS_CFG;\n\n\n\n // Safety: CPU Lock protects it from concurrent access\n\n let tstate = unsafe { &mut *System::tickless_state() };\n\n\n\n let cur_hw_tick_count = System::mtime_reg32()[0].get();\n\n tstate.mark_reference(tcfg, cur_hw_tick_count);\n\n\n\n // Safety: CPU Lock inactive, an interrupt context\n\n unsafe { System::timer_tick() };\n\n}\n", "file_path": "src/r3_port_riscv/src/timer/imp.rs", "rank": 5, "score": 164210.48071660235 }, { "content": "#[cfg(arm)]\n\nfn interrupt_free<R>(f: impl FnOnce() -> R) -> R {\n\n let cpsr_old: u32;\n\n unsafe { llvm_asm!(\"mrs $0, cpsr\":\"=r\"(cpsr_old):::\"volatile\") };\n\n unsafe { llvm_asm!(\"cpsid i\"::::\"volatile\") };\n\n\n\n let ret = f();\n\n\n\n if cpsr_old & 0x80 == 0 {\n\n unsafe { llvm_asm!(\"cpsie i\"::::\"volatile\") };\n\n }\n\n\n\n ret\n\n}\n\n\n", "file_path": "src/arm_semihosting/src/export.rs", "rank": 6, "score": 162512.42519427085 }, { "content": "#[inline]\n\nfn handle_tick<System: OsTimerInstance>(_: usize) {\n\n let tcfg = &System::TICKLESS_CFG;\n\n\n\n // Safety: CPU Lock protects it from concurrent access\n\n let tstate = unsafe { &mut *System::tickless_state() };\n\n\n\n let cur_hw_tick_count = hw_tick_count::<System>();\n\n tstate.mark_reference(tcfg, cur_hw_tick_count);\n\n\n\n // `timer_tick` will call `pend_tick[_after]`, so it's unnecessary to\n\n // clear the interrupt flag\n\n\n\n // Safety: CPU Lock inactive, an interrupt context\n\n unsafe { System::timer_tick() };\n\n}\n", "file_path": "src/r3_support_rza1/src/os_timer/imp.rs", "rank": 7, "score": 161296.2993227642 }, { "content": "#[inline]\n\n#[cfg(target_arch = \"arm\")]\n\nfn interrupt_free<T>(x: impl FnOnce() -> T) -> T {\n\n let cpsr: u32;\n\n unsafe { asm!(\"mrs {}, cpsr\", 
out(reg)cpsr) };\n\n let unmask = (cpsr & (1 << 7)) == 0;\n\n\n\n unsafe { asm!(\"cpsid i\") };\n\n\n\n let ret = x();\n\n\n\n if unmask {\n\n unsafe { asm!(\"cpsie i\") };\n\n }\n\n\n\n ret\n\n}\n\n\n", "file_path": "src/r3_support_rza1/src/stdout.rs", "rank": 8, "score": 160977.59998042474 }, { "content": "/// Spawn a new thread.\n\npub fn spawn(f: impl FnOnce() + Send + 'static) -> JoinHandle<()> {\n\n let parent_thread = thread::current();\n\n\n\n let data = Arc::new(ThreadData::new());\n\n let data2 = Arc::clone(&data);\n\n\n\n let std_handle = thread::spawn(move || {\n\n // Set up a destructor for `THREAD_DATA`\n\n THREAD_DATA_DTOR.with(|_| {});\n\n\n\n data2.set_self();\n\n\n\n // Move `data2` into `THREAD_DATA`\n\n THREAD_DATA.store(Arc::into_raw(data2) as _, Ordering::Relaxed);\n\n\n\n catch_longjmp(move |jmp_buf| {\n\n EXIT_JMP_BUF.with(|c| c.set(Some(jmp_buf)));\n\n\n\n parent_thread.unpark();\n\n drop(parent_thread);\n", "file_path": "src/r3_port_std/src/threading_unix.rs", "rank": 9, "score": 155768.3679007245 }, { "content": "pub fn set_stdout(writer: impl SerialWrite) {\n\n interrupt::free(|cs| {\n\n *STDOUT.borrow(cs).borrow_mut() = Some(inline_dyn![SerialWrite; writer].ok().unwrap());\n\n });\n\n}\n\n\n", "file_path": "src/r3_support_rp2040/src/stdout.rs", "rank": 10, "score": 153704.67723508854 }, { "content": "#[inline]\n\nfn with_uart(f: impl FnOnce(&mut (Tx<UART0>, Tx<UART1>))) {\n\n interrupt::free(\n\n #[inline]\n\n |_| unsafe {\n\n if UART.is_none() {\n\n init();\n\n }\n\n f(UART\n\n .as_mut()\n\n .unwrap_or_else(|| core::hint::unreachable_unchecked()));\n\n },\n\n );\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_e310x.rs", "rank": 11, "score": 150895.81373171936 }, { "content": "#[inline]\n\nfn ctz_bsearch32<const BITS: usize>(x: usize) -> u32 {\n\n debug_assert!(BITS <= 32);\n\n let mut x = x as u32;\n\n\n\n if x == 0 {\n\n return USIZE_BITS;\n\n }\n\n\n\n let mut i = 0;\n\n\n\n if BITS > 16 && (x & 0xffff) == 0 {\n\n x 
>>= 16;\n\n i += 16;\n\n }\n\n\n\n if BITS > 8 && (x & 0xff) == 0 {\n\n x >>= 8;\n\n i += 8;\n\n }\n\n\n", "file_path": "src/r3/src/utils/ctz.rs", "rank": 12, "score": 150324.97271665634 }, { "content": "/// Temporarily lock the scheduler, disabling preemption.\n\n///\n\n/// *All* operating system and port functions will be unavailable until the lock\n\n/// is relinquished.\n\npub fn lock_scheduler<System: PortInstance>() -> impl Sized {\n\n let state = System::port_state();\n\n state.thread_group.get().unwrap().lock()\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! use_port {\n\n (unsafe $vis:vis struct $sys:ident) => {\n\n $vis struct $sys;\n\n\n\n mod port_std_impl {\n\n use super::$sys;\n\n use $crate::r3::kernel::{\n\n ClearInterruptLineError, EnableInterruptLineError, InterruptNum, InterruptPriority,\n\n PendInterruptLineError, Port, QueryInterruptLineError, SetInterruptLinePriorityError,\n\n TaskCb, PortToKernel, PortInterrupts, PortThreading, UTicks, PortTimer,\n\n };\n\n use $crate::{State, TaskState, PortInstance};\n\n\n\n pub(super) static PORT_STATE: State = State::new();\n", "file_path": "src/r3_port_std/src/lib.rs", "rank": 13, "score": 149428.44499172343 }, { "content": "#[inline]\n\nfn ptr_from_option_ref<T>(x: Option<&T>) -> *const T {\n\n if let Some(x) = x {\n\n x\n\n } else {\n\n core::ptr::null()\n\n }\n\n}\n\n\n\n/// Transition the currently running task into the Waiting state. Returns when\n\n/// woken up.\n\n///\n\n/// The current context must be [waitable] (This function doesn't check\n\n/// that). 
The caller should use `expect_waitable_context` to do that.\n\n///\n\n/// [waitable]: crate#contets\n\npub(super) fn wait_until_woken_up<System: Kernel>(mut lock: utils::CpuLockTokenRefMut<'_, System>) {\n\n debug_assert_eq!(state::expect_waitable_context::<System>(), Ok(()));\n\n\n\n // Transition the current task to Waiting\n\n let running_task = System::state().running_task(lock.borrow_mut()).unwrap();\n", "file_path": "src/r3/src/kernel/task.rs", "rank": 14, "score": 148648.05494834055 }, { "content": "#[inline]\n\nfn ptr_from_option_ref<T>(x: Option<&T>) -> *const T {\n\n if let Some(x) = x {\n\n x\n\n } else {\n\n core::ptr::null()\n\n }\n\n}\n", "file_path": "src/r3/src/kernel/mutex.rs", "rank": 15, "score": 148648.05494834055 }, { "content": "#[inline]\n\nfn ctz_array_lut<const LEN: usize>(x: usize) -> u32 {\n\n struct Lut<const LEN: usize>;\n", "file_path": "src/r3/src/utils/ctz.rs", "rank": 16, "score": 148648.05494834055 }, { "content": "fn timer_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { task, seq, .. 
} = D::app();\n\n\n\n seq.expect_and_replace(4, 5);\n\n task.unpark_exact().unwrap();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_stop.rs", "rank": 17, "score": 147974.84684639014 }, { "content": "fn timer_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App {\n\n task, timer, seq, ..\n\n } = D::app();\n\n\n\n match seq.get() {\n\n 1 => {\n\n seq.expect_and_replace(1, 2);\n\n timer.set_period(Some(Duration::from_millis(500))).unwrap();\n\n task.unpark_exact().unwrap();\n\n }\n\n 3 => {\n\n seq.expect_and_replace(3, 4);\n\n task.unpark_exact().unwrap();\n\n }\n\n 5 => {\n\n seq.expect_and_replace(5, 6);\n\n task.unpark_exact().unwrap();\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_periodic.rs", "rank": 18, "score": 147974.84684639014 }, { "content": "fn timer_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { task, seq, .. } = D::app();\n\n\n\n match seq.get() {\n\n 1 => {\n\n seq.expect_and_replace(1, 2);\n\n }\n\n 2 => {\n\n seq.expect_and_replace(2, 3);\n\n }\n\n 3 => {\n\n seq.expect_and_replace(3, 4);\n\n task.unpark_exact().unwrap();\n\n }\n\n 5 => {\n\n seq.expect_and_replace(5, 6);\n\n task.unpark_exact().unwrap();\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_overdue.rs", "rank": 19, "score": 147974.84684639014 }, { "content": "fn timer_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App {\n\n timer, task, seq, ..\n\n } = D::app();\n\n\n\n match seq.get() {\n\n 0 => {\n\n seq.expect_and_replace(0, 1);\n\n }\n\n 1 => {\n\n seq.expect_and_replace(1, 2);\n\n }\n\n 2 => {\n\n seq.expect_and_replace(2, 3);\n\n timer.set_delay(Some(Duration::from_millis(400))).unwrap();\n\n timer.set_period(None).unwrap();\n\n task.unpark_exact().unwrap();\n\n }\n\n\n\n // 400ms\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_zero_period.rs", "rank": 20, "score": 146704.27098343163 }, { 
"content": "fn unreachable_timer_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n unreachable!()\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_misc.rs", "rank": 21, "score": 146704.27098343163 }, { "content": "#[inline]\n\npub fn trailing_zeros<const BITS: usize>(x: usize) -> u32 {\n\n if BITS == 0 {\n\n USIZE_BITS\n\n } else if BITS == 1 {\n\n if x == 0 {\n\n USIZE_BITS\n\n } else {\n\n 0\n\n }\n\n } else if HAS_CTZ {\n\n x.trailing_zeros()\n\n } else if BITS == 2 && HAS_FAST_LOAD {\n\n ctz_array_lut::<4>(x)\n\n } else if BITS == 3 && HAS_FAST_LOAD {\n\n ctz_array_lut::<8>(x)\n\n } else if BITS == 4 && HAS_FAST_LOAD {\n\n ctz_array_lut::<16>(x)\n\n } else if BITS <= 2 {\n\n ctz2(x)\n\n } else if BITS <= 3 && HAS_SHIFTER {\n", "file_path": "src/r3/src/utils/ctz.rs", "rank": 22, "score": 146362.56253583933 }, { "content": "#[inline]\n\nfn ctz_linear<const BITS: usize>(mut x: usize) -> u32 {\n\n for i in 0..BITS as u32 {\n\n if x & 1 != 0 {\n\n return i;\n\n }\n\n x >>= 1;\n\n }\n\n USIZE_BITS\n\n}\n\n\n\n/// Implements [`trailing_zeros`] using binary search. The last level\n\n/// is handled by [`ctz4_lut_nonzero`].\n\n///\n\n///`BITS` must be less than or equal to 32.\n", "file_path": "src/r3/src/utils/ctz.rs", "rank": 23, "score": 146362.56253583933 }, { "content": "fn insertion_sort_inner<T>(a: &mut [T], mut f: impl FnMut(&T, &T) -> bool) {\n\n for i in 1..a.len() {\n\n let mut ap = &mut a[0..=i];\n\n\n\n while let [.., p1, p2] = ap {\n\n if f(p1, p2) {\n\n break;\n\n }\n\n swap(p1, p2);\n\n ap = ap.split_last_mut().unwrap().1;\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n extern crate std;\n\n\n\n use super::*;\n\n use quickcheck_macros::quickcheck;\n", "file_path": "src/r3_test_suite/src/utils/sort.rs", "rank": 24, "score": 141505.92692595933 }, { "content": "/// Start a no-interrupt section and get the global instance of\n\n/// `UsbStdioGlobal`. 
Will panic if the `UsbStdioGlobal` hasn't been initialized\n\n/// yet.\n\nfn with_usb_stdio_global<T>(f: impl FnOnce(&mut UsbStdioGlobal, &mut WriteBufDeque) -> T) -> T {\n\n interrupt::free(|cs| {\n\n let mut g = USB_STDIO_GLOBAL.borrow(cs).borrow_mut();\n\n let g = g\n\n .as_mut()\n\n .expect(\"UsbStdioGlobal hasn't been initialized yet\");\n\n\n\n let mut write_buf = WRITE_BUF.borrow(cs).borrow_mut();\n\n\n\n f(g, &mut write_buf)\n\n })\n\n}\n\n\n", "file_path": "src/r3_support_rp2040/src/usbstdio.rs", "rank": 25, "score": 140865.3842204379 }, { "content": "/// Sort the slice with a comparator function.\n\npub fn insertion_sort_by<T>(a: &mut [T], mut f: impl FnMut(&T, &T) -> Ordering) {\n\n insertion_sort_inner(a, |x, y| f(x, y) == Ordering::Less);\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/utils/sort.rs", "rank": 26, "score": 139879.43297110227 }, { "content": "/// The core portion of [`Timer::stop`].\n\nfn stop_timer<System: Kernel>(\n\n mut lock: CpuLockTokenRefMut<'_, System>,\n\n timer_cb: &TimerCb<System>,\n\n) {\n\n if timer_cb.timeout.is_linked(lock.borrow_mut()) {\n\n debug_assert!(timer_cb.active.get(&*lock));\n\n\n\n // Capture the current delay value\n\n let delay = timer_cb\n\n .timeout\n\n .saturating_duration_until_timeout(lock.borrow_mut());\n\n\n\n // Unlink the timeout\n\n timeout::remove_timeout(lock.borrow_mut(), &timer_cb.timeout);\n\n\n\n // Store the captured delay value\n\n timer_cb.timeout.set_at_raw(lock.borrow_mut(), delay);\n\n }\n\n\n\n timer_cb.active.replace(&mut *lock, false);\n\n}\n\n\n", "file_path": "src/r3/src/kernel/timer.rs", "rank": 27, "score": 138309.01731480955 }, { "content": "/// The core portion of [`Timer::start`].\n\nfn start_timer<System: Kernel>(\n\n mut lock: CpuLockTokenRefMut<'_, System>,\n\n timer_cb: &'static TimerCb<System>,\n\n) {\n\n if timer_cb.active.get(&*lock) {\n\n return;\n\n }\n\n\n\n // Get the current delay value\n\n let delay = timer_cb.timeout.at_raw(lock.borrow_mut());\n\n\n\n if delay != 
timeout::BAD_DURATION32 {\n\n // Schedule the next tick\n\n timer_cb\n\n .timeout\n\n .set_expiration_after(lock.borrow_mut(), delay);\n\n timeout::insert_timeout(lock.borrow_mut(), static_pin(&timer_cb.timeout));\n\n }\n\n\n\n timer_cb.active.replace(&mut *lock, true);\n\n}\n\n\n", "file_path": "src/r3/src/kernel/timer.rs", "rank": 28, "score": 138309.01731480955 }, { "content": "/// The core portion of [`Timer::set_period`].\n\nfn set_timer_period<System: Kernel>(\n\n mut lock: CpuLockTokenRefMut<'_, System>,\n\n timer: &TimerCb<System>,\n\n period: timeout::Time32,\n\n) {\n\n timer.period.replace(&mut *lock, period);\n\n}\n\n\n\n/// The timeout callback function for a timer. This function should be\n\n/// registered as a callback function when initializing [`TimerCb::timeout`].\n\n///\n\n/// `i` is an index into [`super::KernelCfg2::timer_cb_pool`].\n\npub(super) fn timer_timeout_handler<System: Kernel>(\n\n i: usize,\n\n mut lock: CpuLockGuard<System>,\n\n) -> CpuLockGuard<System> {\n\n let timer_cb = System::get_timer_cb(i).unwrap();\n\n\n\n // Schedule the next tick\n\n debug_assert!(!timer_cb.timeout.is_linked(lock.borrow_mut()));\n", "file_path": "src/r3/src/kernel/timer.rs", "rank": 29, "score": 136619.36971206008 }, { "content": "/// The core portion of [`Timer::set_delay`].\n\nfn set_timer_delay<System: Kernel>(\n\n mut lock: CpuLockTokenRefMut<'_, System>,\n\n timer_cb: &'static TimerCb<System>,\n\n delay: timeout::Time32,\n\n) {\n\n let is_active = timer_cb.active.get(&*lock);\n\n\n\n if timer_cb.timeout.is_linked(lock.borrow_mut()) {\n\n timeout::remove_timeout(lock.borrow_mut(), &timer_cb.timeout);\n\n }\n\n\n\n if is_active && delay != timeout::BAD_DURATION32 {\n\n timer_cb\n\n .timeout\n\n .set_expiration_after(lock.borrow_mut(), delay);\n\n timeout::insert_timeout(lock.borrow_mut(), static_pin(&timer_cb.timeout));\n\n } else {\n\n timer_cb.timeout.set_at_raw(lock.borrow_mut(), delay);\n\n }\n\n}\n\n\n", "file_path": "src/r3/src/kernel/timer.rs", 
"rank": 30, "score": 136619.36971206008 }, { "content": "/// Sort the slice with a key extraction function.\n\npub fn insertion_sort_by_key<T, K: Ord>(a: &mut [T], mut f: impl FnMut(&T) -> K) {\n\n insertion_sort_inner(a, |x, y| f(x) < f(y));\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/utils/sort.rs", "rank": 31, "score": 135976.3837202868 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App {\n\n seq,\n\n timer2,\n\n timer3,\n\n timer4,\n\n ..\n\n } = D::app();\n\n\n\n // Start `timer3`. `timer3` is now in the Active state, but it will never\n\n // fire because its delay is `None` (infinity).\n\n timer3.start().unwrap();\n\n\n\n // The same goes for `timer4`.\n\n timer4.set_delay(None).unwrap();\n\n timer4.start().unwrap();\n\n\n\n // `timer2` is already active, so this is no-op\n\n timer2.start().unwrap();\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_misc.rs", "rank": 32, "score": 134884.5882687275 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { seq, timer, .. } = D::app();\n\n\n\n // Expected current time\n\n let mut now = 0;\n\n\n\n seq.expect_and_replace(0, 1);\n\n\n\n System::sleep_ms(400);\n\n now += 400;\n\n\n\n macro_rules! check_time {\n\n () => {\n\n System::assert_time_ms_range(now..now + 100);\n\n };\n\n }\n\n\n\n // Start the timer\n\n check_time!();\n\n seq.expect_and_replace(1, 2);\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_stop.rs", "rank": 33, "score": 134884.5882687275 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { seq, timer, .. } = D::app();\n\n\n\n seq.expect_and_replace(0, 1);\n\n\n\n // Advance the time\n\n System::adjust_time(Duration::from_millis(1300)).unwrap();\n\n\n\n // Now the system has missed three calls to the callback function.\n\n // The system will process them soon. 
(It's unspecified whether it\n\n // happens in `adjust_time`)\n\n\n\n // Wait until the system finishes processing the overdue calls\n\n System::park().unwrap();\n\n seq.expect_and_replace(4, 5);\n\n\n\n System::assert_time_ms_range(1300..1400);\n\n\n\n // The final tick, which takes place on time\n\n System::park().unwrap();\n\n seq.expect_and_replace(6, 7);\n\n\n\n System::assert_time_ms_range(1600..1700);\n\n\n\n timer.stop().unwrap();\n\n\n\n D::success();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_overdue.rs", "rank": 34, "score": 134884.5882687275 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { seq, timer, .. } = D::app();\n\n\n\n // Expected current time\n\n let mut now = 0u32;\n\n\n\n seq.expect_and_replace(0, 1);\n\n\n\n System::sleep_ms(400);\n\n now += 400;\n\n\n\n macro_rules! check_time {\n\n () => {\n\n System::assert_time_ms_range(now..now + 100);\n\n };\n\n }\n\n\n\n // Start the timer\n\n check_time!();\n\n timer.start().unwrap();\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_periodic.rs", "rank": 35, "score": 134884.5882687275 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { seq, timer, .. } = D::app();\n\n\n\n // Wait until the system finishes the first batch of ticks\n\n System::park().unwrap();\n\n seq.expect_and_replace(3, 4);\n\n\n\n System::assert_time_ms_range(0..100);\n\n\n\n // The next tick\n\n System::park().unwrap();\n\n seq.expect_and_replace(5, 6);\n\n\n\n System::assert_time_ms_range(400..500);\n\n\n\n // Set the period to zero again\n\n timer.set_period(Some(Duration::ZERO)).unwrap();\n\n timer.set_delay(Some(Duration::ZERO)).unwrap();\n\n\n\n // The last three ticks\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_zero_period.rs", "rank": 36, "score": 133542.61613428415 }, { "content": "fn timer_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { state, tasks, .. 
} = D::app();\n\n\n\n // Safety: This is a unique reference\n\n let sched_state = unsafe { &mut *state.sched_state.get() };\n\n\n\n sched_state.time += 1;\n\n\n\n // Switch the running task\n\n let new_task = (sched_state.cur_task + 1) % NUM_TASKS;\n\n log::trace!(\"scheduing tasks[{}]\", new_task);\n\n tasks[sched_state.cur_task].set_priority(3).unwrap();\n\n tasks[new_task].set_priority(2).unwrap();\n\n sched_state.cur_task = new_task;\n\n\n\n // Wait for several ticks to catch any bugs in context switching\n\n if sched_state.time < 100 {\n\n return;\n\n }\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/compute_round_robin.rs", "rank": 37, "score": 133542.61613428415 }, { "content": "fn timer_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App {\n\n state,\n\n tasks,\n\n judge_task,\n\n timer,\n\n ..\n\n } = D::app();\n\n\n\n // Safety: This is a unique reference\n\n let sched_state = unsafe { &mut *state.sched_state.get() };\n\n\n\n sched_state.time += 1;\n\n\n\n // Switch the running task\n\n let new_task = (sched_state.cur_task + 1) % NUM_TASKS;\n\n log::trace!(\"scheduing tasks[{}]\", new_task);\n\n tasks[sched_state.cur_task].set_priority(2).unwrap();\n\n tasks[new_task].set_priority(1).unwrap();\n\n sched_state.cur_task = new_task;\n", "file_path": "src/r3_test_suite/src/kernel_tests/compute_atomics_round_robin.rs", "rank": 38, "score": 132246.06599587295 }, { "content": "#[inline]\n\npub fn init<System: TimerInstance>() {\n\n let tcfg = &System::TICKLESS_CFG;\n\n\n\n // Safety: No context switching during boot\n\n let tstate = unsafe { &mut *System::tickless_state() };\n\n\n\n if System::RESET_MTIME {\n\n System::mtime_reg32()[0].set(0);\n\n } else {\n\n tstate.reset(tcfg, System::mtime_reg32()[0].get());\n\n }\n\n}\n\n\n\n/// Implements [`r3::kernel::PortTimer::tick_count`]\n\n///\n\n/// # Safety\n\n///\n\n/// Only meant to be referenced by `use_timer!`.\n\npub unsafe fn tick_count<System: TimerInstance>() -> UTicks {\n\n let 
tcfg = &System::TICKLESS_CFG;\n", "file_path": "src/r3_port_riscv/src/timer/imp.rs", "rank": 39, "score": 131963.50507057313 }, { "content": "#[inline]\n\npub fn init<System: OsTimerInstance>() {\n\n let ostm = System::ostm_regs();\n\n let tcfg = System::TICKLESS_CFG;\n\n\n\n // Enable clock supply\n\n if let Some((addr, bit)) = System::STBCR_OSTM {\n\n // Safety: Verified by the user of `use_os_timer!`\n\n unsafe {\n\n let ptr = addr as *mut u8;\n\n ptr.write_volatile(ptr.read_volatile() & !(1u8 << bit));\n\n }\n\n }\n\n\n\n // RZ/A1x includes two instances of OS Timer. We use one of them.\n\n //\n\n // OS Timer will operate in Free-Running Comparison Mode, where the timer\n\n // counts up from `0` and generates an interrupt when the counter value\n\n // matches `OSTMCMP`.\n\n ostm.tt.write(|w| w.tt().stop()); // stop\n\n ostm.ctl.write(|w| {\n", "file_path": "src/r3_support_rza1/src/os_timer/imp.rs", "rank": 40, "score": 128954.90045557283 }, { "content": "fn hw_tick_count<System: OsTimerInstance>() -> u32 {\n\n System::ostm_regs().cnt.read().bits()\n\n}\n\n\n\n/// Implements [`r3::kernel::PortTimer::tick_count`]\n\n///\n\n/// # Safety\n\n///\n\n/// Only meant to be referenced by `use_os_timer!`.\n\npub unsafe fn tick_count<System: OsTimerInstance>() -> UTicks {\n\n let tcfg = &System::TICKLESS_CFG;\n\n\n\n let hw_tick_count = hw_tick_count::<System>();\n\n\n\n // Safety: CPU Lock protects it from concurrent access\n\n let tstate = unsafe { &mut *System::tickless_state() };\n\n tstate.tick_count(tcfg, hw_tick_count)\n\n}\n\n\n\n/// Implements [`r3::kernel::PortTimer::pend_tick`]\n", "file_path": "src/r3_support_rza1/src/os_timer/imp.rs", "rank": 41, "score": 126130.58544679875 }, { "content": "fn task1_body(_: usize) {\n\n log::trace!(\"COTTAGE = {:#?}\", COTTAGE);\n\n log::trace!(\"KENREL = {:#?}\", System::debug());\n\n\n\n COTTAGE.task2.activate().unwrap();\n\n}\n\n\n", "file_path": "examples/basic/src/main.rs", "rank": 42, "score": 120023.8775884085 }, { 
"content": "fn task2_body(_: usize) {\n\n loop {\n\n dbg!(System::time().unwrap());\n\n System::sleep(r3::time::Duration::from_secs(1)).unwrap();\n\n }\n\n}\n", "file_path": "examples/basic/src/main.rs", "rank": 43, "score": 120023.8775884085 }, { "content": "fn task1_body(_: usize) {\n\n support_rza1::sprintln!(\"COTTAGE = {:?}\", COTTAGE);\n\n\n\n COTTAGE.task2.activate().unwrap();\n\n}\n\n\n", "file_path": "examples/basic_gr_peach/src/main.rs", "rank": 44, "score": 118029.72546005144 }, { "content": "fn task2_body(_: usize) {\n\n loop {\n\n support_rp2040::sprintln!(\" 0 | core0: {:?}\", System::time().unwrap());\n\n System::sleep(r3::time::Duration::from_millis(700)).unwrap();\n\n }\n\n}\n", "file_path": "examples/smp_rp_pico/src/core0.rs", "rank": 45, "score": 118029.72546005144 }, { "content": "fn task1_body(_: usize) {\n\n support_rp2040::sprintln!(\"COTTAGE = {:?}\", COTTAGE);\n\n\n\n COTTAGE.task2.activate().unwrap();\n\n}\n\n\n", "file_path": "examples/smp_rp_pico/src/core0.rs", "rank": 46, "score": 118029.72546005144 }, { "content": "fn task1_body(_: usize) {\n\n rtt_target::rprintln!(\"COTTAGE = {:?}\", COTTAGE);\n\n\n\n COTTAGE.task2.activate().unwrap();\n\n}\n\n\n", "file_path": "examples/basic_nucleo_f401re/src/main.rs", "rank": 47, "score": 118029.72546005144 }, { "content": "fn task1_body(_: usize) {\n\n let c1 = Core1(());\n\n write_bytes(c1, b\"core1: task1 is running\\n\");\n\n\n\n let p = unsafe { rp2040::Peripherals::steal() };\n\n\n\n // Configure GP25 (connected to LED on Pico) for output\n\n // <https://github.com/jannic/rp-microcontroller-rs/blob/master/boards/rp-pico/examples/blink/main.rs>\n\n let pin = 25;\n\n\n\n p.SIO.gpio_oe_clr.write(|w| unsafe { w.bits(1 << pin) });\n\n p.SIO.gpio_out_clr.write(|w| unsafe { w.bits(1 << pin) });\n\n\n\n p.PADS_BANK0\n\n .gpio25\n\n .write(|w| w.ie().bit(true).od().bit(false));\n\n\n\n p.IO_BANK0.gpio25_ctrl.write(|w| w.funcsel().sio_25());\n\n\n\n p.SIO.gpio_oe_set.write(|w| unsafe { w.bits(1 << 
pin) });\n", "file_path": "examples/smp_rp_pico/src/core1.rs", "rank": 48, "score": 118029.72546005144 }, { "content": "fn task2_body(_: usize) {\n\n loop {\n\n rtt_target::rprintln!(\"time = {:?}\", System::time().unwrap());\n\n System::sleep(r3::time::Duration::from_secs(1)).unwrap();\n\n }\n\n}\n", "file_path": "examples/basic_nucleo_f401re/src/main.rs", "rank": 49, "score": 118029.72546005144 }, { "content": "fn task2_body(_: usize) {\n\n loop {\n\n support_rza1::sprintln!(\"time = {:?}\", System::time().unwrap());\n\n System::sleep(r3::time::Duration::from_secs(1)).unwrap();\n\n }\n\n}\n", "file_path": "examples/basic_gr_peach/src/main.rs", "rank": 50, "score": 118029.72546005144 }, { "content": "fn task1_body(_: usize) {\n\n support_rp2040::sprintln!(\"COTTAGE = {:?}\", COTTAGE);\n\n\n\n COTTAGE.task2.activate().unwrap();\n\n}\n\n\n", "file_path": "examples/basic_rp_pico/src/main.rs", "rank": 51, "score": 118029.72546005144 }, { "content": "fn task2_body(_: usize) {\n\n let p = unsafe { rp2040::Peripherals::steal() };\n\n\n\n // <https://github.com/jannic/rp-microcontroller-rs/blob/master/boards/rp-pico/examples/blink/main.rs>\n\n // TODO: Documentate what this code does\n\n let pin = 25;\n\n\n\n p.SIO.gpio_oe_clr.write(|w| unsafe { w.bits(1 << pin) });\n\n p.SIO.gpio_out_clr.write(|w| unsafe { w.bits(1 << pin) });\n\n\n\n p.PADS_BANK0\n\n .gpio25\n\n .write(|w| w.ie().bit(true).od().bit(false));\n\n\n\n p.IO_BANK0.gpio25_ctrl.write(|w| w.funcsel().sio_25());\n\n\n\n p.SIO.gpio_oe_set.write(|w| unsafe { w.bits(1 << pin) });\n\n p.SIO.gpio_out_set.write(|w| unsafe { w.bits(1 << pin) });\n\n\n\n loop {\n\n // Blink the LED\n\n p.SIO.gpio_out_set.write(|w| unsafe { w.bits(1 << pin) });\n\n System::sleep(r3::time::Duration::from_millis(100)).unwrap();\n\n p.SIO.gpio_out_clr.write(|w| unsafe { w.bits(1 << pin) });\n\n\n\n support_rp2040::sprintln!(\"time = {:?}\", System::time().unwrap());\n\n System::sleep(r3::time::Duration::from_millis(900)).unwrap();\n\n 
}\n\n}\n", "file_path": "examples/basic_rp_pico/src/main.rs", "rank": 52, "score": 118029.72546005144 }, { "content": "#[inline]\n\nfn handle_tick<System: Sp804Instance>(_: usize) {\n\n let tcfg = &System::TICKLESS_CFG;\n\n\n\n // Safety: CPU Lock protects it from concurrent access\n\n let tstate = unsafe { &mut *System::tickless_state() };\n\n\n\n let cur_hw_tick_count = hw_tick_count::<System>();\n\n tstate.mark_reference(tcfg, cur_hw_tick_count);\n\n\n\n // `timer_tick` will call `pend_tick[_after]`, so it's unnecessary to\n\n // clear the interrupt flag\n\n\n\n // Safety: CPU Lock inactive, an interrupt context\n\n unsafe { System::timer_tick() };\n\n}\n", "file_path": "src/r3_port_arm/src/sp804/imp.rs", "rank": 53, "score": 108630.20302378615 }, { "content": "#[doc(hidden)]\n\npub fn write_fmt(args: fmt::Arguments<'_>) {\n\n let _ = fmt::Write::write_fmt(&mut WrapSerialWrite, args);\n\n}\n\n\n\n/// Macro for printing to the serial standard output\n\n#[macro_export]\n\nmacro_rules! sprint {\n\n ($s:expr) => {\n\n $crate::stdout::write_str($s)\n\n };\n\n ($($tt:tt)*) => {\n\n $crate::stdout::write_fmt(format_args!($($tt)*))\n\n };\n\n}\n\n\n\n/// Macro for printing to the serial standard output, with a newline.\n\n#[macro_export]\n\nmacro_rules! sprintln {\n\n () => {\n\n $crate::stdout::write_str(\"\\n\")\n\n };\n\n ($s:expr) => {\n\n $crate::stdout::write_str(concat!($s, \"\\n\"))\n\n };\n\n ($s:expr, $($tt:tt)*) => {\n\n $crate::stdout::write_fmt(format_args!(concat!($s, \"\\n\"), $($tt)*))\n\n };\n\n}\n", "file_path": "src/r3_support_rp2040/src/stdout.rs", "rank": 54, "score": 106869.9060353224 }, { "content": "#[doc(hidden)]\n\npub fn write_fmt(args: fmt::Arguments<'_>) {\n\n interrupt_free(|| unsafe {\n\n if let Some(stdout) = STDOUT.as_ref() {\n\n (stdout.1)(args);\n\n }\n\n })\n\n}\n\n\n\n/// Macro for printing to the serial standard output\n\n#[macro_export]\n\nmacro_rules! 
sprint {\n\n ($s:expr) => {\n\n $crate::stdout::write_str($s)\n\n };\n\n ($($tt:tt)*) => {\n\n $crate::stdout::write_fmt(format_args!($($tt)*))\n\n };\n\n}\n\n\n\n/// Macro for printing to the serial standard output, with a newline.\n", "file_path": "src/r3_support_rza1/src/stdout.rs", "rank": 55, "score": 106869.9060353224 }, { "content": "#[inline]\n\nfn interrupt_handler<System: Plic + Kernel>(_: usize) {\n\n if let Some((token, num)) = claim_interrupt::<System>() {\n\n if let Some(handler) = System::INTERRUPT_HANDLERS.get(num) {\n\n if System::USE_NESTING {\n\n unsafe { riscv::register::mie::set_mext() };\n\n }\n\n\n\n // Safety: The interrupt controller driver is responsible for\n\n // dispatching the appropriate interrupt handler for\n\n // a platform interrupt\n\n unsafe { handler() };\n\n\n\n if System::USE_NESTING {\n\n unsafe { riscv::register::mie::clear_mext() };\n\n }\n\n }\n\n\n\n end_interrupt::<System>(token);\n\n }\n\n}\n\n\n", "file_path": "src/r3_port_riscv/src/plic/imp.rs", "rank": 56, "score": 106039.48557608141 }, { "content": "pub fn stderr_write_fmt(args: fmt::Arguments<'_>) {\n\n interrupt::free(|_| {\n\n let _ = SerialWrapper(0x10011000 as *mut u32).write_fmt(args);\n\n });\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_u540.rs", "rank": 57, "score": 103729.11384599903 }, { "content": "pub fn stdout_write_fmt(args: fmt::Arguments<'_>) {\n\n interrupt::free(|_| {\n\n let _ = SerialWrapper(0x10010000 as *mut u32).write_fmt(args);\n\n });\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_u540.rs", "rank": 58, "score": 103729.11384599903 }, { "content": "pub fn stderr_write_fmt(args: fmt::Arguments<'_>) {\n\n with_uart(|uart0| {\n\n // Switch to the log channel using our multiplexing protocol\n\n uart0(0x17);\n\n uart0(b'2');\n\n\n\n let _ = SerialWrapper(uart0).write_fmt(args);\n\n });\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_k210.rs", "rank": 59, "score": 103729.11384599903 }, { 
"content": "pub fn stderr_write_fmt(args: fmt::Arguments<'_>) {\n\n with_uart(|(_uart0, uart1)| {\n\n let _ = SerialWrapper(uart1).write_fmt(args);\n\n });\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_e310x.rs", "rank": 60, "score": 103729.11384599903 }, { "content": "pub fn stdout_write_fmt(args: fmt::Arguments<'_>) {\n\n with_uart(|uart0| {\n\n // Switch to the primary message channel using our multiplexing protocol\n\n uart0(0x17);\n\n uart0(b'1');\n\n\n\n let _ = SerialWrapper(uart0).write_fmt(args);\n\n });\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_k210.rs", "rank": 61, "score": 103729.11384599903 }, { "content": "pub fn stdout_write_fmt(args: fmt::Arguments<'_>) {\n\n with_uart(|(uart0, _uart1)| {\n\n let _ = SerialWrapper(uart0).write_fmt(args);\n\n });\n\n}\n\n\n", "file_path": "src/r3_port_riscv_test_driver/src/uart_e310x.rs", "rank": 62, "score": 103729.11384599903 }, { "content": "fn timer2_body<System: Kernel, D: Driver<App<System>>>(param: usize) {\n\n let App { task, seq, .. 
} = D::app();\n\n\n\n assert_eq!(param, 52);\n\n\n\n // Check `timer2`'s expiration time in `task`\n\n // (`System::time` is disallowed in a non-task context)\n\n seq.expect_and_replace(0, 1);\n\n task.unpark().unwrap();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_misc.rs", "rank": 63, "score": 103362.80004034308 }, { "content": "fn timer1_body<System: Kernel, D: Driver<App<System>>>(param: usize) {\n\n let App {\n\n timer1,\n\n timer2,\n\n task,\n\n seq,\n\n ..\n\n } = D::app();\n\n\n\n assert_eq!(param, 42);\n\n\n\n assert!(!System::is_task_context());\n\n\n\n // Check `timer1`'s expiration time in `task`\n\n // (`System::time` is disallowed in a non-task context)\n\n seq.expect_and_replace(2, 3);\n\n task.unpark().unwrap();\n\n\n\n // `PartialEq`\n\n assert_ne!(timer1, timer2);\n", "file_path": "src/r3_test_suite/src/kernel_tests/timer_misc.rs", "rank": 64, "score": 103362.80004034308 }, { "content": "pub fn write_fmt(core1: Core1, args: fmt::Arguments<'_>) {\n\n struct WrapCore0Write(Core1);\n\n\n\n impl fmt::Write for WrapCore0Write {\n\n fn write_str(&mut self, s: &str) -> fmt::Result {\n\n write_bytes(self.0, s.as_bytes());\n\n Ok(())\n\n }\n\n }\n\n\n\n let _ = fmt::Write::write_fmt(&mut WrapCore0Write(core1), args);\n\n}\n\n\n\n// --------------------------------------------------------------------------\n\n\n", "file_path": "examples/smp_rp_pico/src/core1.rs", "rank": 65, "score": 100623.04139894882 }, { "content": "fn main_task<System: Kernel, Options: BencherOptions<System>>(_: usize) {\n\n while {\n\n Options::mark_start();\n\n Options::mark_end(\"(empty)\");\n\n\n\n Options::iter();\n\n\n\n let state = unsafe { &mut *Options::cottage().state.0.get() };\n\n\n\n // If there's no custom intervals defined at this point, it's a usage\n\n // error.\n\n if state.intervals.len() <= 1 {\n\n panic!(\"`mark_end` has never been called during the iteration\");\n\n }\n\n\n\n // Repeat until all instances of `IntervalRecord::samples` are 
full.\n\n state.intervals.iter().any(|i| i.samples.is_not_full())\n\n } {}\n\n\n\n // Report the result\n", "file_path": "src/r3_test_suite/src/utils/benchmark.rs", "rank": 66, "score": 99111.80609184457 }, { "content": "/// Create a tick now.\n\nfn mark_tick<System: Kernel>(mut lock: CpuLockTokenRefMut<'_, System>) {\n\n let (duration_since_last_tick, tick_count) =\n\n duration_since_last_tick::<System>(lock.borrow_mut());\n\n\n\n let g_timeout = System::g_timeout();\n\n g_timeout.last_tick_count.replace(&mut *lock, tick_count);\n\n g_timeout\n\n .last_tick_time\n\n .replace_with(&mut *lock, |old_value| {\n\n old_value.wrapping_add(duration_since_last_tick)\n\n });\n\n #[cfg(feature = \"system_time\")]\n\n g_timeout\n\n .last_tick_sys_time\n\n .replace_with(&mut *lock, |old_value| {\n\n old_value.wrapping_add(duration_since_last_tick as Time64)\n\n });\n\n\n\n g_timeout\n\n .frontier_gap\n", "file_path": "src/r3/src/kernel/timeout.rs", "rank": 67, "score": 98396.60366142355 }, { "content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().seq.expect_and_replace(2, 3);\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_unmanaged.rs", "rank": 68, "score": 96330.50119948795 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n log::trace!(\"Good morning, Angel!\");\n\n D::success();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/basic.rs", "rank": 69, "score": 96330.50119948795 }, { "content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n unreachable!();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_misc.rs", "rank": 70, "score": 96330.50119948795 }, { "content": "fn isr1<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n log::trace!(\"isr1\");\n\n\n\n D::app().seq.expect_and_replace(2, 3);\n\n\n\n D::success();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_priority.rs", "rank": 71, "score": 96330.50119948795 }, { 
"content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().seq.expect_and_replace(2, 3);\n\n D::success();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_during_boot.rs", "rank": 72, "score": 96330.50119948795 }, { "content": "fn isr0<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n log::trace!(\"isr0\");\n\n\n\n D::app().seq.expect_and_replace(1, 2);\n\n\n\n if D::app().int[1].is_none() {\n\n log::warn!(\"Only one interrupt line is defined, skipping the second part of the test\");\n\n D::success();\n\n return;\n\n }\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_priority.rs", "rank": 73, "score": 96330.50119948795 }, { "content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().done.store(true, Ordering::Relaxed);\n\n}\n", "file_path": "src/r3_port_std/tests/kernel_tests/external_interrupt.rs", "rank": 74, "score": 96330.50119948795 }, { "content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let app = D::app();\n\n let [m1, ..] = app.m;\n\n\n\n app.seq.expect_and_replace(1, 2);\n\n\n\n // Allowed in a non-task context\n\n assert_eq!(m1.is_locked(), Ok(false));\n\n assert_eq!(\n\n m1.mark_consistent(),\n\n Err(r3::kernel::MarkConsistentMutexError::BadObjectState)\n\n );\n\n\n\n // Disallowed in a non-task context\n\n assert_eq!(m1.unlock(), Err(r3::kernel::UnlockMutexError::BadContext));\n\n assert_eq!(m1.lock(), Err(r3::kernel::LockMutexError::BadContext));\n\n assert_eq!(\n\n m1.try_lock(),\n\n Err(r3::kernel::TryLockMutexError::BadContext)\n\n );\n\n assert_eq!(\n\n m1.lock_timeout(Duration::ZERO),\n\n Err(r3::kernel::LockMutexTimeoutError::BadContext)\n\n );\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/mutex_misc.rs", "rank": 75, "score": 96330.50119948795 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().seq.expect_and_replace(0, 1);\n\n\n\n // Pend both interrupts at the same time. 
Regardless the order of reception,\n\n // the higher-priority one should be handled first\n\n System::acquire_cpu_lock().unwrap();\n\n if let Some(int) = D::app().int[1] {\n\n int.pend().unwrap();\n\n }\n\n if let Some(int) = D::app().int[0] {\n\n int.pend().unwrap();\n\n }\n\n unsafe { System::release_cpu_lock() }.unwrap();\n\n\n\n if let [None, None] = D::app().int {\n\n log::warn!(\"No interrupt lines defined, skipping the test\");\n\n D::success();\n\n return;\n\n }\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_priority.rs", "rank": 76, "score": 95640.35443100735 }, { "content": "fn task1_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let app = D::app();\n\n\n\n app.seq.expect_and_replace(0, 1);\n\n\n\n if let Some(int) = app.int {\n\n int.pend().unwrap();\n\n } else {\n\n log::warn!(\"No interrupt lines defined, skipping a portion of the test\");\n\n app.seq.expect_and_replace(1, 2);\n\n }\n\n\n\n // `PartialEq`\n\n let [m1, m2, ..] = app.m;\n\n assert_ne!(m1, m2);\n\n assert_eq!(m1, m1);\n\n assert_eq!(m2, m2);\n\n\n\n // `Hash`\n\n let hash = |x: Mutex<System>| {\n", "file_path": "src/r3_test_suite/src/kernel_tests/mutex_misc.rs", "rank": 77, "score": 95640.35443100735 }, { "content": "fn task1_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().seq.expect_and_replace(1, 2);\n\n\n\n D::app().task2.unpark_exact().unwrap();\n\n\n\n D::app().seq.expect_and_replace(3, 4);\n\n\n\n D::app().task2.interrupt().unwrap();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/task_park.rs", "rank": 78, "score": 95640.35443100735 }, { "content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let sem = D::app().sem;\n\n\n\n D::app().seq.expect_and_replace(2, 3);\n\n\n\n assert_eq!(sem.poll_one(), Err(r3::kernel::PollSemaphoreError::Timeout));\n\n assert_eq!(\n\n sem.wait_one(),\n\n Err(r3::kernel::WaitSemaphoreError::BadContext)\n\n );\n\n\n\n sem.signal(1).unwrap(); // wakes up 
`task2`\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/semaphore_interrupt_handler.rs", "rank": 79, "score": 95640.35443100735 }, { "content": "fn task1_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { m } = D::app();\n\n\n\n let cur_task: Task<System> = Task::current().unwrap().unwrap();\n\n assert_eq!(cur_task.priority().unwrap(), 15);\n\n assert_eq!(cur_task.effective_priority().unwrap(), 15);\n\n\n\n m[3].lock().unwrap();\n\n assert_eq!(cur_task.priority().unwrap(), 15);\n\n assert_eq!(cur_task.effective_priority().unwrap(), 3);\n\n\n\n m[2].lock().unwrap();\n\n assert_eq!(cur_task.priority().unwrap(), 15);\n\n assert_eq!(cur_task.effective_priority().unwrap(), 2);\n\n\n\n m[1].lock().unwrap();\n\n assert_eq!(cur_task.priority().unwrap(), 15);\n\n assert_eq!(cur_task.effective_priority().unwrap(), 1);\n\n\n\n m[0].lock().unwrap();\n", "file_path": "src/r3_test_suite/src/kernel_tests/mutex_nesting.rs", "rank": 80, "score": 95640.35443100735 }, { "content": "fn startup_hook<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n assert_eq!(\n\n Task::<System>::current(),\n\n Err(r3::kernel::GetCurrentTaskError::BadContext)\n\n );\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/task_misc.rs", "rank": 81, "score": 95640.35443100735 }, { "content": "fn task0_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { seq, eg } = D::app();\n\n\n\n seq.expect_and_replace(1, 2);\n\n System::sleep(Duration::from_millis(300)).unwrap();\n\n // `task0` goes into sleep. 
`task1` wakes up first.\n\n // `task0` follows:\n\n seq.expect_and_replace(3, 4);\n\n eg.signal(1).unwrap();\n\n // preempted by `task1`, which we just woke up\n\n\n\n // back from `task1`\n\n seq.expect_and_replace(6, 7);\n\n D::success();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/semaphore_timeout.rs", "rank": 82, "score": 95640.35443100735 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n assert!(!System::has_cpu_lock());\n\n\n\n // Acquire CPU Lock\n\n System::acquire_cpu_lock().unwrap();\n\n\n\n // Can't do it again because it's already acquired\n\n assert!(System::has_cpu_lock());\n\n assert_eq!(\n\n System::acquire_cpu_lock(),\n\n Err(r3::kernel::CpuLockError::BadContext),\n\n );\n\n assert!(System::has_cpu_lock());\n\n\n\n // Release CPU Lock\n\n unsafe { System::release_cpu_lock() }.unwrap();\n\n\n\n // Can't do it again because it's already released\n\n assert!(!System::has_cpu_lock());\n\n assert_eq!(\n\n unsafe { System::release_cpu_lock() },\n\n Err(r3::kernel::CpuLockError::BadContext),\n\n );\n\n assert!(!System::has_cpu_lock());\n\n\n\n D::success();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/cpu_lock.rs", "rank": 83, "score": 95640.35443100735 }, { "content": "fn task1_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let App { seq, eg } = D::app();\n\n\n\n seq.expect_and_replace(0, 1);\n\n\n\n assert_eq!(\n\n // start waiting, switching to `task0`\n\n eg.wait_one_timeout(Duration::from_millis(200)),\n\n // ... the control is returned on timeout\n\n Err(WaitSemaphoreTimeoutError::Timeout),\n\n );\n\n\n\n seq.expect_and_replace(2, 3);\n\n\n\n // start waiting. 
wakes up when `task0` signals the semaphore\n\n eg.wait_one_timeout(Duration::from_millis(200)).unwrap();\n\n\n\n seq.expect_and_replace(4, 5);\n\n\n\n // this doesn't block\n\n eg.signal(1).unwrap();\n\n eg.wait_one_timeout(Duration::from_millis(200)).unwrap();\n\n\n\n seq.expect_and_replace(5, 6);\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/semaphore_timeout.rs", "rank": 84, "score": 95640.35443100735 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n // `PartialEq`\n\n let app = D::app();\n\n assert_ne!(app.eg1, app.eg2);\n\n assert_eq!(app.eg1, app.eg1);\n\n assert_eq!(app.eg2, app.eg2);\n\n\n\n // `Hash`\n\n let hash = |x: Semaphore<System>| {\n\n use core::hash::{Hash, Hasher};\n\n let mut hasher = WyHash::with_seed(42);\n\n x.hash(&mut hasher);\n\n hasher.finish()\n\n };\n\n assert_eq!(hash(app.eg1), hash(app.eg1));\n\n assert_eq!(hash(app.eg2), hash(app.eg2));\n\n\n\n // Invalid semaphore ID\n\n let bad_eg: Semaphore<System> = unsafe { Semaphore::from_id(NonZeroUsize::new(42).unwrap()) };\n\n assert_eq!(bad_eg.get(), Err(r3::kernel::GetSemaphoreError::BadId));\n", "file_path": "src/r3_test_suite/src/kernel_tests/semaphore_misc.rs", "rank": 85, "score": 95640.35443100735 }, { "content": "fn task3_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n // Current task (again)\n\n assert_eq!(Task::current().unwrap(), Some(D::app().task3));\n\n\n\n D::success();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/task_misc.rs", "rank": 86, "score": 95640.35443100735 }, { "content": "fn startup_hook<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let int = if let Some(int) = D::app().int {\n\n int\n\n } else {\n\n return;\n\n };\n\n\n\n let managed_range = System::MANAGED_INTERRUPT_PRIORITY_RANGE;\n\n\n\n // `set_priority` is disallowed in a boot context\n\n assert_eq!(\n\n int.set_priority(managed_range.start),\n\n Err(kernel::SetInterruptLinePriorityError::BadContext),\n\n );\n\n\n\n // Other 
methods are allowed in a boot context\n\n int.enable().unwrap();\n\n int.disable().unwrap();\n\n match int.is_pending() {\n\n Ok(false) | Err(kernel::QueryInterruptLineError::NotSupported) => {}\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_misc.rs", "rank": 87, "score": 95640.35443100735 }, { "content": "fn startup_hook<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().seq.expect_and_replace(0, 1);\n\n\n\n assert!(System::has_cpu_lock());\n\n\n\n let int = if let Some(int) = D::app().int {\n\n int\n\n } else {\n\n log::warn!(\"No interrupt lines defined, skipping the test\");\n\n D::success();\n\n return;\n\n };\n\n\n\n int.enable().unwrap();\n\n int.pend().unwrap();\n\n\n\n D::app().seq.expect_and_replace(1, 2);\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_during_boot.rs", "rank": 88, "score": 95640.35443100735 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let expected_alignment = System::STACK_ALIGN;\n\n for task_cb in System::task_cb_pool() {\n\n let stack = task_cb.attr.stack.as_ptr();\n\n let start = stack as *mut u8;\n\n let end = start.wrapping_add(stack.len());\n\n log::trace!(\"stack = {:?}..{:?}\", start, end);\n\n\n\n assert_eq!(start as usize % expected_alignment, 0);\n\n assert_eq!(end as usize % expected_alignment, 0);\n\n }\n\n D::success();\n\n}\n", "file_path": "src/r3_port_std/tests/kernel_tests/stack_align.rs", "rank": 89, "score": 95640.35443100735 }, { "content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().task2.activate().unwrap();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_task_activate.rs", "rank": 90, "score": 95640.35443100735 }, { "content": "#[cfg(not(feature = \"priority_boost\"))]\n\nfn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n // Priority Boost is always inactive when it's statically disabled\n\n assert!(!System::is_priority_boost_active());\n\n\n\n // 
Can't deactivate Priority Boost because it's already deactivated\n\n assert_eq!(\n\n unsafe { System::unboost_priority() },\n\n Err(r3::kernel::BoostPriorityError::BadContext),\n\n );\n\n\n\n D::success();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/priority_boost.rs", "rank": 91, "score": 95640.35443100735 }, { "content": "fn task_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let int = if let Some(int) = D::app().int {\n\n int\n\n } else {\n\n log::warn!(\"No interrupt lines defined, skipping the test\");\n\n D::success();\n\n return;\n\n };\n\n\n\n let managed_range = System::MANAGED_INTERRUPT_PRIORITY_RANGE;\n\n\n\n if managed_range.end > managed_range.start {\n\n for pri in managed_range.clone() {\n\n int.set_priority(pri).unwrap();\n\n }\n\n\n\n for pri in managed_range.clone() {\n\n unsafe { int.set_priority_unchecked(pri) }.unwrap();\n\n }\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/interrupt_misc.rs", "rank": 92, "score": 95640.35443100735 }, { "content": "fn isr<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let app = D::app();\n\n\n\n app.seq.expect_and_replace(1, 2);\n\n\n\n // Non-task context\n\n assert_matches!(app.eg1.lock(), Err(mutex::LockError::BadContext));\n\n assert_matches!(app.eg1.try_lock(), Err(mutex::TryLockError::BadContext));\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/sync_mutex_misc.rs", "rank": 93, "score": 95640.35443100735 }, { "content": "fn task2_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n D::app().seq.expect_and_replace(0, 1);\n\n\n\n System::park().unwrap(); // blocks, switching to `task1`\n\n\n\n D::app().seq.expect_and_replace(2, 3);\n\n\n\n assert_eq!(\n\n // blocks, switching to `task1`\n\n System::park(),\n\n Err(r3::kernel::ParkError::Interrupted)\n\n );\n\n\n\n D::app().seq.expect_and_replace(4, 5);\n\n\n\n // Give a park token to itself\n\n D::app().task2.unpark_exact().unwrap();\n\n // `park` doesn't block if the task already has a 
token\n\n System::park().unwrap();\n\n\n\n D::app().task2.unpark_exact().unwrap();\n\n assert_eq!(\n\n D::app().task2.unpark_exact(),\n\n Err(r3::kernel::UnparkExactError::QueueOverflow)\n\n );\n\n\n\n D::success();\n\n}\n", "file_path": "src/r3_test_suite/src/kernel_tests/task_park.rs", "rank": 94, "score": 95640.35443100735 }, { "content": "/// Get the current event time.\n\nfn current_time<System: Kernel>(mut lock: CpuLockTokenRefMut<'_, System>) -> Time32 {\n\n let (duration_since_last_tick, _) = duration_since_last_tick::<System>(lock.borrow_mut());\n\n\n\n let g_timeout = System::g_timeout();\n\n g_timeout\n\n .last_tick_time\n\n .get(&*lock)\n\n .wrapping_add(duration_since_last_tick)\n\n}\n\n\n", "file_path": "src/r3/src/kernel/timeout.rs", "rank": 95, "score": 95640.35443100735 }, { "content": "fn task2_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n unreachable!();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/task_misc.rs", "rank": 96, "score": 95640.35443100735 }, { "content": "fn task2_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let app = D::app();\n\n let [m1, ..] 
= app.m;\n\n\n\n app.seq.expect_and_replace(3, 4);\n\n\n\n // `m1` was abandoned by `task1`.\n\n assert!(!m1.is_locked().unwrap());\n\n assert_eq!(m1.lock(), Err(r3::kernel::LockMutexError::Abandoned));\n\n\n\n // When `Abandoned` is returned, the ownership is given to the calling task\n\n // (This doesn't happen for other kinds of errors)\n\n assert!(m1.is_locked().unwrap());\n\n\n\n m1.unlock().unwrap();\n\n\n\n // The \"abandoned\" status lasts until it's explicitly cleared\n\n assert_eq!(m1.lock(), Err(r3::kernel::LockMutexError::Abandoned));\n\n m1.unlock().unwrap();\n\n assert_eq!(m1.try_lock(), Err(r3::kernel::TryLockMutexError::Abandoned));\n", "file_path": "src/r3_test_suite/src/kernel_tests/mutex_misc.rs", "rank": 97, "score": 95640.35443100735 }, { "content": "fn task3_body<System: Kernel, D: Driver<App<System>>>(_: usize) {\n\n let app = D::app();\n\n let [_, m2, ..] = app.m;\n\n\n\n let cur_task: Task<System> = Task::current().unwrap().unwrap();\n\n assert_eq!(cur_task.priority().unwrap(), 2);\n\n assert_eq!(cur_task.effective_priority().unwrap(), 2);\n\n\n\n m2.lock().unwrap();\n\n}\n\n\n", "file_path": "src/r3_test_suite/src/kernel_tests/mutex_misc.rs", "rank": 98, "score": 95640.35443100735 }, { "content": "#[macro_export]\n\nmacro_rules! use_systick_tickful {\n\n (unsafe impl PortTimer for $ty:ty) => {\n\n const _: () = {\n\n use $crate::r3::{\n\n kernel::{cfg::CfgBuilder, PortTimer, UTicks},\n\n utils::Init,\n\n };\n\n use $crate::systick_tickful::imp;\n\n\n\n static TIMER_STATE: imp::State<$ty> = Init::INIT;\n\n\n\n impl PortTimer for $ty {\n\n const MAX_TICK_COUNT: UTicks = u32::MAX;\n\n const MAX_TIMEOUT: UTicks = u32::MAX;\n\n\n\n unsafe fn tick_count() -> UTicks {\n\n // Safety: CPU Lock active\n\n unsafe { TIMER_STATE.tick_count() }\n\n }\n", "file_path": "src/r3_port_arm_m/src/systick_tickful/cfg.rs", "rank": 99, "score": 26.966251271781474 } ]
Rust
rain_server/src/governor/tasks/instance.rs
baajur/rain
477948554150760164c6fe48eac27bcf06c7933b
use chrono::{DateTime, Utc}; use futures::Future; use rain_core::{comm::*, errors::*}; use error_chain::bail; use governor::graph::{ExecutorRef, TaskRef, TaskState}; use governor::rpc::executor::data_output_from_spec; use governor::state::State; use governor::tasks; pub struct TaskInstance { task_ref: TaskRef, cancel_sender: Option<::futures::unsync::oneshot::Sender<()>>, start_timestamp: DateTime<Utc>, } pub type TaskFuture = Future<Item = (), Error = Error>; pub type TaskResult = Result<Box<TaskFuture>>; fn fail_unknown_type(_state: &mut State, task_ref: TaskRef) -> TaskResult { bail!("Unknown task type {}", task_ref.get().spec.task_type) } struct KillOnDrop { executor_ref: Option<ExecutorRef>, } impl KillOnDrop { pub fn new(executor_ref: ExecutorRef) -> Self { KillOnDrop { executor_ref: Some(executor_ref), } } pub fn deactive(&mut self) -> ExecutorRef { ::std::mem::replace(&mut self.executor_ref, None).unwrap() } } impl Drop for KillOnDrop { fn drop(&mut self) { if let Some(ref sw) = self.executor_ref { sw.get_mut().kill(); } } } impl TaskInstance { pub fn start(state: &mut State, task_ref: TaskRef) { { let mut task = task_ref.get_mut(); state.alloc_resources(&task.spec.resources); task.state = TaskState::Running; state.task_updated(&task_ref); } let task_fn = { let task = task_ref.get(); let task_type: &str = task.spec.task_type.as_ref(); match task_type { task_type if !task_type.starts_with("buildin/") => Self::start_task_in_executor, "buildin/run" => tasks::run::task_run, "buildin/concat" => tasks::basic::task_concat, "buildin/open" => tasks::basic::task_open, "buildin/export" => tasks::basic::task_export, "buildin/slice_directory" => tasks::basic::task_slice_directory, "buildin/make_directory" => tasks::basic::task_make_directory, "buildin/sleep" => tasks::basic::task_sleep, _ => fail_unknown_type, } }; let future: Box<TaskFuture> = match task_fn(state, task_ref.clone()) { Ok(f) => f, Err(e) => { state.unregister_task(&task_ref); let mut task = 
task_ref.get_mut(); state.free_resources(&task.spec.resources); task.set_failed(e.description().to_string()); state.task_updated(&task_ref); return; } }; let (sender, receiver) = ::futures::unsync::oneshot::channel::<()>(); let task_id = task_ref.get().spec.id; let instance = TaskInstance { task_ref: task_ref, cancel_sender: Some(sender), start_timestamp: Utc::now(), }; let state_ref = state.self_ref(); state.graph.running_tasks.insert(task_id, instance); state.spawn_panic_on_error( future .map(|()| true) .select(receiver.map(|()| false).map_err(|_| unreachable!())) .then(move |r| { let mut state = state_ref.get_mut(); let instance = state.graph.running_tasks.remove(&task_id).unwrap(); state.task_updated(&instance.task_ref); state.unregister_task(&instance.task_ref); let mut task = instance.task_ref.get_mut(); state.free_resources(&task.spec.resources); task.info.governor = format!("{}", state.governor_id()); task.info.start_time = instance.start_timestamp.to_rfc3339(); task.info.duration = Some( Utc::now() .signed_duration_since(instance.start_timestamp) .num_milliseconds() as f32 * 0.001f32, ); match r { Ok((true, _)) => { let all_finished = task.outputs.iter().all(|o| o.get().is_finished()); if !all_finished { task.set_failed("Some of outputs were not produced".into()); } else { for output in &task.outputs { state.object_is_finished(output); } log::debug!("Task was successfully finished"); task.state = TaskState::Finished; } } Ok((false, _)) => { log::debug!("Task {} was terminated", task.spec.id); task.set_failed("Task terminated by server".into()); } Err((e, _)) => { task.set_failed(e.description().to_string()); } }; Ok(()) }), ); } pub fn stop(&mut self) { let cancel_sender = ::std::mem::replace(&mut self.cancel_sender, None); if let Some(sender) = cancel_sender { sender.send(()).unwrap(); } else { log::debug!("Task stopping is already in progress"); } } fn start_task_in_executor(state: &mut State, task_ref: TaskRef) -> TaskResult { let future = { let task = 
task_ref.get(); let first: &str = task.spec.task_type.split('/').next().unwrap(); state.get_executor(first)? }; let state_ref = state.self_ref(); Ok(Box::new(future.and_then(move |executor_ref| { let mut sw_wrapper = KillOnDrop::new(executor_ref.clone()); let task_ref2 = task_ref.clone(); let task = task_ref2.get(); let executor_ref2 = executor_ref.clone(); let mut executor = executor_ref2.get_mut(); executor.send_task(&task, &executor_ref).then(move |r| { sw_wrapper.deactive(); match r { Ok(ResultMsg { task: task_id, info, success, outputs, cached_objects, }) => { let result: Result<()> = { let mut task = task_ref.get_mut(); let executor = executor_ref.get(); let work_dir = executor.work_dir(); assert!(task.spec.id == task_id); task.info = info; if success { log::debug!("Task id={} finished in executor", task.spec.id); for (co, output) in outputs.into_iter().zip(&task.outputs) { let mut o = output.get_mut(); o.info = co.info.clone(); let data = data_output_from_spec( &state_ref.get(), work_dir, co, o.spec.data_type, )?; o.set_data(data)?; } Ok(()) } else { log::debug!("Task id={} failed in executor", task.spec.id); Err("Task failed in executor".into()) } }; let mut state = state_ref.get_mut(); for object_id in cached_objects { let obj_ref = state.graph.objects.get(&object_id).unwrap(); obj_ref .get_mut() .executor_cache .insert(executor_ref.clone()); } state.graph.idle_executors.insert(executor_ref); result } Err(_) => Err(format!( "Lost connection to executor\n{}", executor_ref .get() .get_log_tails(state_ref.get().log_dir(), 4096) ).into()), } }) }))) } }
use chrono::{DateTime, Utc}; use futures::Future; use rain_core::{comm::*, errors::*}; use error_chain::bail; use governor::graph::{ExecutorRef, TaskRef, TaskState}; use governor::rpc::executor::data_output_from_spec; use governor::state::State; use governor::tasks; pub struct TaskInstance { task_ref: TaskRef, cancel_sender: Option<::futures::unsync::oneshot::Sender<()>>, start_timestamp: DateTime<Utc>, } pub type TaskFuture = Future<Item = (), Error = Error>; pub type TaskResult = Result<Box<TaskFuture>>; fn fail_unknown_type(_state: &mut State, task_ref: TaskRef) -> TaskResult { bail!("Unknown task type {}", task_ref.get().spec.task_type) } struct KillOnDrop { executor_ref: Option<ExecutorRef>, } impl KillOnDrop { pub fn new(execut
terminated", task.spec.id); task.set_failed("Task terminated by server".into()); } Err((e, _)) => { task.set_failed(e.description().to_string()); } }; Ok(()) }), ); } pub fn stop(&mut self) { let cancel_sender = ::std::mem::replace(&mut self.cancel_sender, None); if let Some(sender) = cancel_sender { sender.send(()).unwrap(); } else { log::debug!("Task stopping is already in progress"); } } fn start_task_in_executor(state: &mut State, task_ref: TaskRef) -> TaskResult { let future = { let task = task_ref.get(); let first: &str = task.spec.task_type.split('/').next().unwrap(); state.get_executor(first)? }; let state_ref = state.self_ref(); Ok(Box::new(future.and_then(move |executor_ref| { let mut sw_wrapper = KillOnDrop::new(executor_ref.clone()); let task_ref2 = task_ref.clone(); let task = task_ref2.get(); let executor_ref2 = executor_ref.clone(); let mut executor = executor_ref2.get_mut(); executor.send_task(&task, &executor_ref).then(move |r| { sw_wrapper.deactive(); match r { Ok(ResultMsg { task: task_id, info, success, outputs, cached_objects, }) => { let result: Result<()> = { let mut task = task_ref.get_mut(); let executor = executor_ref.get(); let work_dir = executor.work_dir(); assert!(task.spec.id == task_id); task.info = info; if success { log::debug!("Task id={} finished in executor", task.spec.id); for (co, output) in outputs.into_iter().zip(&task.outputs) { let mut o = output.get_mut(); o.info = co.info.clone(); let data = data_output_from_spec( &state_ref.get(), work_dir, co, o.spec.data_type, )?; o.set_data(data)?; } Ok(()) } else { log::debug!("Task id={} failed in executor", task.spec.id); Err("Task failed in executor".into()) } }; let mut state = state_ref.get_mut(); for object_id in cached_objects { let obj_ref = state.graph.objects.get(&object_id).unwrap(); obj_ref .get_mut() .executor_cache .insert(executor_ref.clone()); } state.graph.idle_executors.insert(executor_ref); result } Err(_) => Err(format!( "Lost connection to executor\n{}", 
executor_ref .get() .get_log_tails(state_ref.get().log_dir(), 4096) ).into()), } }) }))) } }
or_ref: ExecutorRef) -> Self { KillOnDrop { executor_ref: Some(executor_ref), } } pub fn deactive(&mut self) -> ExecutorRef { ::std::mem::replace(&mut self.executor_ref, None).unwrap() } } impl Drop for KillOnDrop { fn drop(&mut self) { if let Some(ref sw) = self.executor_ref { sw.get_mut().kill(); } } } impl TaskInstance { pub fn start(state: &mut State, task_ref: TaskRef) { { let mut task = task_ref.get_mut(); state.alloc_resources(&task.spec.resources); task.state = TaskState::Running; state.task_updated(&task_ref); } let task_fn = { let task = task_ref.get(); let task_type: &str = task.spec.task_type.as_ref(); match task_type { task_type if !task_type.starts_with("buildin/") => Self::start_task_in_executor, "buildin/run" => tasks::run::task_run, "buildin/concat" => tasks::basic::task_concat, "buildin/open" => tasks::basic::task_open, "buildin/export" => tasks::basic::task_export, "buildin/slice_directory" => tasks::basic::task_slice_directory, "buildin/make_directory" => tasks::basic::task_make_directory, "buildin/sleep" => tasks::basic::task_sleep, _ => fail_unknown_type, } }; let future: Box<TaskFuture> = match task_fn(state, task_ref.clone()) { Ok(f) => f, Err(e) => { state.unregister_task(&task_ref); let mut task = task_ref.get_mut(); state.free_resources(&task.spec.resources); task.set_failed(e.description().to_string()); state.task_updated(&task_ref); return; } }; let (sender, receiver) = ::futures::unsync::oneshot::channel::<()>(); let task_id = task_ref.get().spec.id; let instance = TaskInstance { task_ref: task_ref, cancel_sender: Some(sender), start_timestamp: Utc::now(), }; let state_ref = state.self_ref(); state.graph.running_tasks.insert(task_id, instance); state.spawn_panic_on_error( future .map(|()| true) .select(receiver.map(|()| false).map_err(|_| unreachable!())) .then(move |r| { let mut state = state_ref.get_mut(); let instance = state.graph.running_tasks.remove(&task_id).unwrap(); state.task_updated(&instance.task_ref); 
state.unregister_task(&instance.task_ref); let mut task = instance.task_ref.get_mut(); state.free_resources(&task.spec.resources); task.info.governor = format!("{}", state.governor_id()); task.info.start_time = instance.start_timestamp.to_rfc3339(); task.info.duration = Some( Utc::now() .signed_duration_since(instance.start_timestamp) .num_milliseconds() as f32 * 0.001f32, ); match r { Ok((true, _)) => { let all_finished = task.outputs.iter().all(|o| o.get().is_finished()); if !all_finished { task.set_failed("Some of outputs were not produced".into()); } else { for output in &task.outputs { state.object_is_finished(output); } log::debug!("Task was successfully finished"); task.state = TaskState::Finished; } } Ok((false, _)) => { log::debug!("Task {} was
random
[]
Rust
src/lua_tables.rs
perdumonocle/pm_rlua
eb09eba488249f47aaba4709a49108526f939a22
use std::marker::PhantomData; use libc; use td_clua::{self, lua_State}; use LuaGuard; use LuaPush; use LuaRead; pub struct LuaTable { table: *mut lua_State, pop: i32, index: i32, } impl LuaRead for LuaTable { fn lua_read_with_pop(lua: *mut lua_State, index: i32, pop: i32) -> Option<LuaTable> { if unsafe { td_clua::lua_istable(lua, index) } { for _ in 0..pop { unsafe { td_clua::lua_pushnil(lua); } } Some(LuaTable { table: lua, pop: pop, index: index, }) } else { None } } } impl Drop for LuaTable { fn drop(&mut self) { if self.pop != 0 { unsafe { td_clua::lua_pop(self.table, self.pop); }; self.pop = 0; } } } pub struct LuaTableIterator<'t, K, V> { table: &'t mut LuaTable, finished: bool, marker: PhantomData<(K, V)>, } impl LuaTable { pub fn into_inner(self) -> *mut lua_State { self.table } pub fn iter<K, V>(&mut self) -> LuaTableIterator<K, V> { unsafe { td_clua::lua_pushnil(self.table) }; LuaTableIterator { table: self, finished: false, marker: PhantomData, } } pub fn query<'a, R, I>(&'a mut self, index: I) -> Option<R> where R: LuaRead, I: LuaPush, { index.push_to_lua(self.table); unsafe { td_clua::lua_gettable( self.table, if self.index > 0 { self.index } else { self.index - 1 }, ); } let _guard = LuaGuard::new(self.table, 1); LuaRead::lua_read_with_pop(self.table, -1, 1) } pub fn set<I, V>(&mut self, index: I, value: V) where I: LuaPush, V: LuaPush, { index.push_to_lua(self.table); value.push_to_lua(self.table); unsafe { td_clua::lua_settable( self.table, if self.index > 0 { self.index } else { self.index - 2 }, ); } } pub fn register<I>(&mut self, index: I, func: extern "C" fn(*mut lua_State) -> libc::c_int) where I: LuaPush, { index.push_to_lua(self.table); unsafe { td_clua::lua_pushcfunction(self.table, func); td_clua::lua_settable( self.table, if self.index > 0 { self.index } else { self.index - 2 }, ); } } pub fn empty_table<I>(&mut self, index: I) -> LuaTable where I: LuaPush + Clone, { index.clone().push_to_lua(self.table); unsafe { 
td_clua::lua_newtable(self.table); td_clua::lua_settable( self.table, if self.index > 0 { self.index } else { self.index - 2 }, ); } self.query(index).unwrap() } pub fn table_len(&mut self) -> usize { unsafe { td_clua::lua_rawlen(self.table, self.index) } } pub fn get_or_create_metatable(&mut self) -> LuaTable { let result = unsafe { td_clua::lua_getmetatable(self.table, self.index) }; if result == 0 { unsafe { td_clua::lua_newtable(self.table); td_clua::lua_setmetatable(self.table, -2); let r = td_clua::lua_getmetatable(self.table, self.index); assert!(r != 0); } } LuaTable { table: self.table, pop: 1, index: -1, } } } impl<'t, K, V> Iterator for LuaTableIterator<'t, K, V> where K: LuaRead + 'static, V: LuaRead + 'static, { type Item = Option<(K, V)>; fn next(&mut self) -> Option<Option<(K, V)>> { if self.finished { return None; } let state = self.table.table; if unsafe { !td_clua::lua_istable(state, -2) || td_clua::lua_next(state, -2) == 0 } { self.finished = true; return None; } let key = LuaRead::lua_read_at_position(state, -2); let value = LuaRead::lua_read_at_position(state, -1); unsafe { td_clua::lua_pop(state, 1) }; if key.is_none() || value.is_none() { Some(None) } else { Some(Some((key.unwrap(), value.unwrap()))) } } } impl<'t, K, V> Drop for LuaTableIterator<'t, K, V> { fn drop(&mut self) { if !self.finished { unsafe { td_clua::lua_pop(self.table.table, 1) } } } }
use std::marker::PhantomData; use libc; use td_clua::{self, lua_State}; use LuaGuard; use LuaPush; use LuaRead; pub struct LuaTable { table: *mut lua_State, pop: i32, index: i32, } impl LuaRead for LuaTable { fn lua_read_with_pop(lua: *mut lua_State, index: i32, pop: i32) -> Option<LuaTable> { if unsafe { td_clua::lua_istable(lua, index) } { for _ in 0..pop { unsafe { td_clua::lua_pushnil(lua); } } Some(LuaTable { table: lua, pop: pop, index: index, }) } else { None } } } impl Drop for LuaTable { fn drop(&mut self) {
} } pub struct LuaTableIterator<'t, K, V> { table: &'t mut LuaTable, finished: bool, marker: PhantomData<(K, V)>, } impl LuaTable { pub fn into_inner(self) -> *mut lua_State { self.table } pub fn iter<K, V>(&mut self) -> LuaTableIterator<K, V> { unsafe { td_clua::lua_pushnil(self.table) }; LuaTableIterator { table: self, finished: false, marker: PhantomData, } } pub fn query<'a, R, I>(&'a mut self, index: I) -> Option<R> where R: LuaRead, I: LuaPush, { index.push_to_lua(self.table); unsafe { td_clua::lua_gettable( self.table, if self.index > 0 { self.index } else { self.index - 1 }, ); } let _guard = LuaGuard::new(self.table, 1); LuaRead::lua_read_with_pop(self.table, -1, 1) } pub fn set<I, V>(&mut self, index: I, value: V) where I: LuaPush, V: LuaPush, { index.push_to_lua(self.table); value.push_to_lua(self.table); unsafe { td_clua::lua_settable( self.table, if self.index > 0 { self.index } else { self.index - 2 }, ); } } pub fn register<I>(&mut self, index: I, func: extern "C" fn(*mut lua_State) -> libc::c_int) where I: LuaPush, { index.push_to_lua(self.table); unsafe { td_clua::lua_pushcfunction(self.table, func); td_clua::lua_settable( self.table, if self.index > 0 { self.index } else { self.index - 2 }, ); } } pub fn empty_table<I>(&mut self, index: I) -> LuaTable where I: LuaPush + Clone, { index.clone().push_to_lua(self.table); unsafe { td_clua::lua_newtable(self.table); td_clua::lua_settable( self.table, if self.index > 0 { self.index } else { self.index - 2 }, ); } self.query(index).unwrap() } pub fn table_len(&mut self) -> usize { unsafe { td_clua::lua_rawlen(self.table, self.index) } } pub fn get_or_create_metatable(&mut self) -> LuaTable { let result = unsafe { td_clua::lua_getmetatable(self.table, self.index) }; if result == 0 { unsafe { td_clua::lua_newtable(self.table); td_clua::lua_setmetatable(self.table, -2); let r = td_clua::lua_getmetatable(self.table, self.index); assert!(r != 0); } } LuaTable { table: self.table, pop: 1, index: -1, } } } 
impl<'t, K, V> Iterator for LuaTableIterator<'t, K, V> where K: LuaRead + 'static, V: LuaRead + 'static, { type Item = Option<(K, V)>; fn next(&mut self) -> Option<Option<(K, V)>> { if self.finished { return None; } let state = self.table.table; if unsafe { !td_clua::lua_istable(state, -2) || td_clua::lua_next(state, -2) == 0 } { self.finished = true; return None; } let key = LuaRead::lua_read_at_position(state, -2); let value = LuaRead::lua_read_at_position(state, -1); unsafe { td_clua::lua_pop(state, 1) }; if key.is_none() || value.is_none() { Some(None) } else { Some(Some((key.unwrap(), value.unwrap()))) } } } impl<'t, K, V> Drop for LuaTableIterator<'t, K, V> { fn drop(&mut self) { if !self.finished { unsafe { td_clua::lua_pop(self.table.table, 1) } } } }
if self.pop != 0 { unsafe { td_clua::lua_pop(self.table, self.pop); }; self.pop = 0; }
if_condition
[ { "content": "///\n\npub fn read_userdata<'t, T>(lua: *mut td_clua::lua_State, index: i32) -> Option<&'t mut T>\n\nwhere\n\n T: 'static + Any,\n\n{\n\n unsafe {\n\n let expected_typeid = format!(\"{:?}\", TypeId::of::<T>());\n\n let data_ptr = td_clua::lua_touserdata(lua, index);\n\n if data_ptr.is_null() {\n\n return None;\n\n }\n\n if td_clua::lua_getmetatable(lua, index) == 0 {\n\n return None;\n\n }\n\n\n\n \"__typeid\".push_to_lua(lua);\n\n td_clua::lua_gettable(lua, -2);\n\n match <String as LuaRead>::lua_read(lua) {\n\n Some(ref val) if val == &expected_typeid => {}\n\n _ => {\n\n return None;\n\n }\n\n }\n\n td_clua::lua_pop(lua, 2);\n\n Some(&mut *(data_ptr as *mut T))\n\n }\n\n}\n\n\n", "file_path": "src/userdata.rs", "rank": 0, "score": 158878.3046136529 }, { "content": "///in runtime call hotfix func(reload code) or hotfix_file func(reload file)\n\n///we will keep the old data but function, but hotfix not support change name,\n\n///if we add new upvalue, it'a also support\n\n///so after hotfix, the function is new and the data is old, so we success hotfix\n\npub fn load_hot_fix(lua: &mut Lua) {\n\n let func = r\"\n", "file_path": "src/hotfix.rs", "rank": 1, "score": 152460.49999704302 }, { "content": "fn push_iter<V, I>(lua: *mut lua_State, iterator: I) -> i32\n\nwhere\n\n V: LuaPush,\n\n I: Iterator<Item = V>,\n\n{\n\n // creating empty table\n\n unsafe { td_clua::lua_newtable(lua) };\n\n\n\n for (elem, index) in iterator.zip(1..) 
{\n\n let size = elem.push_to_lua(lua);\n\n\n\n match size {\n\n 0 => continue,\n\n 1 => {\n\n let index = index as u32;\n\n index.push_to_lua(lua);\n\n unsafe { td_clua::lua_insert(lua, -2) }\n\n unsafe { td_clua::lua_settable(lua, -3) }\n\n }\n\n 2 => unsafe { td_clua::lua_settable(lua, -3) },\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n 1\n\n}\n\n\n", "file_path": "src/rust_tables.rs", "rank": 2, "score": 137974.60536738235 }, { "content": "fn push_rec_iter<V, I>(lua: *mut lua_State, iterator: I) -> i32\n\nwhere\n\n V: LuaPush,\n\n I: Iterator<Item = V>,\n\n{\n\n let (nrec, _) = iterator.size_hint();\n\n\n\n // creating empty table with pre-allocated non-array elements\n\n unsafe { td_clua::lua_createtable(lua, 0, nrec as i32) };\n\n\n\n for elem in iterator {\n\n let size = elem.push_to_lua(lua);\n\n\n\n match size {\n\n 0 => continue,\n\n 2 => unsafe { td_clua::lua_settable(lua, -3) },\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n", "file_path": "src/rust_tables.rs", "rank": 3, "score": 134691.82466201126 }, { "content": "/// Pushes an object as a user data.\n\n///\n\n/// In Lua, a user data is anything that is not recognized by Lua. 
When the script attempts to\n\n/// copy a user data, instead only a reference to the data is copied.\n\n///\n\n/// The way a Lua script can use the user data depends on the content of the **metatable**, which\n\n/// is a Lua table linked to the object.\n\n///\n\n/// # Arguments\n\n///\n\n/// - `metatable`: Function that fills the metatable of the object.\n\n///\n\npub fn push_userdata<'a, T, F>(data: T, lua: *mut td_clua::lua_State, mut metatable: F) -> i32\n\nwhere\n\n F: FnMut(LuaTable),\n\n T: 'a + Any,\n\n{\n\n let typeid = format!(\"{:?}\", TypeId::of::<T>());\n\n let lua_data_raw =\n\n unsafe { td_clua::lua_newuserdata(lua, mem::size_of::<T>() as libc::size_t) };\n\n\n\n // creating a metatable\n\n unsafe {\n\n ptr::write(lua_data_raw as *mut _, data);\n\n\n\n td_clua::lua_newtable(lua);\n\n\n\n // index \"__typeid\" corresponds to the hash of the TypeId of T\n\n \"__typeid\".push_to_lua(lua);\n\n typeid.push_to_lua(lua);\n\n td_clua::lua_settable(lua, -3);\n\n\n", "file_path": "src/userdata.rs", "rank": 4, "score": 128483.50186558376 }, { "content": "#[test]\n\nfn table_over_table() {\n\n let mut lua = Lua::new();\n\n\n\n let _: () = lua.exec_string(\"a = { 10, { 8, 7 }, 6 }\").unwrap();\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n\n\n let x: i32 = table.query(1).unwrap();\n\n assert_eq!(x, 10);\n\n\n\n {\n\n let mut subtable: LuaTable = table.query(2).unwrap();\n\n\n\n let y: i32 = subtable.query(1).unwrap();\n\n assert_eq!(y, 8);\n\n\n\n let z: i32 = subtable.query(2).unwrap();\n\n assert_eq!(z, 7);\n\n }\n\n\n\n let w: i32 = table.query(3).unwrap();\n\n assert_eq!(w, 6);\n\n}\n\n\n", "file_path": "tests/lua_tables.rs", "rank": 5, "score": 102476.75517771016 }, { "content": "#[test]\n\nfn metatable() {\n\n let mut lua = Lua::new();\n\n\n\n let _: () = lua.exec_string(\"a = { 9, 8, 7 }\").unwrap();\n\n\n\n {\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n\n\n let mut metatable = table.get_or_create_metatable();\n\n fn handler() 
-> i32 {\n\n 5\n\n };\n\n metatable.set(\"__add\".to_string(), pm_rlua::function0(handler));\n\n }\n\n\n\n let r: i32 = lua.exec_string(\"return a + a\").unwrap();\n\n assert_eq!(r, 5);\n\n}\n\n\n", "file_path": "tests/lua_tables.rs", "rank": 6, "score": 94130.57341634993 }, { "content": "#[test]\n\nfn iterable() {\n\n let mut lua = Lua::new();\n\n\n\n let _: () = lua.exec_string(\"a = { 9, 8, 7 }\").unwrap();\n\n\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n let mut counter = 0;\n\n\n\n for (key, value) in table.iter().filter_map(|e| e) {\n\n let _: u32 = key;\n\n let _: u32 = value;\n\n assert_eq!(key + value, 10);\n\n counter += 1;\n\n }\n\n\n\n assert_eq!(counter, 3);\n\n}\n\n\n", "file_path": "tests/lua_tables.rs", "rank": 7, "score": 94130.57341634993 }, { "content": "#[test]\n\nfn iterable_multipletimes() {\n\n let mut lua = Lua::new();\n\n\n\n let _: () = lua.exec_string(\"a = { 9, 8, 7 }\").unwrap();\n\n\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n\n\n for _ in 0..10 {\n\n let table_content: Vec<Option<(u32, u32)>> = table.iter().collect();\n\n assert_eq!(\n\n table_content,\n\n vec![Some((1, 9)), Some((2, 8)), Some((3, 7))]\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/lua_tables.rs", "rank": 8, "score": 90250.75454748854 }, { "content": "#[test]\n\nfn get_set() {\n\n let mut lua = Lua::new();\n\n\n\n let _: () = lua.exec_string(\"a = { 9, 8, 7 }\").unwrap();\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n\n\n let x: i32 = table.query(2).unwrap();\n\n assert_eq!(x, 8);\n\n\n\n table.set(3, \"hello\");\n\n let y: String = table.query(3).unwrap();\n\n assert_eq!(y, \"hello\");\n\n\n\n let z: i32 = table.query(1).unwrap();\n\n assert_eq!(z, 9);\n\n}\n\n\n", "file_path": "tests/lua_tables.rs", "rank": 9, "score": 90250.75454748854 }, { "content": "#[test]\n\nfn empty_array() {\n\n let mut lua = Lua::new();\n\n\n\n {\n\n let mut array = lua.empty_table(\"a\");\n\n array.set(\"b\", 3)\n\n }\n\n\n\n let mut table: 
LuaTable = lua.query(\"a\").unwrap();\n\n assert!(3 == table.query(\"b\").unwrap());\n\n}\n", "file_path": "tests/lua_tables.rs", "rank": 10, "score": 90250.75454748854 }, { "content": "/// Types that can be given to a Lua context, for example with `lua.set()` or as a return value\n\n/// of a function.\n\npub trait LuaPush {\n\n /// Pushes the value on the top of the stack.\n\n ///\n\n /// Must return a guard representing the elements that have been pushed.\n\n ///\n\n /// You can implement this for any type you want by redirecting to call to\n\n /// another implementation (for example `5.push_to_lua`) or by calling\n\n /// `userdata::push_userdata`.\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 11, "score": 89833.79143553022 }, { "content": "/// Types that can be obtained from a Lua context.\n\n///\n\n/// Most types that implement `LuaPush` also implement `LuaRead`, but this is not always the case\n\n/// (for example `&'static str` implements `LuaPush` but not `LuaRead`).\n\npub trait LuaRead: Sized {\n\n /// Reads the data from Lua.\n\n fn lua_read(lua: *mut lua_State) -> Option<Self> {\n\n LuaRead::lua_read_at_position(lua, -1)\n\n }\n\n\n\n /// Reads the data from Lua at a given position.\n\n fn lua_read_at_position(lua: *mut lua_State, index: i32) -> Option<Self> {\n\n LuaRead::lua_read_with_pop(lua, index, 0)\n\n }\n\n\n\n /// Reads the data from Lua at a given position.\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, pop: i32) -> Option<Self>;\n\n}\n\n\n\nimpl Drop for Lua {\n\n fn drop(&mut self) {\n\n if self.own {\n\n unsafe { td_clua::lua_close(self.lua) }\n\n }\n", "file_path": "src/lib.rs", "rank": 12, "score": 82775.37121475776 }, { "content": "/// Pushes an object as a user data.\n\n///\n\n/// In Lua, a user data is anything that is not recognized by Lua. 
When the script attempts to\n\n/// copy a user data, instead only a reference to the data is copied.\n\n///\n\n/// The way a Lua script can use the user data depends on the content of the **metatable**, which\n\n/// is a Lua table linked to the object.\n\n///\n\n/// # Arguments\n\n///\n\n/// - `metatable`: Function that fills the metatable of the object.\n\n///\n\npub fn push_lightuserdata<'a, T, F>(\n\n data: &'a mut T,\n\n lua: *mut td_clua::lua_State,\n\n mut metatable: F,\n\n) -> i32\n\nwhere\n\n F: FnMut(LuaTable),\n\n T: 'a + Any,\n\n{\n\n let typeid = format!(\"{:?}\", TypeId::of::<T>());\n\n unsafe {\n\n td_clua::lua_pushlightuserdata(lua, mem::transmute(data));\n\n };\n\n\n\n // creating a metatable\n\n unsafe {\n\n td_clua::lua_newtable(lua);\n\n\n\n // index \"__typeid\" corresponds to the hash of the TypeId of T\n\n \"__typeid\".push_to_lua(lua);\n", "file_path": "src/userdata.rs", "rank": 13, "score": 70396.87369281531 }, { "content": "#[test]\n\nfn custom_struct() {\n\n #[derive(Clone, Debug)]\n\n struct TestLuaSturct {\n\n index: i32,\n\n }\n\n\n\n impl NewStruct for TestLuaSturct {\n\n fn new() -> TestLuaSturct {\n\n println!(\"new !!!!!!!!!!!!!!\");\n\n TestLuaSturct { index: 19 }\n\n }\n\n\n\n fn name() -> &'static str {\n\n \"TestLuaSturct\"\n\n }\n\n }\n\n\n\n impl Drop for TestLuaSturct {\n\n fn drop(&mut self) {\n\n println!(\"drop test_lua_struct\");\n", "file_path": "tests/userdata.rs", "rank": 14, "score": 70042.02568219841 }, { "content": "pub trait NewStruct {\n\n fn new() -> Self;\n\n fn name() -> &'static str;\n\n}\n\n\n\npub struct LuaStruct<T> {\n\n lua: *mut lua_State,\n\n light: bool,\n\n marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T> LuaStruct<T>\n\nwhere\n\n T: NewStruct + Any,\n\n{\n\n pub fn new(lua: *mut lua_State) -> LuaStruct<T> {\n\n LuaStruct {\n\n lua: lua,\n\n light: false,\n\n marker: PhantomData,\n", "file_path": "src/userdata.rs", "rank": 15, "score": 69628.18071644625 }, { "content": "#[test]\n\nfn write() {\n\n let mut 
lua = Lua::new();\n\n\n\n lua.set(\"a\", vec![9, 8, 7]);\n\n\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n\n\n let values: Vec<(i32, i32)> = table.iter().filter_map(|e| e).collect();\n\n assert_eq!(values, vec!((1, 9), (2, 8), (3, 7)));\n\n}\n\n\n", "file_path": "tests/rust_tables.rs", "rank": 16, "score": 68968.61624951518 }, { "content": "#[test]\n\nfn hotfix_table() {\n\n let mut lua = Lua::new();\n\n lua.openlibs();\n\n lua.enable_hotfix();\n\n let _: Option<()> = lua.exec_string(\n\n r\"\n\n local value = {1, 2}\n", "file_path": "tests/hotfix.rs", "rank": 17, "score": 68968.61624951518 }, { "content": "#[test]\n\nfn read_i32s() {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", 2);\n\n\n\n let x: i32 = lua.query(\"a\").unwrap();\n\n assert_eq!(x, 2);\n\n\n\n let y: i8 = lua.query(\"a\").unwrap();\n\n assert_eq!(y, 2);\n\n\n\n let z: i16 = lua.query(\"a\").unwrap();\n\n assert_eq!(z, 2);\n\n\n\n let w: i32 = lua.query(\"a\").unwrap();\n\n assert_eq!(w, 2);\n\n\n\n let a: u32 = lua.query(\"a\").unwrap();\n\n assert_eq!(a, 2);\n\n\n\n let b: u8 = lua.query(\"a\").unwrap();\n\n assert_eq!(b, 2);\n\n\n\n let c: u16 = lua.query(\"a\").unwrap();\n\n assert_eq!(c, 2);\n\n\n\n let d: () = lua.query(\"a\").unwrap();\n\n assert_eq!(d, ());\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 18, "score": 67221.19703776261 }, { "content": "#[test]\n\nfn i32_to_string() {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", 2);\n\n\n\n let x: String = lua.query(\"a\").unwrap();\n\n assert_eq!(x, \"2\");\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 19, "score": 67221.19703776261 }, { "content": "#[test]\n\nfn write_i32s() {\n\n // TODO:\n\n\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", 2);\n\n let x: i32 = lua.query(\"a\").unwrap();\n\n assert_eq!(x, 2);\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 20, "score": 67221.19703776261 }, { "content": "#[test]\n\nfn string_to_i32() {\n\n let mut lua = Lua::new();\n\n\n\n 
lua.set(\"a\", \"2\");\n\n lua.set(\"b\", \"aaa\");\n\n\n\n let x: i32 = lua.query(\"a\").unwrap();\n\n assert_eq!(x, 2);\n\n\n\n let y: Option<i32> = lua.query(\"b\");\n\n assert!(y.is_none());\n\n}\n", "file_path": "tests/basic_types.rs", "rank": 21, "score": 67221.19703776261 }, { "content": "#[test]\n\nfn call_and_read_table() {\n\n let mut lua = Lua::new();\n\n\n\n let mut val: LuaTable = lua.exec_string(\"return {1, 2, 3};\").unwrap();\n\n assert_eq!(val.query::<u8, _>(1).unwrap(), 1);\n\n assert_eq!(val.query::<u8, _>(2).unwrap(), 2);\n\n assert_eq!(val.query::<u8, _>(3).unwrap(), 3);\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 22, "score": 66400.96125029077 }, { "content": "#[test]\n\nfn write_set() {\n\n let mut lua = Lua::new();\n\n\n\n let mut set = HashSet::new();\n\n set.insert(5);\n\n set.insert(8);\n\n set.insert(13);\n\n set.insert(21);\n\n set.insert(34);\n\n set.insert(55);\n\n\n\n lua.set(\"a\", set.clone());\n\n\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n\n\n let values: HashSet<i32> = table\n\n .iter()\n\n .filter_map(|e| e)\n\n .map(|(elem, set): (i32, bool)| {\n\n assert!(set);\n\n elem\n\n })\n\n .collect();\n\n\n\n assert_eq!(values, set);\n\n}\n", "file_path": "tests/rust_tables.rs", "rank": 23, "score": 66400.96125029077 }, { "content": "#[test]\n\nfn write_map() {\n\n let mut lua = Lua::new();\n\n\n\n let mut map = HashMap::new();\n\n map.insert(5, 8);\n\n map.insert(13, 21);\n\n map.insert(34, 55);\n\n\n\n lua.set(\"a\", map.clone());\n\n\n\n let mut table: LuaTable = lua.query(\"a\").unwrap();\n\n\n\n let values: HashMap<i32, i32> = table.iter().filter_map(|e| e).collect();\n\n assert_eq!(values, map);\n\n}\n\n\n", "file_path": "tests/rust_tables.rs", "rank": 24, "score": 66400.96125029077 }, { "content": "extern crate pm_rlua;\n\n\n\nuse pm_rlua::{Lua, LuaTable};\n\n\n\n#[test]\n", "file_path": "tests/lua_tables.rs", "rank": 36, "score": 53008.52283846721 }, { "content": "#[test]\n\nfn readwrite() {\n\n 
#[derive(Clone)]\n\n struct Foo;\n\n impl<'a> LuaPush for Foo {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n pm_rlua::userdata::push_userdata(self, lua, |_| {})\n\n }\n\n }\n\n impl<'a> LuaRead for &'a mut Foo {\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<&'a mut Foo> {\n\n pm_rlua::userdata::read_userdata(lua, index)\n\n }\n\n }\n\n\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", Foo {});\n\n let _: &mut Foo = lua.query(\"a\").unwrap();\n\n}\n\n\n", "file_path": "tests/userdata.rs", "rank": 37, "score": 45462.14060936393 }, { "content": "#[test]\n\nfn metatables() {\n\n #[derive(Clone)]\n\n struct Foo;\n\n impl<'a> LuaPush for Foo {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n pm_rlua::userdata::push_userdata(self, lua, |mut table| {\n\n table.set(\n\n \"__index\".to_string(),\n\n vec![\n\n // (\"test\".to_string(), pm_rlua::function0(|| 5)),\n\n (\"test1\".to_string(), pm_rlua::function1(|a: i32| a)),\n\n ],\n\n );\n\n })\n\n }\n\n }\n\n\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", Foo);\n\n\n\n let x: i32 = lua.exec_string(\"return a.test1(5)\").unwrap();\n\n assert_eq!(x, 5);\n\n}\n\n\n", "file_path": "tests/userdata.rs", "rank": 38, "score": 45462.14060936393 }, { "content": "#[test]\n\nfn closures() {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"add\", pm_rlua::function2(|a: i32, b: i32| a + b));\n\n lua.set(\"sub\", pm_rlua::function2(|a: i32, b: i32| a - b));\n\n\n\n let val1: i32 = lua.exec_string(\"return add(3, 7)\").unwrap();\n\n assert_eq!(val1, 10);\n\n\n\n let val2: i32 = lua.exec_string(\"return sub(5, 2)\").unwrap();\n\n assert_eq!(val2, 3);\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 39, "score": 45462.14060936393 }, { "content": "#[test]\n\nfn hotfix() {\n\n let mut lua = Lua::new();\n\n lua.openlibs();\n\n lua.enable_hotfix();\n\n let _: Option<()> = lua.exec_string(\n\n r\"\n\n local a = 1\n\n local b = 2\n", "file_path": "tests/hotfix.rs", "rank": 40, 
"score": 45462.14060936393 }, { "content": "#[test]\n\nfn basic() {\n\n let mut lua = Lua::new();\n\n let val: Option<i32> = lua.exec_string(\"return 5;\");\n\n assert_eq!(val.unwrap(), 5);\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 41, "score": 45462.14060936393 }, { "content": "#[test]\n\nfn closures_lifetime() {\n\n fn t<F>(f: F)\n\n where\n\n F: Fn(i32, i32) -> i32,\n\n {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"add\", pm_rlua::function2(f));\n\n\n\n let val1: i32 = lua.exec_string(\"return add(3, 7)\").unwrap();\n\n assert_eq!(val1, 10);\n\n }\n\n\n\n t(|a, b| a + b);\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 42, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn destructor_called() {\n\n use std::sync::{Arc, Mutex};\n\n\n\n let called = Arc::new(Mutex::new(false));\n\n\n\n struct Foo {\n\n called: Arc<Mutex<bool>>,\n\n }\n\n\n\n impl Drop for Foo {\n\n fn drop(&mut self) {\n\n let mut called = self.called.lock().unwrap();\n\n (*called) = true;\n\n }\n\n }\n\n\n\n impl<'a> LuaPush for Foo {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n pm_rlua::userdata::push_userdata(self, lua, |_| {})\n\n }\n", "file_path": "tests/userdata.rs", "rank": 43, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn hotfix_module() {\n\n let mut lua = Lua::new();\n\n lua.openlibs();\n\n lua.enable_hotfix();\n\n let _: Option<()> = lua.exec_string(\n\n r\"\n\n USER_D = {}\n\n local _ENV = USER_D\n\n local a = 1\n\n local b = 2\n", "file_path": "tests/hotfix.rs", "rank": 44, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn one_argument() {\n\n let mut lua = Lua::new();\n\n\n\n fn plus_one(val: i32) -> i32 {\n\n val + 1\n\n };\n\n lua.set(\"plus_one\", pm_rlua::function1(plus_one));\n\n\n\n let val: i32 = lua.exec_string(\"return plus_one(3)\").unwrap();\n\n assert_eq!(val, 4);\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 45, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn execution_error() {\n\n 
let mut lua = Lua::new();\n\n\n\n let val: Option<()> = lua.exec_string(\"return a:hello()\");\n\n assert!(val.is_none());\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 46, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn syntax_error() {\n\n let mut lua = Lua::new();\n\n let val: Option<()> = lua.exec_string(\"pm_rlua\");\n\n assert!(val.is_none());\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 47, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn two_arguments() {\n\n let mut lua = Lua::new();\n\n\n\n fn add(val1: i32, val2: i32) -> i32 {\n\n val1 + val2\n\n };\n\n lua.set(\"add\", pm_rlua::function2(add));\n\n\n\n let val: i32 = lua.exec_string(\"return add(3, 7)\").unwrap();\n\n assert_eq!(val, 10);\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 48, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn type_check() {\n\n #[derive(Clone)]\n\n struct Foo;\n\n impl<'a> LuaPush for Foo {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n pm_rlua::userdata::push_userdata(self, lua, |_| {})\n\n }\n\n }\n\n impl<'a> LuaRead for &'a mut Foo {\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<&'a mut Foo> {\n\n pm_rlua::userdata::read_userdata(lua, index)\n\n }\n\n }\n\n\n\n #[derive(Clone)]\n\n struct Bar;\n\n impl<'a> LuaPush for Bar {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n pm_rlua::userdata::push_userdata(self, lua, |_| {})\n\n }\n", "file_path": "tests/userdata.rs", "rank": 49, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn simple_function() {\n\n let mut lua = Lua::new();\n\n\n\n fn ret5() -> i32 {\n\n 5\n\n };\n\n lua.set(\"ret5\", pm_rlua::function0(ret5));\n\n\n\n let val: i32 = lua.exec_string(\"return ret5()\").unwrap();\n\n assert_eq!(val, 5);\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 50, "score": 44046.49408146918 }, { "content": "#[test]\n\nfn readwrite_floats() {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", 2.51234 as 
f32);\n\n lua.set(\"b\", 3.4123456789 as f64);\n\n\n\n let x: f32 = lua.query(\"a\").unwrap();\n\n assert!(x - 2.51234 < 0.000001);\n\n\n\n let y: f64 = lua.query(\"a\").unwrap();\n\n assert!(y - 2.51234 < 0.000001);\n\n\n\n let z: f32 = lua.query(\"b\").unwrap();\n\n assert!(z - 3.4123456789 < 0.000001);\n\n\n\n let w: f64 = lua.query(\"b\").unwrap();\n\n assert!(w - 3.4123456789 < 0.000001);\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 51, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn test_exec_func() {\n\n let mut lua = Lua::new();\n\n {\n\n let mut index = 5;\n\n lua.set(\"add\", pm_rlua::function1(|a: i32| index += a));\n\n let success: i32 = lua.exec_func1(\"add\", 3);\n\n assert!(success == 0);\n\n assert_eq!(index, 8);\n\n }\n\n {\n\n let mut index = 5;\n\n lua.set(\n\n \"sub\",\n\n pm_rlua::function3(|a: i32, b: u32, _c: String| index -= a + b as i32),\n\n );\n\n let success: i32 = lua.exec_func3(\"sub\", 3, 1, \"\".to_string());\n\n assert!(success == 0);\n\n assert_eq!(index, 1);\n\n }\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 52, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn get_set_test() {\n\n let mut lua = Lua::new();\n\n #[derive(Clone, Debug)]\n\n struct Foo {\n\n a: i32,\n\n };\n\n\n\n impl<'a> pm_rlua::LuaPush for Foo {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n pm_rlua::userdata::push_userdata(self, lua, |_| {})\n\n }\n\n }\n\n impl<'a> pm_rlua::LuaRead for &'a mut Foo {\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<&'a mut Foo> {\n\n pm_rlua::userdata::read_userdata(lua, index)\n\n }\n\n }\n\n\n\n let xx = Foo { a: 10 };\n\n lua.set(\"a\", xx);\n\n let get: &mut Foo = lua.query(\"a\").unwrap();\n\n assert!(get.a == 10);\n\n get.a = 100;\n\n\n\n let get: &mut Foo = lua.query(\"a\").unwrap();\n\n assert!(get.a == 100);\n\n}\n\n\n", "file_path": "tests/userdata.rs", "rank": 53, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn 
closures_extern_access() {\n\n let mut a = 5;\n\n\n\n {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"inc\", pm_rlua::function0(|| a += 1));\n\n for _ in 0..15 {\n\n let _: () = lua.exec_string(\"inc()\").unwrap();\n\n }\n\n }\n\n\n\n assert_eq!(a, 20)\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 54, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn wrong_arguments_types() {\n\n let mut lua = Lua::new();\n\n\n\n fn add(val1: i32, val2: i32) -> i32 {\n\n val1 + val2\n\n };\n\n lua.set(\"add\", pm_rlua::function2(add));\n\n let val: Option<i32> = lua.exec_string(\"return add(3, \\\"hello\\\")\");\n\n match val {\n\n None => (),\n\n _ => panic!(),\n\n }\n\n}\n\n\n", "file_path": "tests/functions.rs", "rank": 55, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn hotfix_local_funcion() {\n\n let mut lua = Lua::new();\n\n lua.openlibs();\n\n lua.enable_hotfix();\n\n let _: Option<()> = lua.exec_string(\n\n r\"\n\n CACHE_D = {}\n\n local _ENV = CACHE_D\n\n\n\n timer_map = {}\n\n\n\n local function delete_timer(rid)\n\n local map_info = timer_map[rid]\n\n if map_info == nil then\n\n return\n\n end\n\n if is_valid_timer(map_info['timer_id']) then\n\n delete_timer(map_info['timer_id'])\n\n end\n\n timer_map[rid] = nil\n\n end\n\n\n\n local function load_user_callback(data)\n\n assert(data['rid'] ~= nil, 'callback rid must no empty')\n\n if data.is_redis then\n\n delete_timer(data['rid'])\n\n end\n\n load_data_from_db(data['rid'], load_user_callback)\n\n end\n\n\n", "file_path": "tests/hotfix.rs", "rank": 56, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn readwrite_strings() {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", \"hello\");\n\n lua.set(\"b\", \"hello\".to_string());\n\n let unvaild = String::from_utf8_lossy(&[\n\n 8, 0, 34, 0, 3, 0, 58, 0, 0, 0, 33, 0, 40, 0, 34, 0, 3, 0, 26, 0, 0, 0, 34, 0, 127, 0, 35,\n\n 0, 0, 0, 35, 0, 14,\n\n ])\n\n .to_string();\n\n lua.set(\"c\", unvaild);\n\n\n\n let x: String = 
lua.query(\"a\").unwrap();\n\n assert_eq!(x, \"hello\");\n\n\n\n let y: String = lua.query(\"b\").unwrap();\n\n assert_eq!(y, \"hello\");\n\n\n\n let z: String = lua.query(\"c\").unwrap();\n\n assert_eq!(z, \"UNVAILED STRING\");\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 57, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn readwrite_bools() {\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", true);\n\n lua.set(\"b\", false);\n\n\n\n let x: bool = lua.query(\"a\").unwrap();\n\n assert_eq!(x, true);\n\n\n\n let y: bool = lua.query(\"b\").unwrap();\n\n assert_eq!(y, false);\n\n}\n\n\n", "file_path": "tests/basic_types.rs", "rank": 58, "score": 42778.49586358179 }, { "content": "#[test]\n\nfn test_exec_func_by_param() {\n\n let mut lua = Lua::new();\n\n lua.openlibs();\n\n let func = r\"\n", "file_path": "tests/functions.rs", "rank": 59, "score": 41636.19182424588 }, { "content": "/// Trait implemented on `Function` to mimic `FnMut`.\n\npub trait FunctionExt<P> {\n\n type Output;\n\n\n\n fn call_mut(&mut self, params: P) -> Self::Output;\n\n}\n\n\n\nmacro_rules! 
impl_function_ext {\n\n () => (\n\n impl<Z, R> FunctionExt<()> for Function<Z, (), R> where Z: FnMut() -> R {\n\n type Output = R;\n\n\n\n #[allow(non_snake_case)]\n\n fn call_mut(&mut self, _: ()) -> Self::Output {\n\n (self.function)()\n\n }\n\n }\n\n\n\n impl<Z, R> LuaPush for Function<Z, (), R>\n\n where Z: FnMut() -> R,\n\n R: LuaPush + 'static\n", "file_path": "src/functions.rs", "rank": 60, "score": 41475.33932079628 }, { "content": "\n\nimpl<T> LuaRead for Vec<T>\n\nwhere\n\n T: LuaRead,\n\n{\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<Vec<T>> {\n\n let mut lua_table: LuaTable =\n\n unwrap_or!(LuaRead::lua_read_at_position(lua, index), return None);\n\n let mut result = vec![];\n\n let len = lua_table.table_len();\n\n for i in 1..(len + 1) {\n\n let val: T = unwrap_or!(lua_table.query(i), return None);\n\n result.push(val);\n\n }\n\n Some(result)\n\n }\n\n}\n", "file_path": "src/rust_tables.rs", "rank": 61, "score": 26392.064375738395 }, { "content": "\n\nimpl<K, V> LuaPush for HashMap<K, V>\n\nwhere\n\n K: LuaPush + Eq + Hash,\n\n V: LuaPush,\n\n{\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n push_rec_iter(lua, self.into_iter())\n\n }\n\n}\n\n\n\nimpl<K> LuaPush for HashSet<K>\n\nwhere\n\n K: LuaPush + Eq + Hash,\n\n{\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n use std::iter;\n\n push_rec_iter(lua, self.into_iter().zip(iter::repeat(true)))\n\n }\n\n}\n", "file_path": "src/rust_tables.rs", "rank": 62, "score": 26383.92859629773 }, { "content": " 1\n\n}\n\n\n\nimpl<T> LuaPush for Vec<T>\n\nwhere\n\n T: LuaPush,\n\n{\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n push_iter(lua, self.into_iter())\n\n }\n\n}\n\n\n\nimpl<'a, T> LuaPush for &'a [T]\n\nwhere\n\n T: Clone + LuaPush,\n\n{\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n push_iter(lua, self.iter().map(|e| e.clone()))\n\n }\n\n}\n", "file_path": "src/rust_tables.rs", "rank": 63, "score": 26383.683392468145 }, { 
"content": "use td_clua;\n\nuse td_clua::lua_State;\n\n\n\nuse LuaPush;\n\nuse LuaRead;\n\nuse LuaTable;\n\n\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::hash::Hash;\n\n\n", "file_path": "src/rust_tables.rs", "rank": 64, "score": 26382.123108675118 }, { "content": "extern crate pm_rlua;\n\n\n\nuse pm_rlua::{Lua, LuaTable};\n\nuse std::collections::{HashMap, HashSet};\n\n\n\n#[test]\n", "file_path": "tests/rust_tables.rs", "rank": 65, "score": 26381.42912384288 }, { "content": " function update_table(env_t, g_t, name, deep)\n\n if protection[env_t] or protection[g_t] then return end\n\n if env_t == g_t then return end\n\n local signature = tostring(g_t)..tostring(env_t)\n\n if visited_sig[signature] then return end\n\n visited_sig[signature] = true\n\n for name, value in pairs(env_t) do\n\n local old_value = g_t[name]\n\n if type(value) == type(old_value) then\n\n if type(value) == 'function' then\n\n update_func(value, old_value, name, deep..' '..name..' ')\n\n g_t[name] = value\n\n elseif type(value) == 'table' then\n\n update_table(value, old_value, name, deep..' '..name..' ')\n\n end\n\n else\n\n g_t[name] = value\n\n end\n\n end\n\n\n\n local old_meta = debug.getmetatable(g_t)\n\n local new_meta = debug.getmetatable(env_t)\n\n if type(old_meta) == 'table' and type(new_meta) == 'table' then\n\n update_table(new_meta, old_meta, name..'s Meta', deep..' '..name..'s Meta'..' 
' )\n\n end\n\n end\n\n\n", "file_path": "src/hotfix.rs", "rank": 66, "score": 21391.397383416836 }, { "content": "\n\nimpl LuaPush for bool {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n unsafe { td_clua::lua_pushboolean(lua, self as libc::c_int) };\n\n 1\n\n }\n\n}\n\n\n\nimpl LuaRead for bool {\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<bool> {\n\n if !unsafe { td_clua::lua_isboolean(lua, index) } {\n\n return None;\n\n }\n\n\n\n Some(unsafe { td_clua::lua_toboolean(lua, index) != 0 })\n\n }\n\n}\n\n\n\nimpl LuaPush for () {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n", "file_path": "src/values.rs", "rank": 67, "score": 21.107828978891323 }, { "content": "\n\nmacro_rules! numeric_impl(\n\n ($t:ident) => (\n\n impl LuaPush for $t {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n unsafe { td_clua::lua_pushnumber(lua, self as f64) };\n\n 1\n\n }\n\n }\n\n\n\n impl LuaRead for $t {\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<$t> {\n\n let mut success = 0;\n\n let val = unsafe { td_clua::lua_tonumberx(lua, index, &mut success) };\n\n match success {\n\n 0 => None,\n\n _ => Some(val as $t)\n\n }\n\n }\n\n }\n", "file_path": "src/values.rs", "rank": 68, "score": 19.600287641322524 }, { "content": "use std::ffi::{CStr, CString};\n\n\n\nuse libc;\n\nuse td_clua;\n\nuse td_clua::lua_State;\n\n\n\nuse LuaPush;\n\nuse LuaRead;\n\n\n\nmacro_rules! 
integer_impl(\n\n ($t:ident) => (\n\n impl LuaPush for $t {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n unsafe { td_clua::lua_pushinteger(lua, self as td_clua::lua_Integer) };\n\n 1\n\n }\n\n }\n\n\n\n impl LuaRead for $t {\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<$t> {\n", "file_path": "src/values.rs", "rank": 69, "score": 19.56746987185574 }, { "content": " None => {\n\n let mut index = table.empty_table(\"__index\");\n\n index.set(name, param);\n\n }\n\n };\n\n };\n\n self\n\n }\n\n\n\n pub fn register(\n\n &mut self,\n\n name: &str,\n\n func: extern \"C\" fn(*mut td_clua::lua_State) -> libc::c_int,\n\n ) -> &mut LuaStruct<T> {\n\n let tname = T::name();\n\n let mut lua = Lua::from_existing_state(self.lua, false);\n\n if let Some(mut table) = lua.query::<LuaTable, _>(tname.clone()) {\n\n match table.query::<LuaTable, _>(\"__index\") {\n\n Some(mut index) => {\n\n index.register(name, func);\n", "file_path": "src/userdata.rs", "rank": 70, "score": 18.55001388751327 }, { "content": " fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<String> {\n\n let mut size: libc::size_t = 0;\n\n let c_str_raw = unsafe { td_clua::lua_tolstring(lua, index, &mut size) };\n\n if c_str_raw.is_null() {\n\n return None;\n\n }\n\n\n\n let c_str = unsafe { CStr::from_ptr(c_str_raw) };\n\n let c_str = String::from_utf8_lossy(c_str.to_bytes());\n\n Some(c_str.to_string())\n\n }\n\n}\n\n\n\nimpl<'s> LuaPush for &'s str {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n let value = CString::new(&self[..]).unwrap();\n\n unsafe { td_clua::lua_pushstring(lua, value.as_ptr()) };\n\n 1\n\n }\n\n}\n", "file_path": "src/values.rs", "rank": 71, "score": 17.96324883278926 }, { "content": "#### User data\n\n\n\nWhen you expose functions to Lua, you may wish to read or write more elaborate objects. 
This is called a **user data**.\n\n\n\nTo do so, you should implement the `LuaPush` for your types.\n\nThis is usually done by redirecting the call to `userdata::push_userdata`.\n\nit will operate the ref of object\n\nif you use `userdata::push_userdata` the userdata will copy one time, for lua gc manager\n\nif you use `userdata::push_lightuserdata` the userdata life manager by rust, so none copy will occup\n\n\n\n```rust\n\n#[derive(Clone, Debug)]\n\nstruct Foo {\n\n a : i32,\n\n};\n\n\n\nimpl<'a> pm_rlua::LuaPush for &'a mut Foo {\n\n fn push_to_lua(self, lua: *mut c_lua::lua_State) -> i32 {\n\n pm_rlua::userdata::push_userdata(self, lua, |_|{})\n\n }\n\n}\n\nimpl<'a> pm_rlua::LuaRead for &'a mut Foo {\n\n fn lua_read_at_position(lua: *mut c_lua::lua_State, index: i32) -> Option<&'a mut Foo> {\n\n pm_rlua::userdata::read_userdata(lua, index)\n\n }\n\n}\n\n\n\nlet xx = &mut Foo {\n\n a : 10,\n\n};\n\nlua.set(\"a\", xx);\n\nlet get: &mut Foo = lua.query(\"a\").unwrap();\n\nassert!(get.a == 10);\n\nget.a = 100;\n\n\n\nlet get: &mut Foo = lua.query(\"a\").unwrap();\n\nassert!(get.a == 100);\n\n```\n\nuse lightuserdata you can change\n\n```rust\n\nimpl<'a> pm_rlua::LuaPush for &'a mut Foo {\n\n fn push_to_lua(self, lua: *mut c_lua::lua_State) -> i32 {\n\n pm_rlua::userdata::push_lightuserdata(self, lua, |_|{})\n\n }\n\n}\n\n```\n\n\n\ncustom lua call userdata need impl NewStruct\n\n```rust\n\n#[derive(Clone, Debug)]\n\nstruct TestLuaSturct {\n\n index : i32,\n\n}\n\n\n\nimpl NewStruct for TestLuaSturct {\n\n fn new() -> TestLuaSturct {\n\n println!(\"new !!!!!!!!!!!!!!\");\n\n TestLuaSturct {\n\n index : 19,\n\n }\n\n }\n\n\n\n fn name() -> &'static str {\n\n \"TestLuaSturct\"\n\n }\n\n}\n\n\n\nimpl<'a> LuaRead for &'a mut TestLuaSturct {\n\n fn lua_read_at_position(lua: *mut c_lua::lua_State, index: i32) -> Option<&'a mut TestLuaSturct> {\n\n pm_rlua::userdata::read_userdata(lua, index)\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 72, "score": 
17.929632581950504 }, { "content": " }\n\n impl<'a> LuaRead for &'a mut Bar {\n\n fn lua_read_with_pop(lua: *mut lua_State, index: i32, _pop: i32) -> Option<&'a mut Bar> {\n\n pm_rlua::userdata::read_userdata(lua, index)\n\n }\n\n }\n\n\n\n let mut lua = Lua::new();\n\n\n\n lua.set(\"a\", Foo);\n\n\n\n let x: Option<&mut Bar> = lua.query(\"a\");\n\n assert!(x.is_none())\n\n}\n\n\n", "file_path": "tests/userdata.rs", "rank": 73, "score": 17.436063937852495 }, { "content": "use td_clua;\n\n\n\nuse LuaPush;\n\nuse LuaRead;\n\n\n\nmacro_rules! tuple_impl {\n\n ($ty:ident) => (\n\n impl<$ty> LuaPush for ($ty,) where $ty: LuaPush {\n\n fn push_to_lua(self, lua: *mut td_clua::lua_State) -> i32 {\n\n self.0.push_to_lua(lua)\n\n }\n\n }\n\n\n\n impl<$ty> LuaRead for ($ty,) where $ty: LuaRead {\n\n fn lua_read_with_pop(lua: *mut td_clua::lua_State, index: i32, _pop: i32) -> Option<($ty,)> {\n\n LuaRead::lua_read_at_position(lua, index).map(|v| (v,))\n\n }\n\n }\n\n );\n\n\n", "file_path": "src/tuples.rs", "rank": 74, "score": 16.705221107120874 }, { "content": " }\n\n}\n\n\n\nimpl Drop for LuaGuard {\n\n fn drop(&mut self) {\n\n if self.size != 0 {\n\n unsafe { td_clua::lua_pop(self.lua, self.size) }\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 75, "score": 16.69222919126632 }, { "content": " unsafe { td_clua::lua_pushnil(lua) };\n\n 1\n\n }\n\n}\n\n\n\nimpl LuaRead for () {\n\n fn lua_read_with_pop(_: *mut lua_State, _: i32, _pop: i32) -> Option<()> {\n\n Some(())\n\n }\n\n}\n", "file_path": "src/values.rs", "rank": 76, "score": 16.39423494692345 }, { "content": " }\n\n\n\n td_clua::lua_setmetatable(self.lua, -2);\n\n }\n\n td_clua::lua_pop(self.lua, 1);\n\n }\n\n self\n\n }\n\n\n\n pub fn def<P>(&mut self, name: &str, param: P) -> &mut LuaStruct<T>\n\n where\n\n P: LuaPush,\n\n {\n\n let tname = T::name();\n\n let mut lua = Lua::from_existing_state(self.lua, false);\n\n if let Some(mut table) = lua.query::<LuaTable, _>(tname.clone()) {\n\n match 
table.query::<LuaTable, _>(\"__index\") {\n\n Some(mut index) => {\n\n index.set(name, param);\n\n }\n", "file_path": "src/userdata.rs", "rank": 77, "score": 16.366622982803168 }, { "content": "pub mod lua_tables;\n\npub mod rust_tables;\n\npub mod tuples;\n\npub mod userdata;\n\npub mod values;\n\n\n\npub use functions::{\n\n function0, function1, function10, function2, function3, function4, function5, function6,\n\n function7, function8, function9, Function,\n\n};\n\npub use lua_tables::LuaTable;\n\npub use td_clua::*;\n\npub use userdata::{push_lightuserdata, push_userdata, read_userdata, LuaStruct, NewStruct};\n\npub struct Lua {\n\n lua: *mut lua_State,\n\n own: bool,\n\n}\n\n\n\npub struct LuaGuard {\n\n pub lua: *mut lua_State,\n", "file_path": "src/lib.rs", "rank": 78, "score": 16.216619075702145 }, { "content": " }\n\n }\n\n\n\n impl<'a> LuaRead for &'a mut TestLuaSturct {\n\n fn lua_read_with_pop(\n\n lua: *mut lua_State,\n\n index: i32,\n\n _pop: i32,\n\n ) -> Option<&'a mut TestLuaSturct> {\n\n pm_rlua::userdata::read_userdata(lua, index)\n\n }\n\n }\n\n\n\n let mut lua = Lua::new();\n\n lua.openlibs();\n\n fn one_arg(obj: &mut TestLuaSturct) -> i32 {\n\n obj.index = 10;\n\n 5\n\n };\n\n fn two_arg(obj: &mut TestLuaSturct, index: i32) {\n", "file_path": "tests/userdata.rs", "rank": 79, "score": 15.97695691332747 }, { "content": " pub fn from_existing_state(lua: *mut lua_State, close_at_the_end: bool) -> Lua {\n\n Lua {\n\n lua: lua,\n\n own: close_at_the_end,\n\n }\n\n }\n\n\n\n pub fn register<I>(\n\n &mut self,\n\n index: I,\n\n func: extern \"C\" fn(*mut td_clua::lua_State) -> libc::c_int,\n\n ) -> i32\n\n where\n\n I: Borrow<str>,\n\n {\n\n let index = CString::new(index.borrow()).unwrap();\n\n unsafe { td_clua::lua_register(self.state(), index.as_ptr(), func) };\n\n 0\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 80, "score": 15.900074493339213 }, { "content": " // TODO: what if T or U are also tuples? 
indices won't match\n\n #[allow(unused_assignments)]\n\n #[allow(non_snake_case)]\n\n impl<$first: LuaRead, $($other: LuaRead),+>\n\n LuaRead for ($first, $($other),+)\n\n {\n\n fn lua_read_with_pop(lua: *mut td_clua::lua_State, index: i32, _pop: i32) -> Option<($first, $($other),+)> {\n\n let mut i = index;\n\n let $first: $first = match LuaRead::lua_read_at_position(lua, i) {\n\n Some(v) => v,\n\n None => return None\n\n };\n\n\n\n i += 1;\n\n\n\n $(\n\n let $other: $other = match LuaRead::lua_read_at_position(lua, i) {\n\n Some(v) => v,\n\n None => return None\n\n };\n", "file_path": "src/tuples.rs", "rank": 81, "score": 15.722531199136835 }, { "content": " let mut index = 0;\n\n $(\n\n index += $p.push_to_lua(self.state());\n\n )*\n\n\n\n let success = td_clua::lua_pcall(state, index, 0, -index - 2);\n\n if success != 0 {\n\n td_clua::lua_pop(state, 1);\n\n }\n\n td_clua::lua_pop(state, 1);\n\n success\n\n }\n\n }\n\n )\n\n}\n\n\n\nimpl Default for Lua {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n", "file_path": "src/lib.rs", "rank": 82, "score": 15.53518455836693 }, { "content": " }\n\n }\n\n\n\n pub fn new_light(lua: *mut lua_State) -> LuaStruct<T> {\n\n LuaStruct {\n\n lua: lua,\n\n light: true,\n\n marker: PhantomData,\n\n }\n\n }\n\n\n\n pub fn ensure_matetable(&mut self) {\n\n let name = T::name();\n\n let mut lua = Lua::from_existing_state(self.lua, false);\n\n\n\n match lua.query::<LuaTable, _>(name.clone()) {\n\n Some(_) => {}\n\n None => unsafe {\n\n td_clua::lua_newtable(self.lua);\n\n\n", "file_path": "src/userdata.rs", "rank": 83, "score": 15.514587058920087 }, { "content": " let mut success = 0;\n\n let val = unsafe { td_clua::lua_tointegerx(lua, index, &mut success) };\n\n match success {\n\n 0 => None,\n\n _ => Some(val as $t)\n\n }\n\n }\n\n }\n\n 
);\n\n);\n\n\n\ninteger_impl!(i8);\n\ninteger_impl!(i16);\n\ninteger_impl!(i32);\n\ninteger_impl!(i64);\n\ninteger_impl!(u8);\n\ninteger_impl!(u16);\n\ninteger_impl!(u32);\n\ninteger_impl!(u64);\n\ninteger_impl!(usize);\n", "file_path": "src/values.rs", "rank": 84, "score": 14.696720145998853 }, { "content": "use libc;\n\nuse td_clua::{self, lua_State};\n\nuse LuaPush;\n\nuse LuaRead;\n\n\n\nuse std::marker::PhantomData;\n\nuse std::mem;\n\nuse std::ptr;\n\n\n\nmacro_rules! impl_function {\n\n ($name:ident, $($p:ident),*) => (\n\n/// Wraps a type that implements `FnMut` so that it can be used by hlua.\n\n///\n\n/// This is only needed because of a limitation in Rust's inferrence system.\n\n pub fn $name<Z, R $(, $p)*>(f: Z) -> Function<Z, ($($p,)*), R> where Z: FnMut($($p),*) -> R {\n\n Function {\n\n function: f,\n\n marker: PhantomData,\n\n }\n\n }\n", "file_path": "src/functions.rs", "rank": 85, "score": 14.000276512052004 }, { "content": "use std::any::{Any, TypeId};\n\nuse std::boxed::Box;\n\nuse std::ffi::CString;\n\nuse std::marker::PhantomData;\n\nuse std::mem;\n\nuse std::ptr;\n\n\n\nuse libc;\n\nuse td_clua;\n\nuse td_clua::lua_State;\n\nuse Lua;\n\nuse LuaPush;\n\nuse LuaRead;\n\nuse LuaTable;\n\n\n\n// Called when an object inside Lua is being dropped.\n\n#[inline]\n\nextern \"C\" fn destructor_wrapper<T>(lua: *mut td_clua::lua_State) -> libc::c_int {\n\n unsafe {\n\n let obj = td_clua::lua_touserdata(lua, -1);\n", "file_path": "src/userdata.rs", "rank": 86, "score": 13.815829178122804 }, { "content": " {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n unsafe {\n\n // pushing the function pointer as a userdata\n\n let lua_data = td_clua::lua_newuserdata(lua, mem::size_of::<Z>() as libc::size_t);\n\n let lua_data: *mut Z = mem::transmute(lua_data);\n\n ptr::write(lua_data, self.function);\n\n\n\n // pushing wrapper as a closure\n\n let wrapper: extern fn(*mut td_clua::lua_State) -> libc::c_int = wrapper::<Self, _, R>;\n\n 
td_clua::lua_pushcclosure(lua, wrapper, 1);\n\n 1\n\n }\n\n }\n\n }\n\n );\n\n\n\n ($($p:ident),+) => (\n\n impl<Z, R $(,$p)*> FunctionExt<($($p,)*)> for Function<Z, ($($p,)*), R> where Z: FnMut($($p),*) -> R {\n\n type Output = R;\n", "file_path": "src/functions.rs", "rank": 87, "score": 13.6522922955922 }, { "content": "\n\n #[allow(non_snake_case)]\n\n fn call_mut(&mut self, params: ($($p,)*)) -> Self::Output {\n\n let ($($p,)*) = params;\n\n (self.function)($($p),*)\n\n }\n\n }\n\n\n\n impl<Z, R $(,$p: 'static)+> LuaPush for Function<Z, ($($p,)*), R>\n\n where Z: FnMut($($p),*) -> R,\n\n ($($p,)*): LuaRead,\n\n R: LuaPush + 'static\n\n {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n unsafe {\n\n // pushing the function pointer as a userdata\n\n let lua_data = td_clua::lua_newuserdata(lua, mem::size_of::<Z>() as libc::size_t);\n\n let lua_data: *mut Z = mem::transmute(lua_data);\n\n ptr::write(lua_data, self.function);\n\n\n", "file_path": "src/functions.rs", "rank": 88, "score": 13.608132036350131 }, { "content": " /// Opens all standard Lua libraries.\n\n /// This is done by calling `luaL_openlibs`.\n\n pub fn openlibs(&mut self) {\n\n unsafe { td_clua::luaL_openlibs(self.lua) }\n\n }\n\n\n\n /// Reads the value of a global variable.\n\n pub fn query<V, I>(&mut self, index: I) -> Option<V>\n\n where\n\n I: Borrow<str>,\n\n V: LuaRead,\n\n {\n\n let index = CString::new(index.borrow()).unwrap();\n\n unsafe {\n\n td_clua::lua_getglobal(self.lua, index.as_ptr());\n\n }\n\n let _guard = LuaGuard::new(self.lua, 1);\n\n LuaRead::lua_read_with_pop(self.state(), -1, 1)\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 89, "score": 13.512590942149089 }, { "content": " pub size: i32,\n\n}\n\n\n\nimpl LuaGuard {\n\n pub fn forget(mut self) -> i32 {\n\n let size = self.size;\n\n self.size = 0;\n\n size\n\n }\n\n\n\n pub fn empty(&self) -> LuaGuard {\n\n LuaGuard {\n\n lua: self.lua,\n\n size: 0,\n\n }\n\n }\n\n\n\n pub fn new_empty(lua: *mut lua_State) -> 
LuaGuard {\n\n LuaGuard { lua: lua, size: 0 }\n\n }\n", "file_path": "src/lib.rs", "rank": 90, "score": 13.470128385752094 }, { "content": " .unwrap();\n\n unsafe {\n\n td_clua::luaL_error(self.state(), err_detail.as_ptr());\n\n }\n\n }\n\n 1\n\n }\n\n\n\n /// enable hotfix, can update the new func, and the old data will be keep and bind to the new func\n\n pub fn enable_hotfix(&mut self) {\n\n hotfix::load_hot_fix(self);\n\n }\n\n\n\n pub fn exec_gc(&mut self) -> i32 {\n\n unsafe { td_clua::lua_gc(self.state(), td_clua::LUA_GCCOLLECT, 0) as i32 }\n\n }\n\n\n\n impl_exec_func!(exec_func0,);\n\n impl_exec_func!(exec_func1, A);\n\n impl_exec_func!(exec_func2, A, B);\n", "file_path": "src/lib.rs", "rank": 91, "score": 12.74346831621465 }, { "content": " }\n\n None => {\n\n let mut index = table.empty_table(\"__index\");\n\n index.register(name, func);\n\n }\n\n };\n\n };\n\n self\n\n }\n\n}\n", "file_path": "src/userdata.rs", "rank": 92, "score": 12.720243822017128 }, { "content": " ptr::drop_in_place(obj as *mut T);\n\n 0\n\n }\n\n}\n\n\n\nextern \"C\" fn constructor_wrapper<T>(lua: *mut td_clua::lua_State) -> libc::c_int\n\nwhere\n\n T: NewStruct + Any,\n\n{\n\n let t = T::new();\n\n let lua_data_raw =\n\n unsafe { td_clua::lua_newuserdata(lua, mem::size_of::<T>() as libc::size_t) };\n\n unsafe {\n\n ptr::write(lua_data_raw as *mut _, t);\n\n }\n\n let typeid = CString::new(T::name()).unwrap();\n\n unsafe {\n\n td_clua::lua_getglobal(lua, typeid.as_ptr());\n\n td_clua::lua_setmetatable(lua, -2);\n\n }\n", "file_path": "src/userdata.rs", "rank": 93, "score": 12.60838181300924 }, { "content": " /// Modifies the value of a global variable.\n\n pub fn set<I, V>(&mut self, index: I, value: V)\n\n where\n\n I: Borrow<str>,\n\n for<'a> V: LuaPush,\n\n {\n\n let index = CString::new(index.borrow()).unwrap();\n\n value.push_to_lua(self.state());\n\n unsafe {\n\n td_clua::lua_setglobal(self.lua, index.as_ptr());\n\n }\n\n }\n\n\n\n pub fn exec_string<I, R>(&mut self, index: 
I) -> Option<R>\n\n where\n\n I: Borrow<str>,\n\n R: LuaRead,\n\n {\n\n let index = CString::new(index.borrow()).unwrap();\n\n unsafe {\n", "file_path": "src/lib.rs", "rank": 94, "score": 12.504622417188779 }, { "content": " let state = self.state();\n\n let error = CString::new(\"error_handle\").unwrap();\n\n td_clua::lua_getglobal(state, error.as_ptr());\n\n td_clua::luaL_loadstring(state, index.as_ptr());\n\n let success = td_clua::lua_pcall(state, 0, 1, -2);\n\n if success != 0 {\n\n td_clua::lua_pop(state, 1);\n\n return None;\n\n }\n\n LuaRead::lua_read(state)\n\n }\n\n }\n\n\n\n pub fn exec_func<I, R>(&mut self, index: I) -> Option<R>\n\n where\n\n I: Borrow<str>,\n\n R: LuaRead,\n\n {\n\n let index = CString::new(index.borrow()).unwrap();\n\n unsafe {\n", "file_path": "src/lib.rs", "rank": 95, "score": 12.245383975833086 }, { "content": " {\n\n let index2 = CString::new(index.borrow()).unwrap();\n\n unsafe {\n\n td_clua::lua_newtable(self.state());\n\n td_clua::lua_setglobal(self.state(), index2.as_ptr());\n\n }\n\n self.query(index).unwrap()\n\n }\n\n\n\n pub fn add_lualoader(\n\n &mut self,\n\n func: extern \"C\" fn(*mut td_clua::lua_State) -> libc::c_int,\n\n ) -> i32 {\n\n let state = self.state();\n\n unsafe {\n\n let package = CString::new(\"package\").unwrap();\n\n let searchers = CString::new(\"searchers\").unwrap();\n\n td_clua::lua_getglobal(state, package.as_ptr());\n\n td_clua::lua_getfield(state, -1, searchers.as_ptr());\n\n td_clua::lua_pushcfunction(state, func);\n", "file_path": "src/lib.rs", "rank": 96, "score": 12.230245393585061 }, { "content": " let state = self.state();\n\n let error = CString::new(\"error_handle\").unwrap();\n\n let top = td_clua::lua_gettop(state);\n\n td_clua::lua_getglobal(state, index.as_ptr());\n\n td_clua::lua_insert(state, -top - 1);\n\n td_clua::lua_getglobal(state, error.as_ptr());\n\n td_clua::lua_insert(state, -top - 2);\n\n let success = td_clua::lua_pcall(state, top, 1, -top - 2);\n\n if success != 0 {\n\n 
td_clua::lua_pop(state, 1);\n\n return None;\n\n }\n\n LuaRead::lua_read(state)\n\n }\n\n }\n\n\n\n /// Inserts an empty table, then loads it.\n\n pub fn empty_table<I>(&mut self, index: I) -> LuaTable\n\n where\n\n I: Borrow<str>,\n", "file_path": "src/lib.rs", "rank": 97, "score": 12.003713325454823 }, { "content": "\n\n pub fn new(lua: *mut lua_State, size: i32) -> LuaGuard {\n\n LuaGuard {\n\n lua: lua,\n\n size: size,\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! impl_exec_func {\n\n ($name:ident, $($p:ident),*) => (\n\n #[allow(non_snake_case, unused_mut)]\n\n pub fn $name<Z, $($p),*>(&mut self, func_name : Z, $($p : $p, )*) -> i32 where Z: Borrow<str>, $($p : LuaPush),* {\n\n let func_name = CString::new(func_name.borrow()).unwrap();\n\n unsafe {\n\n let state = self.state();\n\n let error = CString::new(\"error_handle\").unwrap();\n\n lua_getglobal(state, error.as_ptr());\n\n td_clua::lua_getglobal(state, func_name.as_ptr());\n\n\n", "file_path": "src/lib.rs", "rank": 98, "score": 11.490460080531953 }, { "content": " );\n\n);\n\n\n\nnumeric_impl!(f32);\n\nnumeric_impl!(f64);\n\n\n\nimpl LuaPush for String {\n\n fn push_to_lua(self, lua: *mut lua_State) -> i32 {\n\n if let Ok(value) = CString::new(&self[..]) {\n\n unsafe { td_clua::lua_pushstring(lua, value.as_ptr()) };\n\n 1\n\n } else {\n\n let value = CString::new(&\"UNVAILED STRING\"[..]).unwrap();\n\n unsafe { td_clua::lua_pushstring(lua, value.as_ptr()) };\n\n 1\n\n }\n\n }\n\n}\n\n\n\nimpl LuaRead for String {\n", "file_path": "src/values.rs", "rank": 99, "score": 11.47648995582452 } ]
Rust
tokens/src/tests_events.rs
ajuna-network/open-runtime-module-library
070457de18d26d2daed6abdfa57d8a951a525605
#![cfg(test)] use super::*; use frame_support::assert_ok; use mock::{Event, *}; #[test] fn pallet_multicurrency_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiCurrency<AccountId>>::transfer(DOT, &ALICE, &BOB, 10)); System::assert_last_event(Event::Tokens(crate::Event::Transfer { currency_id: DOT, from: ALICE, to: BOB, amount: 10, })); assert_ok!(<Tokens as MultiCurrency<AccountId>>::deposit(DOT, &ALICE, 10)); System::assert_last_event(Event::Tokens(crate::Event::Deposited { currency_id: DOT, who: ALICE, amount: 10, })); assert_ok!(<Tokens as MultiCurrency<AccountId>>::withdraw(DOT, &ALICE, 10)); System::assert_last_event(Event::Tokens(crate::Event::Withdrawn { currency_id: DOT, who: ALICE, amount: 10, })); assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve(DOT, &ALICE, 50)); assert_eq!(<Tokens as MultiCurrency<AccountId>>::slash(DOT, &ALICE, 60), 0); System::assert_last_event(Event::Tokens(crate::Event::Slashed { currency_id: DOT, who: ALICE, free_amount: 40, reserved_amount: 20, })); }); } #[test] fn pallet_multicurrency_extended_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiCurrencyExtended<AccountId>>::update_balance( DOT, &ALICE, 500 )); System::assert_last_event(Event::Tokens(crate::Event::Deposited { currency_id: DOT, who: ALICE, amount: 500, })); assert_ok!(<Tokens as MultiCurrencyExtended<AccountId>>::update_balance( DOT, &ALICE, -500 )); System::assert_last_event(Event::Tokens(crate::Event::Withdrawn { currency_id: DOT, who: ALICE, amount: 500, })); }); } #[test] fn pallet_multi_lockable_currency_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiLockableCurrency<AccountId>>::set_lock( [0u8; 8], DOT, &ALICE, 10 )); 
System::assert_last_event(Event::Tokens(crate::Event::LockSet { lock_id: [0u8; 8], currency_id: DOT, who: ALICE, amount: 10, })); assert_ok!(<Tokens as MultiLockableCurrency<AccountId>>::remove_lock( [0u8; 8], DOT, &ALICE )); System::assert_last_event(Event::Tokens(crate::Event::LockRemoved { lock_id: [0u8; 8], currency_id: DOT, who: ALICE, })); }); } #[test] fn pallet_multi_reservable_currency_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 1000), (BOB, DOT, 1000)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve( DOT, &ALICE, 500 )); System::assert_last_event(Event::Tokens(crate::Event::Reserved { currency_id: DOT, who: ALICE, amount: 500, })); assert_eq!( <Tokens as MultiReservableCurrency<AccountId>>::slash_reserved(DOT, &ALICE, 300), 0 ); System::assert_last_event(Event::Tokens(crate::Event::Slashed { currency_id: DOT, who: ALICE, free_amount: 0, reserved_amount: 300, })); assert_eq!( <Tokens as MultiReservableCurrency<AccountId>>::unreserve(DOT, &ALICE, 100), 0 ); System::assert_last_event(Event::Tokens(crate::Event::Unreserved { currency_id: DOT, who: ALICE, amount: 100, })); assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::repatriate_reserved( DOT, &ALICE, &BOB, 100, BalanceStatus::Free )); System::assert_last_event(Event::Tokens(crate::Event::ReserveRepatriated { currency_id: DOT, from: ALICE, to: BOB, amount: 100, status: BalanceStatus::Free, })); }); } #[test] fn pallet_fungibles_mutate_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as fungibles::Mutate<AccountId>>::mint_into(DOT, &ALICE, 500)); System::assert_last_event(Event::Tokens(crate::Event::Deposited { currency_id: DOT, who: ALICE, amount: 500, })); assert_ok!(<Tokens as fungibles::Mutate<AccountId>>::burn_from(DOT, &ALICE, 500)); System::assert_last_event(Event::Tokens(crate::Event::Withdrawn { currency_id: DOT, who: ALICE, amount: 500, })); 
}); } #[test] fn pallet_fungibles_transfer_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as fungibles::Transfer<AccountId>>::transfer( DOT, &ALICE, &BOB, 50, true )); System::assert_last_event(Event::Tokens(crate::Event::Transfer { currency_id: DOT, from: ALICE, to: BOB, amount: 50, })); }); } #[test] fn pallet_fungibles_unbalanced_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve(DOT, &ALICE, 50)); assert_ok!(<Tokens as fungibles::Unbalanced<AccountId>>::set_balance( DOT, &ALICE, 500 )); System::assert_last_event(Event::Tokens(crate::Event::BalanceSet { currency_id: DOT, who: ALICE, free: 500, reserved: 50, })); <Tokens as fungibles::Unbalanced<AccountId>>::set_total_issuance(DOT, 1000); System::assert_last_event(Event::Tokens(crate::Event::TotalIssuanceSet { currency_id: DOT, amount: 1000, })); }); } #[test] fn pallet_fungibles_mutate_hold_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as fungibles::MutateHold<AccountId>>::hold(DOT, &ALICE, 50)); System::assert_last_event(Event::Tokens(crate::Event::Reserved { currency_id: DOT, who: ALICE, amount: 50, })); assert_ok!(<Tokens as fungibles::MutateHold<AccountId>>::transfer_held( DOT, &ALICE, &BOB, 50, true, true )); System::assert_last_event(Event::Tokens(crate::Event::ReserveRepatriated { currency_id: DOT, from: ALICE, to: BOB, amount: 50, status: BalanceStatus::Reserved, })); System::reset_events(); assert_eq!( <Tokens as fungibles::MutateHold<AccountId>>::release(DOT, &BOB, 50, true), Ok(50) ); System::assert_last_event(Event::Tokens(crate::Event::Unreserved { currency_id: DOT, who: BOB, amount: 50, })); }); } #[test] fn currency_adapter_pallet_currency_deposit_events() { ExtBuilder::default() 
.balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::burn(500)); System::assert_last_event(Event::Tokens(crate::Event::TotalIssuanceSet { currency_id: DOT, amount: 0, })); std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::issue(200)); System::assert_last_event(Event::Tokens(crate::Event::TotalIssuanceSet { currency_id: DOT, amount: 200, })); assert_ok!(<MockCurrencyAdapter as PalletCurrency<AccountId>>::transfer( &ALICE, &BOB, 50, ExistenceRequirement::AllowDeath )); System::assert_last_event(Event::Tokens(crate::Event::Transfer { currency_id: DOT, from: ALICE, to: BOB, amount: 50, })); assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve(DOT, &BOB, 50)); std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::slash(&BOB, 110)); System::assert_last_event(Event::Tokens(crate::Event::Slashed { currency_id: DOT, who: BOB, free_amount: 100, reserved_amount: 10, })); std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::make_free_balance_be(&BOB, 200)); System::assert_last_event(Event::Tokens(crate::Event::BalanceSet { currency_id: DOT, who: BOB, free: 200, reserved: 40, })); }); }
#![cfg(test)] use super::*; use frame_support::assert_ok; use mock::{Event, *}; #[test] fn pallet_multicurrency_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiCurrency<AccountId>>::transfer(DOT, &ALICE, &BOB, 10)); System::assert_last_event(Event::Tokens(crate::Event::Transfer { currency_id: DOT, from: ALICE, to: BOB, amount: 10, })); assert_ok!(<Tokens as MultiCurrency<AccountId>>::deposit(DOT, &ALICE, 10)); System::assert_last_event(Event::Tokens(crate::Event::Deposited { currency_id: DOT, who: ALICE, amount: 10, })); assert_ok!(<Tokens as MultiCurrency<AccountId>>::withdraw(DOT, &ALICE, 10)); System::assert_last_event(Event::Tokens(crate::Event::Withdrawn { currency_id: DOT, who: ALICE, amount: 10, })); assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve(DOT, &ALICE, 50)); assert_eq!(<Tokens as MultiCurrency<AccountId>>::slash(DOT, &ALICE, 60), 0); System::assert_last_event(Event::Tokens(crate::Event::Slashed { currency_id: DOT, who: ALICE, free_amount: 40, reserved_amount: 20, })); }); } #[test] fn pallet_multicurrency_extend
] fn pallet_fungibles_unbalanced_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve(DOT, &ALICE, 50)); assert_ok!(<Tokens as fungibles::Unbalanced<AccountId>>::set_balance( DOT, &ALICE, 500 )); System::assert_last_event(Event::Tokens(crate::Event::BalanceSet { currency_id: DOT, who: ALICE, free: 500, reserved: 50, })); <Tokens as fungibles::Unbalanced<AccountId>>::set_total_issuance(DOT, 1000); System::assert_last_event(Event::Tokens(crate::Event::TotalIssuanceSet { currency_id: DOT, amount: 1000, })); }); } #[test] fn pallet_fungibles_mutate_hold_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as fungibles::MutateHold<AccountId>>::hold(DOT, &ALICE, 50)); System::assert_last_event(Event::Tokens(crate::Event::Reserved { currency_id: DOT, who: ALICE, amount: 50, })); assert_ok!(<Tokens as fungibles::MutateHold<AccountId>>::transfer_held( DOT, &ALICE, &BOB, 50, true, true )); System::assert_last_event(Event::Tokens(crate::Event::ReserveRepatriated { currency_id: DOT, from: ALICE, to: BOB, amount: 50, status: BalanceStatus::Reserved, })); System::reset_events(); assert_eq!( <Tokens as fungibles::MutateHold<AccountId>>::release(DOT, &BOB, 50, true), Ok(50) ); System::assert_last_event(Event::Tokens(crate::Event::Unreserved { currency_id: DOT, who: BOB, amount: 50, })); }); } #[test] fn currency_adapter_pallet_currency_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::burn(500)); System::assert_last_event(Event::Tokens(crate::Event::TotalIssuanceSet { currency_id: DOT, amount: 0, })); std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::issue(200)); 
System::assert_last_event(Event::Tokens(crate::Event::TotalIssuanceSet { currency_id: DOT, amount: 200, })); assert_ok!(<MockCurrencyAdapter as PalletCurrency<AccountId>>::transfer( &ALICE, &BOB, 50, ExistenceRequirement::AllowDeath )); System::assert_last_event(Event::Tokens(crate::Event::Transfer { currency_id: DOT, from: ALICE, to: BOB, amount: 50, })); assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve(DOT, &BOB, 50)); std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::slash(&BOB, 110)); System::assert_last_event(Event::Tokens(crate::Event::Slashed { currency_id: DOT, who: BOB, free_amount: 100, reserved_amount: 10, })); std::mem::forget(<MockCurrencyAdapter as PalletCurrency<AccountId>>::make_free_balance_be(&BOB, 200)); System::assert_last_event(Event::Tokens(crate::Event::BalanceSet { currency_id: DOT, who: BOB, free: 200, reserved: 40, })); }); }
ed_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiCurrencyExtended<AccountId>>::update_balance( DOT, &ALICE, 500 )); System::assert_last_event(Event::Tokens(crate::Event::Deposited { currency_id: DOT, who: ALICE, amount: 500, })); assert_ok!(<Tokens as MultiCurrencyExtended<AccountId>>::update_balance( DOT, &ALICE, -500 )); System::assert_last_event(Event::Tokens(crate::Event::Withdrawn { currency_id: DOT, who: ALICE, amount: 500, })); }); } #[test] fn pallet_multi_lockable_currency_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiLockableCurrency<AccountId>>::set_lock( [0u8; 8], DOT, &ALICE, 10 )); System::assert_last_event(Event::Tokens(crate::Event::LockSet { lock_id: [0u8; 8], currency_id: DOT, who: ALICE, amount: 10, })); assert_ok!(<Tokens as MultiLockableCurrency<AccountId>>::remove_lock( [0u8; 8], DOT, &ALICE )); System::assert_last_event(Event::Tokens(crate::Event::LockRemoved { lock_id: [0u8; 8], currency_id: DOT, who: ALICE, })); }); } #[test] fn pallet_multi_reservable_currency_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 1000), (BOB, DOT, 1000)]) .build() .execute_with(|| { assert_ok!(<Tokens as MultiReservableCurrency<AccountId>>::reserve( DOT, &ALICE, 500 )); System::assert_last_event(Event::Tokens(crate::Event::Reserved { currency_id: DOT, who: ALICE, amount: 500, })); assert_eq!( <Tokens as MultiReservableCurrency<AccountId>>::slash_reserved(DOT, &ALICE, 300), 0 ); System::assert_last_event(Event::Tokens(crate::Event::Slashed { currency_id: DOT, who: ALICE, free_amount: 0, reserved_amount: 300, })); assert_eq!( <Tokens as MultiReservableCurrency<AccountId>>::unreserve(DOT, &ALICE, 100), 0 ); System::assert_last_event(Event::Tokens(crate::Event::Unreserved { currency_id: DOT, who: ALICE, amount: 100, })); assert_ok!(<Tokens as 
MultiReservableCurrency<AccountId>>::repatriate_reserved( DOT, &ALICE, &BOB, 100, BalanceStatus::Free )); System::assert_last_event(Event::Tokens(crate::Event::ReserveRepatriated { currency_id: DOT, from: ALICE, to: BOB, amount: 100, status: BalanceStatus::Free, })); }); } #[test] fn pallet_fungibles_mutate_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as fungibles::Mutate<AccountId>>::mint_into(DOT, &ALICE, 500)); System::assert_last_event(Event::Tokens(crate::Event::Deposited { currency_id: DOT, who: ALICE, amount: 500, })); assert_ok!(<Tokens as fungibles::Mutate<AccountId>>::burn_from(DOT, &ALICE, 500)); System::assert_last_event(Event::Tokens(crate::Event::Withdrawn { currency_id: DOT, who: ALICE, amount: 500, })); }); } #[test] fn pallet_fungibles_transfer_deposit_events() { ExtBuilder::default() .balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)]) .build() .execute_with(|| { assert_ok!(<Tokens as fungibles::Transfer<AccountId>>::transfer( DOT, &ALICE, &BOB, 50, true )); System::assert_last_event(Event::Tokens(crate::Event::Transfer { currency_id: DOT, from: ALICE, to: BOB, amount: 50, })); }); } #[test
random
[ { "content": "fn concrete_fungible(amount: u128) -> MultiAsset {\n\n\t(MOCK_CONCRETE_FUNGIBLE_ID, amount).into()\n\n}\n\n\n", "file_path": "unknown-tokens/src/tests.rs", "rank": 0, "score": 182153.9216231135 }, { "content": "fn abstract_fungible(amount: u128) -> MultiAsset {\n\n\t(mock_abstract_fungible_id(), amount).into()\n\n}\n\n\n", "file_path": "unknown-tokens/src/tests.rs", "rank": 1, "score": 182153.9216231135 }, { "content": "#[test]\n\nfn test_reserve_payment_amount_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\t\tlet expected_fee_amount = 0;\n\n\n\n\t\t// the payment amount should not be reserved\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_CREATOR), 100);\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_RECIPENT), 0);\n\n\n\n\t\t// should be able to create a payment with available balance within a\n\n\t\t// transaction\n\n\t\tassert_ok!(with_transaction(|| TransactionOutcome::Commit({\n\n\t\t\t<Payment as PaymentHandler<Test>>::create_payment(\n\n\t\t\t\t&PAYMENT_CREATOR,\n\n\t\t\t\t&PAYMENT_RECIPENT,\n\n\t\t\t\tCURRENCY_ID,\n\n\t\t\t\tpayment_amount,\n\n\t\t\t\tPaymentState::Created,\n", "file_path": "payments/src/tests.rs", "rank": 2, "score": 171088.30305940288 }, { "content": "#[test]\n\nfn send_with_zero_amount() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tassert_noop!(\n\n\t\t\tParaXTokens::transfer(\n\n\t\t\t\tSome(ALICE).into(),\n\n\t\t\t\tCurrencyId::B,\n\n\t\t\t\t0,\n\n\t\t\t\tBox::new(\n\n\t\t\t\t\t(\n\n\t\t\t\t\t\tParent,\n\n\t\t\t\t\t\tParachain(2),\n\n\t\t\t\t\t\tJunction::AccountId32 {\n\n\t\t\t\t\t\t\tnetwork: NetworkId::Any,\n\n\t\t\t\t\t\t\tid: BOB.into(),\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t)\n\n\t\t\t\t\t\t.into()\n\n\t\t\t\t),\n", "file_path": "xtokens/src/tests.rs", "rank": 3, "score": 166692.68955607538 }, { "content": 
"#[test]\n\nfn cannot_use_fund_if_not_claimed() {\n\n\tExtBuilder::build().execute_with(|| {\n\n\t\tlet schedule = VestingSchedule {\n\n\t\t\tstart: 10u64,\n\n\t\t\tperiod: 10u64,\n\n\t\t\tperiod_count: 1u32,\n\n\t\t\tper_period: 50u64,\n\n\t\t};\n\n\t\tassert_ok!(Vesting::vested_transfer(Origin::signed(ALICE), BOB, schedule));\n\n\t\tassert!(PalletBalances::ensure_can_withdraw(&BOB, 1, WithdrawReasons::TRANSFER, 49).is_err());\n\n\t});\n\n}\n\n\n", "file_path": "vesting/src/tests.rs", "rank": 4, "score": 166688.802793329 }, { "content": "#[test]\n\nfn no_op_if_amount_is_zero() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(Tokens::ensure_can_withdraw(DOT, &ALICE, 0));\n\n\t\tassert_ok!(Tokens::transfer(Some(ALICE).into(), BOB, DOT, 0));\n\n\t\tassert_ok!(Tokens::transfer(Some(ALICE).into(), ALICE, DOT, 0));\n\n\t\tassert_ok!(Tokens::deposit(DOT, &ALICE, 0));\n\n\t\tassert_ok!(Tokens::withdraw(DOT, &ALICE, 0));\n\n\t\tassert_eq!(Tokens::slash(DOT, &ALICE, 0), 0);\n\n\t\tassert_eq!(Tokens::slash(DOT, &ALICE, 1), 1);\n\n\t\tassert_ok!(Tokens::update_balance(DOT, &ALICE, 0));\n\n\t});\n\n}\n\n\n", "file_path": "tokens/src/tests_multicurrency.rs", "rank": 5, "score": 163544.62788981225 }, { "content": "#[test]\n\nfn used_weight_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_100(Origin::signed(100)).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(100), result.actual_weight);\n\n\t\t// Check that the method ran correctly\n\n\t\tassert_eq!(Some(100), TestModule::something());\n\n\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_500(Origin::signed(100)).unwrap();\n\n\t\tassert_eq!(Some(500), result.actual_weight);\n\n\t\tassert_eq!(Some(600), TestModule::something());\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/tests.rs", "rank": 6, "score": 163540.8331741689 }, { "content": "#[test]\n\nfn used_weight_branch_works() 
{\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_100_or_200(Origin::signed(100), false).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(100), result.actual_weight);\n\n\t\t// Check that the method ran correctly\n\n\t\tassert_eq!(Some(100), TestModule::something());\n\n\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_100_or_200(Origin::signed(100), true).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(200), result.actual_weight);\n\n\t\t// Check that the method ran correctly\n\n\t\tassert_eq!(Some(300), TestModule::something());\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/tests.rs", "rank": 7, "score": 160538.5158956371 }, { "content": "#[test]\n\nfn used_weight_nested_calls_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result: PostDispatchInfo = TestModule::nested_inner_methods(Origin::signed(100)).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(300), result.actual_weight);\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/tests.rs", "rank": 8, "score": 157671.97081984975 }, { "content": "pub fn build() -> std::io::Result<Vec<u8>> {\n\n\tlet manifest_dir = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n\tlet pkg_name = std::env::var(\"CARGO_PKG_NAME\").unwrap();\n\n\n\n\tlet random = thread_rng()\n\n\t\t.sample_iter(&Alphanumeric)\n\n\t\t.take(16)\n\n\t\t.map(char::from)\n\n\t\t.collect::<String>();\n\n\n\n\tlet mut out_dir = std::path::PathBuf::from(manifest_dir);\n\n\tout_dir.push(format!(\"target/release/build/{}-{}/out\", pkg_name, random));\n\n\n\n\tstd::env::set_var(\"OUT_DIR\", out_dir.display().to_string());\n\n\n\n\tlet mut project_cargo_toml = std::env::current_dir()?;\n\n\tproject_cargo_toml.push(\"Cargo.toml\");\n\n\n\n\tlet default_rustflags = \"-Clink-arg=--export=__heap_base -C link-arg=--import-memory\";\n\n\tlet cargo_cmd = match prerequisites::check() {\n", "file_path": 
"bencher/src/build_wasm/mod.rs", "rank": 9, "score": 141535.13203820738 }, { "content": "fn subtract_fee(asset: &MultiAsset, amount: u128) -> MultiAsset {\n\n\tlet final_amount = fungible_amount(asset).checked_sub(amount).expect(\"fee too low; qed\");\n\n\tMultiAsset {\n\n\t\tfun: Fungible(final_amount),\n\n\t\tid: asset.id.clone(),\n\n\t}\n\n}\n", "file_path": "xtokens/src/lib.rs", "rank": 10, "score": 132689.86160204496 }, { "content": "#[test]\n\nfn test_cancel_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 40;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\n\n\t\t// should be able to create a payment with available balance\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT),\n\n\t\t\tSome(PaymentDetail {\n\n\t\t\t\tasset: CURRENCY_ID,\n\n\t\t\t\tamount: payment_amount,\n", "file_path": "payments/src/tests.rs", "rank": 11, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn set_value() {\n\n\tlet weight = ModuleWeights::<Runtime>::set_value() + ModuleWeights::<Runtime>::set_foo();\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_eq!(\n\n\t\t\tTest::set_value(Origin::signed(1), 1),\n\n\t\t\tOk(PostDispatchInfo {\n\n\t\t\t\tactual_weight: Some(weight),\n\n\t\t\t\t..Default::default()\n\n\t\t\t})\n\n\t\t);\n\n\t});\n\n}\n", "file_path": "bencher/test/src/tests.rs", "rank": 12, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn test_release_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 40;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\n\n\t\t// should be able to create a payment with available 
balance\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT),\n\n\t\t\tSome(PaymentDetail {\n\n\t\t\t\tasset: CURRENCY_ID,\n\n\t\t\t\tamount: payment_amount,\n\n\t\t\t\tincentive_amount: expected_incentive_amount,\n", "file_path": "payments/src/tests.rs", "rank": 13, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn test_request_refund() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\t\tlet expected_cancel_block = CANCEL_BLOCK_BUFFER + 1;\n\n\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tassert_ok!(Payment::request_refund(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT\n\n\t\t));\n\n\n\n\t\t// do not overwrite payment\n", "file_path": "payments/src/tests.rs", "rank": 14, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn test_dispute_refund() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\t\tlet expected_cancel_block = CANCEL_BLOCK_BUFFER + 1;\n\n\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\t// cannot dispute if refund is not requested\n\n\t\tassert_noop!(\n\n\t\t\tPayment::dispute_refund(Origin::signed(PAYMENT_RECIPENT), PAYMENT_CREATOR),\n\n\t\t\tError::InvalidAction\n\n\t\t);\n\n\t\t// creator requests a refund\n", "file_path": "payments/src/tests.rs", "rank": 15, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn 
test_request_payment() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = 0;\n\n\n\n\t\tassert_ok!(Payment::request_payment(\n\n\t\t\tOrigin::signed(PAYMENT_RECIPENT),\n\n\t\t\tPAYMENT_CREATOR,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t));\n\n\n\n\t\tassert_noop!(\n\n\t\t\tPayment::request_refund(Origin::signed(PAYMENT_CREATOR), PAYMENT_RECIPENT),\n\n\t\t\tcrate::Error::<Test>::InvalidAction\n\n\t\t);\n\n\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT),\n\n\t\t\tSome(PaymentDetail {\n", "file_path": "payments/src/tests.rs", "rank": 16, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn test_accept_and_pay() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = 0;\n\n\n\n\t\tassert_ok!(Payment::request_payment(\n\n\t\t\tOrigin::signed(PAYMENT_RECIPENT),\n\n\t\t\tPAYMENT_CREATOR,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t));\n\n\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT),\n\n\t\t\tSome(PaymentDetail {\n\n\t\t\t\tasset: CURRENCY_ID,\n\n\t\t\t\tamount: payment_amount,\n\n\t\t\t\tincentive_amount: expected_incentive_amount,\n\n\t\t\t\tstate: PaymentState::PaymentRequested,\n", "file_path": "payments/src/tests.rs", "rank": 17, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn test_pay_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\n\n\t\t// the payment amount should not be reserved\n\n\t\tassert_eq!(\n\n\t\t\tTokens::free_balance(CURRENCY_ID, &PAYMENT_CREATOR),\n\n\t\t\tcreator_initial_balance\n\n\t\t);\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_RECIPENT), 0);\n\n\n\n\t\t// should be able to create a payment 
with available balance\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n", "file_path": "payments/src/tests.rs", "rank": 18, "score": 131022.03684027697 }, { "content": "#[test]\n\nfn test_resolve_payment_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 40;\n\n\n\n\t\t// should be able to create a payment with available balance\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\t// should fail for non whitelisted caller\n\n\t\tassert_noop!(\n\n\t\t\tPayment::resolve_payment(\n\n\t\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\t\tPAYMENT_CREATOR,\n\n\t\t\t\tPAYMENT_RECIPENT,\n", "file_path": "payments/src/tests.rs", "rank": 19, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_settle_payment_works_for_70_30() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 10;\n\n\t\tlet expected_fee_amount = payment_amount / MARKETPLACE_FEE_PERCENTAGE as u128;\n\n\n\n\t\t// the payment amount should not be reserved\n\n\t\tassert_eq!(\n\n\t\t\tTokens::free_balance(CURRENCY_ID, &PAYMENT_CREATOR),\n\n\t\t\tcreator_initial_balance\n\n\t\t);\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_RECIPENT_FEE_CHARGED), 0);\n\n\n\n\t\t// should be able to create a payment with available balance within a\n\n\t\t// transaction\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT_FEE_CHARGED,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n", "file_path": "payments/src/tests.rs", "rank": 20, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_do_not_overwrite_logic_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet payment_amount = 
40;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tassert_noop!(\n\n\t\t\tPayment::pay(\n\n\t\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\t\tPAYMENT_RECIPENT,\n\n\t\t\t\tCURRENCY_ID,\n\n\t\t\t\tpayment_amount,\n\n\t\t\t\tNone\n", "file_path": "payments/src/tests.rs", "rank": 21, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_pay_with_remark_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 40;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\n\n\t\t// should be able to create a payment with available balance\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tSome(vec![1u8; 10].try_into().unwrap())\n\n\t\t));\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT),\n\n\t\t\tSome(PaymentDetail {\n\n\t\t\t\tasset: CURRENCY_ID,\n\n\t\t\t\tamount: payment_amount,\n\n\t\t\t\tincentive_amount: expected_incentive_amount,\n", "file_path": "payments/src/tests.rs", "rank": 22, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn benchmarks_generate_unit_tests() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_ok!(Benchmark::test_benchmark_set_value());\n\n\t\tassert_ok!(Benchmark::test_benchmark_other_name());\n\n\t\tassert_ok!(Benchmark::test_benchmark_sort_vector());\n\n\t\tassert_err!(Benchmark::test_benchmark_bad_origin(), \"Bad origin\");\n\n\t\tassert_err!(Benchmark::test_benchmark_bad_verify(), \"You forgot to sort!\");\n\n\t});\n\n}\n", "file_path": "benchmarking/src/tests.rs", "rank": 23, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn 
test_existential_deposits() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tlet metadata = AssetMetadata {\n\n\t\t\texistential_deposit: 100,\n\n\t\t\t..dummy_metadata()\n\n\t\t};\n\n\t\tassert_ok!(AssetRegistry::register_asset(Origin::root(), metadata, None));\n\n\n\n\t\tassert_ok!(Tokens::set_balance(\n\n\t\t\tOrigin::root(),\n\n\t\t\tALICE,\n\n\t\t\tCurrencyId::RegisteredAsset(1),\n\n\t\t\t1_000,\n\n\t\t\t0\n\n\t\t));\n\n\n\n\t\t// transferring at existential_deposit succeeds\n\n\t\tassert_ok!(Tokens::transfer(\n", "file_path": "asset-registry/src/tests.rs", "rank": 24, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_create_payment_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\t\tlet expected_fee_amount = 0;\n\n\n\n\t\t// the payment amount should not be reserved\n\n\t\tassert_eq!(\n\n\t\t\tTokens::free_balance(CURRENCY_ID, &PAYMENT_CREATOR),\n\n\t\t\tcreator_initial_balance\n\n\t\t);\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_RECIPENT), 0);\n\n\n\n\t\t// should be able to create a payment with available balance within a\n\n\t\t// transaction\n\n\t\tassert_ok!(with_transaction(|| TransactionOutcome::Commit({\n\n\t\t\t<Payment as PaymentHandler<Test>>::create_payment(\n\n\t\t\t\t&PAYMENT_CREATOR,\n\n\t\t\t\t&PAYMENT_RECIPENT,\n", "file_path": "payments/src/tests.rs", "rank": 25, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_requested_payment_cannot_be_released() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet payment_amount = 20;\n\n\n\n\t\tassert_ok!(Payment::request_payment(\n\n\t\t\tOrigin::signed(PAYMENT_RECIPENT),\n\n\t\t\tPAYMENT_CREATOR,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t));\n\n\n\n\t\t// requested payment cannot be 
released\n\n\t\tassert_noop!(\n\n\t\t\tPayment::release(Origin::signed(PAYMENT_CREATOR), PAYMENT_RECIPENT),\n\n\t\t\tError::InvalidAction\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "payments/src/tests.rs", "rank": 26, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_settle_payment_works_for_50_50() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 10;\n\n\t\tlet expected_fee_amount = payment_amount / MARKETPLACE_FEE_PERCENTAGE as u128;\n\n\n\n\t\t// the payment amount should not be reserved\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_CREATOR), 100);\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_RECIPENT_FEE_CHARGED), 0);\n\n\n\n\t\t// should be able to create a payment with available balance within a\n\n\t\t// transaction\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT_FEE_CHARGED,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\n", "file_path": "payments/src/tests.rs", "rank": 27, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_automatic_refund_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\t\tconst CANCEL_PERIOD: u64 = 600;\n\n\t\tconst CANCEL_BLOCK: u64 = CANCEL_PERIOD + 1;\n\n\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tassert_ok!(Payment::request_refund(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT\n\n\t\t));\n", "file_path": "payments/src/tests.rs", "rank": 28, "score": 129193.33781319825 }, { "content": "#[test]\n\nfn test_settle_payment_works_for_cancel() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 
100;\n\n\t\tlet payment_amount = 20;\n\n\n\n\t\t// the payment amount should not be reserved\n\n\t\tassert_eq!(\n\n\t\t\tTokens::free_balance(CURRENCY_ID, &PAYMENT_CREATOR),\n\n\t\t\tcreator_initial_balance\n\n\t\t);\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_RECIPENT), 0);\n\n\n\n\t\t// should be able to create a payment with available balance within a\n\n\t\t// transaction\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n", "file_path": "payments/src/tests.rs", "rank": 29, "score": 127435.76970499437 }, { "content": "#[test]\n\nfn test_charging_fee_payment_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 40;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\t\tlet expected_fee_amount = payment_amount / MARKETPLACE_FEE_PERCENTAGE as u128;\n\n\n\n\t\t// should be able to create a payment with available balance\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT_FEE_CHARGED,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT_FEE_CHARGED),\n\n\t\t\tSome(PaymentDetail {\n\n\t\t\t\tasset: CURRENCY_ID,\n\n\t\t\t\tamount: payment_amount,\n", "file_path": "payments/src/tests.rs", "rank": 30, "score": 127435.76970499437 }, { "content": "#[test]\n\nfn test_settle_payment_works_for_release() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 20;\n\n\n\n\t\t// the payment amount should not be reserved\n\n\t\tassert_eq!(\n\n\t\t\tTokens::free_balance(CURRENCY_ID, &PAYMENT_CREATOR),\n\n\t\t\tcreator_initial_balance\n\n\t\t);\n\n\t\tassert_eq!(Tokens::free_balance(CURRENCY_ID, &PAYMENT_RECIPENT), 0);\n\n\n\n\t\t// should be able 
to create a payment with available balance within a\n\n\t\t// transaction\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n", "file_path": "payments/src/tests.rs", "rank": 31, "score": 127435.76970499437 }, { "content": "#[test]\n\nfn test_update_metadata_works() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tlet old_metadata = dummy_metadata();\n\n\t\tassert_ok!(AssetRegistry::register_asset(\n\n\t\t\tOrigin::root(),\n\n\t\t\told_metadata.clone(),\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tlet new_metadata = AssetMetadata {\n\n\t\t\tdecimals: 11,\n\n\t\t\tname: \"para A native token2\".as_bytes().to_vec(),\n\n\t\t\tsymbol: \"paraA2\".as_bytes().to_vec(),\n\n\t\t\texistential_deposit: 1,\n\n\t\t\tlocation: Some(MultiLocation::new(1, X2(Parachain(1), GeneralKey(vec![1]))).into()),\n\n\t\t\tadditional: CustomMetadata {\n\n\t\t\t\tfee_per_second: 2_000_000_000_000,\n\n\t\t\t},\n", "file_path": "asset-registry/src/tests.rs", "rank": 32, "score": 127435.76970499437 }, { "content": "/// Returns if the project should be built as a release.\n\nfn is_release_build() -> bool {\n\n\tif let Ok(var) = env::var(WASM_BUILD_TYPE_ENV) {\n\n\t\tmatch var.as_str() {\n\n\t\t\t\"release\" => true,\n\n\t\t\t\"debug\" => false,\n\n\t\t\tvar => panic!(\n\n\t\t\t\t\"Unexpected value for `{}` env variable: {}\\nOne of the following are expected: `debug` or `release`.\",\n\n\t\t\t\tWASM_BUILD_TYPE_ENV, var,\n\n\t\t\t),\n\n\t\t}\n\n\t} else {\n\n\t\ttrue\n\n\t}\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/wasm_project.rs", "rank": 33, "score": 126321.40655751895 }, { "content": "#[test]\n\nfn test_automatic_refund_works_for_multiple_payments() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tconst CANCEL_PERIOD: u64 = 
600;\n\n\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\t20,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR_TWO),\n\n\t\t\tPAYMENT_RECIPENT_TWO,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\t20,\n\n\t\t\tNone\n\n\t\t));\n\n\n", "file_path": "payments/src/tests.rs", "rank": 34, "score": 125745.0738627824 }, { "content": "#[test]\n\nfn test_requested_payment_can_be_cancelled_by_requestor() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet payment_amount = 20;\n\n\n\n\t\tassert_ok!(Payment::request_payment(\n\n\t\t\tOrigin::signed(PAYMENT_RECIPENT),\n\n\t\t\tPAYMENT_CREATOR,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t));\n\n\n\n\t\tassert_ok!(Payment::cancel(Origin::signed(PAYMENT_RECIPENT), PAYMENT_CREATOR));\n\n\n\n\t\t// the request should be removed from storage\n\n\t\tassert_eq!(PaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT), None);\n\n\t});\n\n}\n\n\n", "file_path": "payments/src/tests.rs", "rank": 35, "score": 125745.0738627824 }, { "content": "#[test]\n\nfn test_accept_and_pay_should_charge_fee_correctly() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 20;\n\n\t\tlet expected_incentive_amount = 0;\n\n\t\tlet expected_fee_amount = payment_amount / MARKETPLACE_FEE_PERCENTAGE as u128;\n\n\n\n\t\tassert_ok!(Payment::request_payment(\n\n\t\t\tOrigin::signed(PAYMENT_RECIPENT_FEE_CHARGED),\n\n\t\t\tPAYMENT_CREATOR,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t));\n\n\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT_FEE_CHARGED),\n\n\t\t\tSome(PaymentDetail {\n\n\t\t\t\tasset: CURRENCY_ID,\n\n\t\t\t\tamount: payment_amount,\n\n\t\t\t\tincentive_amount: expected_incentive_amount,\n", "file_path": "payments/src/tests.rs", "rank": 36, "score": 125745.0738627824 }, { "content": "#[test]\n\nfn 
test_charging_fee_payment_works_when_canceled() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet creator_initial_balance = 100;\n\n\t\tlet payment_amount = 40;\n\n\t\tlet expected_incentive_amount = payment_amount / INCENTIVE_PERCENTAGE as u128;\n\n\t\tlet expected_fee_amount = payment_amount / MARKETPLACE_FEE_PERCENTAGE as u128;\n\n\n\n\t\t// should be able to create a payment with available balance\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT_FEE_CHARGED,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\tpayment_amount,\n\n\t\t\tNone\n\n\t\t));\n\n\t\tassert_eq!(\n\n\t\t\tPaymentStore::<Test>::get(PAYMENT_CREATOR, PAYMENT_RECIPENT_FEE_CHARGED),\n\n\t\t\tSome(PaymentDetail {\n\n\t\t\t\tasset: CURRENCY_ID,\n\n\t\t\t\tamount: payment_amount,\n", "file_path": "payments/src/tests.rs", "rank": 37, "score": 125745.0738627824 }, { "content": "/// tests the SequentialId AssetProcessor\n\nfn test_sequential_id_normal_behavior() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tlet metadata1 = dummy_metadata();\n\n\n\n\t\tlet metadata2 = AssetMetadata {\n\n\t\t\tname: \"para A native token 2\".as_bytes().to_vec(),\n\n\t\t\tsymbol: \"paraA2\".as_bytes().to_vec(),\n\n\t\t\tlocation: Some(MultiLocation::new(1, X2(Parachain(1), GeneralKey(vec![1]))).into()),\n\n\t\t\t..dummy_metadata()\n\n\t\t};\n\n\t\tAssetRegistry::register_asset(Origin::root(), metadata1.clone(), None).unwrap();\n\n\t\tAssetRegistry::register_asset(Origin::root(), metadata2.clone(), None).unwrap();\n\n\n\n\t\tassert_eq!(AssetRegistry::metadata(1).unwrap(), metadata1);\n\n\t\tassert_eq!(AssetRegistry::metadata(2).unwrap(), metadata2);\n\n\t});\n\n}\n\n\n", "file_path": "asset-registry/src/tests.rs", "rank": 38, "score": 125744.87236741246 }, { "content": "/// tests FixedRateAssetRegistryTrader\n\nfn test_fixed_rate_asset_trader() {\n\n\tTestNet::reset();\n\n\n\n\tlet metadata = dummy_metadata();\n\n\n\n\tParaB::execute_with(|| 
{\n\n\t\tAssetRegistry::register_asset(Origin::root(), metadata.clone(), None).unwrap();\n\n\t});\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tlet para_a_metadata = AssetMetadata {\n\n\t\t\tlocation: Some(MultiLocation::new(0, X1(GeneralKey(vec![0]))).into()),\n\n\t\t\t..metadata.clone()\n\n\t\t};\n\n\t\tAssetRegistry::register_asset(Origin::root(), para_a_metadata, None).unwrap();\n\n\n\n\t\tassert_ok!(ParaTokens::deposit(CurrencyId::RegisteredAsset(1), &ALICE, 1_000));\n\n\n\n\t\tassert_ok!(ParaXTokens::transfer(\n\n\t\t\tSome(ALICE).into(),\n", "file_path": "asset-registry/src/tests.rs", "rank": 39, "score": 125744.82437259074 }, { "content": "// This function basically just builds a genesis storage key/value store\n\n// according to our desired mockup.\n\nfn new_test_ext() -> sp_io::TestExternalities {\n\n\tframe_system::GenesisConfig::default()\n\n\t\t.build_storage::<Test>()\n\n\t\t.unwrap()\n\n\t\t.into()\n\n}\n\n\n\nruntime_benchmarks! {\n\n\t{ Test, test }\n\n\n\n\tset_value {\n\n\t\tlet b in 1 .. 1000;\n\n\t\tlet caller = account::<AccountId>(\"caller\", 0, 0);\n\n\t}: _ (RawOrigin::Signed(caller), b)\n\n\tverify {\n\n\t\tassert_eq!(Pallet::value(), Some(b));\n\n\t}\n\n\n\n\tother_name {\n\n\t\tlet b in 1 .. 
1000;\n", "file_path": "benchmarking/src/tests.rs", "rank": 40, "score": 124753.8166749716 }, { "content": "#[test]\n\nfn test_register_duplicate_location_returns_error() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tlet metadata = dummy_metadata();\n\n\n\n\t\tassert_ok!(AssetRegistry::register_asset(Origin::root(), metadata.clone(), None));\n\n\t\tassert_noop!(\n\n\t\t\tAssetRegistry::register_asset(Origin::root(), metadata.clone(), None),\n\n\t\t\tError::<para::Runtime>::ConflictingLocation\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "asset-registry/src/tests.rs", "rank": 41, "score": 124117.33698998786 }, { "content": "#[test]\n\nfn test_accept_and_pay_should_fail_for_non_payment_requested() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_ok!(Payment::pay(\n\n\t\t\tOrigin::signed(PAYMENT_CREATOR),\n\n\t\t\tPAYMENT_RECIPENT,\n\n\t\t\tCURRENCY_ID,\n\n\t\t\t20,\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tassert_noop!(\n\n\t\t\tPayment::accept_and_pay(Origin::signed(PAYMENT_CREATOR), PAYMENT_RECIPENT,),\n\n\t\t\tError::InvalidAction\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "payments/src/tests.rs", "rank": 42, "score": 124117.33698998786 }, { "content": "#[test]\n\nfn test_update_metadata_fails_with_unknown_asset() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tlet old_metadata = dummy_metadata();\n\n\t\tassert_ok!(AssetRegistry::register_asset(\n\n\t\t\tOrigin::root(),\n\n\t\t\told_metadata.clone(),\n\n\t\t\tNone\n\n\t\t));\n\n\n\n\t\tassert_noop!(\n\n\t\t\tAssetRegistry::update_asset(Origin::root(), 4, None, None, None, None, None, None,),\n\n\t\t\tError::<para::Runtime>::AssetNotFound\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "asset-registry/src/tests.rs", "rank": 43, "score": 124117.33698998786 }, { "content": "#[test]\n\nfn transfer_should_fail() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], 
()));\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::transfer(&BOB, &ALICE, (CLASS_ID, TOKEN_ID_NOT_EXIST)),\n\n\t\t\tError::<Runtime>::TokenNotFound\n\n\t\t);\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::transfer(&ALICE, &BOB, (CLASS_ID, TOKEN_ID)),\n\n\t\t\tError::<Runtime>::NoPermission\n\n\t\t);\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::mint(&BOB, CLASS_ID_NOT_EXIST, vec![1], ()),\n\n\t\t\tError::<Runtime>::ClassNotFound\n\n\t\t);\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::transfer(&ALICE, &ALICE, (CLASS_ID, TOKEN_ID)),\n\n\t\t\tError::<Runtime>::NoPermission\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 44, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn mint_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet next_class_id = NonFungibleTokenModule::next_class_id();\n\n\t\tassert_eq!(next_class_id, CLASS_ID);\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_eq!(NonFungibleTokenModule::next_token_id(CLASS_ID), 0);\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_eq!(NonFungibleTokenModule::next_token_id(CLASS_ID), 1);\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_eq!(NonFungibleTokenModule::next_token_id(CLASS_ID), 2);\n\n\n\n\t\tlet next_class_id = NonFungibleTokenModule::next_class_id();\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_eq!(NonFungibleTokenModule::next_token_id(next_class_id), 0);\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, next_class_id, vec![1], ()));\n\n\t\tassert_eq!(NonFungibleTokenModule::next_token_id(next_class_id), 1);\n\n\n\n\t\tassert_eq!(NonFungibleTokenModule::next_token_id(CLASS_ID), 2);\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 45, "score": 
123089.17520125736 }, { "content": "#[test]\n\nfn dispatch_as_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet ensure_root_call = Call::System(frame_system::Call::fill_block { ratio: Perbill::one() });\n\n\t\tlet ensure_signed_call = Call::System(frame_system::Call::remark { remark: vec![] });\n\n\t\tassert_ok!(Authority::dispatch_as(\n\n\t\t\tOrigin::root(),\n\n\t\t\tMockAsOriginId::Root,\n\n\t\t\tBox::new(ensure_root_call)\n\n\t\t));\n\n\t\tassert_ok!(Authority::dispatch_as(\n\n\t\t\tOrigin::root(),\n\n\t\t\tMockAsOriginId::Account1,\n\n\t\t\tBox::new(ensure_signed_call.clone())\n\n\t\t));\n\n\t\tassert_noop!(\n\n\t\t\tAuthority::dispatch_as(\n\n\t\t\t\tOrigin::signed(1),\n\n\t\t\t\tMockAsOriginId::Root,\n\n\t\t\t\tBox::new(ensure_signed_call.clone())\n\n\t\t\t),\n", "file_path": "authority/src/tests.rs", "rank": 46, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn claim_works() {\n\n\tExtBuilder::build().execute_with(|| {\n\n\t\tlet schedule = VestingSchedule {\n\n\t\t\tstart: 0u64,\n\n\t\t\tperiod: 10u64,\n\n\t\t\tperiod_count: 2u32,\n\n\t\t\tper_period: 10u64,\n\n\t\t};\n\n\t\tassert_ok!(Vesting::vested_transfer(Origin::signed(ALICE), BOB, schedule));\n\n\n\n\t\tMockBlockNumberProvider::set(11);\n\n\t\t// remain locked if not claimed\n\n\t\tassert!(PalletBalances::transfer(Origin::signed(BOB), ALICE, 10).is_err());\n\n\t\t// unlocked after claiming\n\n\t\tassert_ok!(Vesting::claim(Origin::signed(BOB)));\n\n\t\tassert!(VestingSchedules::<Runtime>::contains_key(BOB));\n\n\t\tassert_ok!(PalletBalances::transfer(Origin::signed(BOB), ALICE, 10));\n\n\t\t// more are still locked\n\n\t\tassert!(PalletBalances::transfer(Origin::signed(BOB), ALICE, 1).is_err());\n\n\n", "file_path": "vesting/src/tests.rs", "rank": 47, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn claim_for_works() {\n\n\tExtBuilder::build().execute_with(|| {\n\n\t\tlet schedule = VestingSchedule {\n\n\t\t\tstart: 0u64,\n\n\t\t\tperiod: 
10u64,\n\n\t\t\tperiod_count: 2u32,\n\n\t\t\tper_period: 10u64,\n\n\t\t};\n\n\t\tassert_ok!(Vesting::vested_transfer(Origin::signed(ALICE), BOB, schedule));\n\n\n\n\t\tassert_ok!(Vesting::claim_for(Origin::signed(ALICE), BOB));\n\n\n\n\t\tassert_eq!(\n\n\t\t\tPalletBalances::locks(&BOB).get(0),\n\n\t\t\tSome(&BalanceLock {\n\n\t\t\t\tid: VESTING_LOCK_ID,\n\n\t\t\t\tamount: 20u64,\n\n\t\t\t\treasons: Reasons::All,\n\n\t\t\t})\n\n\t\t);\n", "file_path": "vesting/src/tests.rs", "rank": 48, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn transfer_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::transfer(&BOB, &BOB, (CLASS_ID, TOKEN_ID)));\n\n\t\tassert_ok!(NonFungibleTokenModule::transfer(&BOB, &ALICE, (CLASS_ID, TOKEN_ID)));\n\n\t\tassert_ok!(NonFungibleTokenModule::transfer(&ALICE, &BOB, (CLASS_ID, TOKEN_ID)));\n\n\t\tassert!(NonFungibleTokenModule::is_owner(&BOB, (CLASS_ID, TOKEN_ID)));\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 49, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn do_withdraw_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(ALICE, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\t// always ok if amount is zero\n\n\t\t\tassert!(!Accounts::<Runtime>::contains_key(BOB, DOT));\n\n\t\t\tassert_ok!(Tokens::do_withdraw(DOT, &BOB, 0, ExistenceRequirement::KeepAlive, true));\n\n\t\t\tassert!(!Accounts::<Runtime>::contains_key(BOB, DOT));\n\n\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTokens::do_withdraw(DOT, &ALICE, 101, ExistenceRequirement::KeepAlive, true),\n\n\t\t\t\tError::<Runtime>::BalanceTooLow\n\n\t\t\t);\n\n\n\n\t\t\tassert_ok!(Tokens::set_lock(ID_1, DOT, &ALICE, 10));\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTokens::do_withdraw(DOT, &ALICE, 91, ExistenceRequirement::KeepAlive, 
true),\n\n\t\t\t\tError::<Runtime>::LiquidityRestrictions\n\n\t\t\t);\n", "file_path": "tokens/src/tests.rs", "rank": 50, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn burn_should_fail() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::burn(&BOB, (CLASS_ID, TOKEN_ID_NOT_EXIST)),\n\n\t\t\tError::<Runtime>::TokenNotFound\n\n\t\t);\n\n\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::burn(&ALICE, (CLASS_ID, TOKEN_ID)),\n\n\t\t\tError::<Runtime>::NoPermission\n\n\t\t);\n\n\t});\n\n\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\n\n\t\tClasses::<Runtime>::mutate(CLASS_ID, |class_info| {\n\n\t\t\tclass_info.as_mut().unwrap().total_issuance = 0;\n\n\t\t});\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::burn(&BOB, (CLASS_ID, TOKEN_ID)),\n\n\t\t\tArithmeticError::Overflow,\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 51, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn do_transfer_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\t// always ok when from == to\n\n\t\t\tassert_ok!(Tokens::do_transfer(\n\n\t\t\t\tDOT,\n\n\t\t\t\t&ALICE,\n\n\t\t\t\t&ALICE,\n\n\t\t\t\t101,\n\n\t\t\t\tExistenceRequirement::KeepAlive\n\n\t\t\t));\n\n\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &ALICE), 100);\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &BOB), 100);\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &CHARLIE), 0);\n\n\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTokens::do_transfer(DOT, &ALICE, &BOB, 101, ExistenceRequirement::KeepAlive),\n", "file_path": 
"tokens/src/tests.rs", "rank": 52, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn mint_should_fail() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tClasses::<Runtime>::mutate(CLASS_ID, |class_info| {\n\n\t\t\tclass_info.as_mut().unwrap().total_issuance = <Runtime as Config>::TokenId::max_value();\n\n\t\t});\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()),\n\n\t\t\tArithmeticError::Overflow,\n\n\t\t);\n\n\n\n\t\tNextTokenId::<Runtime>::mutate(CLASS_ID, |id| *id = <Runtime as Config>::TokenId::max_value());\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()),\n\n\t\t\tError::<Runtime>::NoAvailableTokenId\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 53, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn do_deposit_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(ALICE, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\t// always ok if deposit amount is zero\n\n\t\t\tassert_ok!(Tokens::do_deposit(DOT, &CHARLIE, 0, true, true));\n\n\t\t\tassert_ok!(Tokens::do_deposit(DOT, &CHARLIE, 0, false, true));\n\n\n\n\t\t\tassert!(!Accounts::<Runtime>::contains_key(CHARLIE, DOT));\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &CHARLIE), 0);\n\n\t\t\tassert_eq!(Tokens::total_issuance(DOT), 100);\n\n\t\t\tassert_ok!(Tokens::do_deposit(DOT, &CHARLIE, 10, false, true));\n\n\t\t\tassert!(Accounts::<Runtime>::contains_key(CHARLIE, DOT));\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &CHARLIE), 10);\n\n\t\t\tassert_eq!(Tokens::total_issuance(DOT), 110);\n\n\n\n\t\t\tassert!(Accounts::<Runtime>::contains_key(ALICE, DOT));\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &ALICE), 100);\n\n\t\t\tassert_ok!(Tokens::do_deposit(DOT, &ALICE, 10, true, true));\n", "file_path": "tokens/src/tests.rs", "rank": 54, "score": 123089.17520125736 }, { "content": 
"#[test]\n\nfn should_combined_data() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet key: u32 = 50;\n\n\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(1), vec![(key, 1300)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(2), vec![(key, 1000)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(3), vec![(key, 1200)]));\n\n\n\n\t\tlet expected = Some(TimestampedValue {\n\n\t\t\tvalue: 1200,\n\n\t\t\ttimestamp: 12345,\n\n\t\t});\n\n\n\n\t\tassert_eq!(ModuleOracle::get(&key), expected);\n\n\n\n\t\tTimestamp::set_timestamp(23456);\n\n\n\n\t\tassert_eq!(ModuleOracle::get(&key), expected);\n\n\t});\n\n}\n\n\n", "file_path": "oracle/src/tests.rs", "rank": 55, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn bid_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tSystem::set_block_number(1);\n\n\t\tassert_ok!(AuctionModule::new_auction(0, Some(5)), 0);\n\n\t\tassert_eq!(\n\n\t\t\tAuctionModule::auction_info(0),\n\n\t\t\tSome(AuctionInfo {\n\n\t\t\t\tbid: None,\n\n\t\t\t\tstart: 0,\n\n\t\t\t\tend: Some(5)\n\n\t\t\t})\n\n\t\t);\n\n\t\tassert_ok!(AuctionModule::bid(Origin::signed(ALICE), 0, 20));\n\n\t\tSystem::assert_last_event(Event::AuctionModule(crate::Event::Bid {\n\n\t\t\tauction_id: 0,\n\n\t\t\tbidder: ALICE,\n\n\t\t\tamount: 20,\n\n\t\t}));\n\n\t\tassert_eq!(\n\n\t\t\tAuctionModule::auction_info(0),\n\n\t\t\tSome(AuctionInfo {\n\n\t\t\t\tbid: Some((ALICE, 20)),\n\n\t\t\t\tstart: 0,\n\n\t\t\t\tend: Some(11)\n\n\t\t\t})\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "auction/src/tests.rs", "rank": 56, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn send_as_sovereign() {\n\n\tTestNet::reset();\n\n\n\n\tRelay::execute_with(|| {\n\n\t\tlet _ = RelayBalances::deposit_creating(&para_a_account(), 1_000_000_000_000);\n\n\t});\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tuse xcm::latest::OriginKind::SovereignAccount;\n\n\n\n\t\tlet call 
=\n\n\t\t\trelay::Call::System(frame_system::Call::<relay::Runtime>::remark_with_event { remark: vec![1, 1, 1] });\n\n\t\tlet assets: MultiAsset = (Here, 1_000_000_000_000).into();\n\n\t\tassert_ok!(para::OrmlXcm::send_as_sovereign(\n\n\t\t\tpara::Origin::root(),\n\n\t\t\tBox::new(Parent.into()),\n\n\t\t\tBox::new(VersionedXcm::from(Xcm(vec![\n\n\t\t\t\tWithdrawAsset(assets.clone().into()),\n\n\t\t\t\tBuyExecution {\n\n\t\t\t\t\tfees: assets,\n", "file_path": "xtokens/src/tests.rs", "rank": 57, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn burn_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::burn(&BOB, (CLASS_ID, TOKEN_ID)));\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 58, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn bid_should_fail() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(AuctionModule::new_auction(10, Some(100)), 0);\n\n\t\tassert_ok!(AuctionModule::new_auction(0, Some(100)), 1);\n\n\t\tassert_noop!(\n\n\t\t\tAuctionModule::bid(Origin::signed(ALICE), 0, 20),\n\n\t\t\tError::<Runtime>::AuctionNotStarted\n\n\t\t);\n\n\t\tassert_noop!(\n\n\t\t\tAuctionModule::bid(Origin::signed(BOB), 1, 20),\n\n\t\t\tError::<Runtime>::BidNotAccepted,\n\n\t\t);\n\n\t\tassert_noop!(\n\n\t\t\tAuctionModule::bid(Origin::signed(ALICE), 1, 0),\n\n\t\t\tError::<Runtime>::InvalidBidPrice,\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "auction/src/tests.rs", "rank": 59, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn transfer_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tassert_ok!(Tokens::transfer(Some(ALICE).into(), BOB, DOT, 
50));\n\n\t\t\tSystem::assert_last_event(Event::Tokens(crate::Event::Transfer {\n\n\t\t\t\tcurrency_id: DOT,\n\n\t\t\t\tfrom: ALICE,\n\n\t\t\t\tto: BOB,\n\n\t\t\t\tamount: 50,\n\n\t\t\t}));\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &ALICE), 50);\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &BOB), 150);\n\n\t\t\tassert_eq!(Tokens::total_issuance(DOT), 200);\n\n\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTokens::transfer(Some(ALICE).into(), BOB, DOT, 60),\n\n\t\t\t\tError::<Runtime>::BalanceTooLow,\n\n\t\t\t);\n", "file_path": "tokens/src/tests.rs", "rank": 60, "score": 123089.17520125736 }, { "content": "#[test]\n\nfn test_register_duplicate_asset_id_returns_error() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tassert_ok!(AssetRegistry::register_asset(Origin::root(), dummy_metadata(), Some(1)));\n\n\t\tassert_noop!(\n\n\t\t\tAssetRegistry::do_register_asset_without_asset_processor(dummy_metadata(), 1),\n\n\t\t\tError::<para::Runtime>::ConflictingAssetId\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "asset-registry/src/tests.rs", "rank": 61, "score": 122548.95592401043 }, { "content": "#[test]\n\nfn test_sequential_id_with_invalid_id_returns_error() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tassert_ok!(AssetRegistry::register_asset(Origin::root(), dummy_metadata(), Some(1)));\n\n\t\tassert_noop!(\n\n\t\t\tAssetRegistry::register_asset(Origin::root(), dummy_metadata(), Some(1)),\n\n\t\t\tError::<para::Runtime>::InvalidAssetId\n\n\t\t);\n\n\t});\n\n}\n\n\n\n#[test]\n", "file_path": "asset-registry/src/tests.rs", "rank": 62, "score": 122548.95592401043 }, { "content": "#[test]\n\nfn u128_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet update: GraduallyUpdate<StorageKeyBytes<Runtime>, StorageValueBytes<Runtime>> = GraduallyUpdate {\n\n\t\t\tkey: vec![1].try_into().unwrap(),\n\n\t\t\ttarget_value: 30u128.encode().try_into().unwrap(),\n\n\t\t\tper_block: 
1u128.encode().try_into().unwrap(),\n\n\t\t};\n\n\t\tassert_ok!(GraduallyUpdateModule::gradually_update(Origin::root(), update.clone()));\n\n\t\tassert_eq!(storage_get(&update.key), Vec::<u8>::new());\n\n\t\tGraduallyUpdateModule::on_finalize(10);\n\n\t\tassert_eq!(\n\n\t\t\tstorage_get(&update.key),\n\n\t\t\tvec![10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n\n\t\t);\n\n\t\tGraduallyUpdateModule::on_finalize(15);\n\n\t\tassert_eq!(\n\n\t\t\tstorage_get(&update.key),\n\n\t\t\tvec![10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]\n\n\t\t);\n\n\t\tGraduallyUpdateModule::on_finalize(20);\n", "file_path": "gradually-update/src/tests.rs", "rank": 63, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn create_class_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 64, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn update_auction_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(AuctionModule::new_auction(10, Some(100)), 0);\n\n\t\tassert_noop!(\n\n\t\t\tAuctionModule::update_auction(\n\n\t\t\t\t1,\n\n\t\t\t\tAuctionInfo {\n\n\t\t\t\t\tbid: Some((ALICE, 100)),\n\n\t\t\t\t\tstart: 10,\n\n\t\t\t\t\tend: Some(100)\n\n\t\t\t\t}\n\n\t\t\t),\n\n\t\t\tError::<Runtime>::AuctionNotExist,\n\n\t\t);\n\n\t\tassert_ok!(AuctionModule::update_auction(\n\n\t\t\t0,\n\n\t\t\tAuctionInfo {\n\n\t\t\t\tbid: Some((ALICE, 100)),\n\n\t\t\t\tstart: 10,\n\n\t\t\t\tend: Some(100)\n\n\t\t\t}\n\n\t\t));\n\n\t});\n\n}\n\n\n", "file_path": "auction/src/tests.rs", "rank": 65, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn remove_auction_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(AuctionModule::new_auction(10, Some(100)), 0);\n\n\t\tassert_eq!(AuctionModule::auctions_index(), 
1);\n\n\t\tassert!(AuctionModule::auctions(0).is_some());\n\n\t\tassert_eq!(AuctionModule::auction_end_time(100, 0), Some(()));\n\n\t\tAuctionModule::remove_auction(0);\n\n\t\tassert_eq!(AuctionModule::auctions(0), None);\n\n\t\tassert_eq!(AuctionModule::auction_end_time(100, 0), None);\n\n\t});\n\n}\n\n\n", "file_path": "auction/src/tests.rs", "rank": 66, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn schedule_dispatch_after_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet ensure_root_call = Call::System(frame_system::Call::fill_block { ratio: Perbill::one() });\n\n\t\tlet call = Call::Authority(authority::Call::dispatch_as {\n\n\t\t\tas_origin: MockAsOriginId::Root,\n\n\t\t\tcall: Box::new(ensure_root_call),\n\n\t\t});\n\n\t\trun_to_block(1);\n\n\t\tassert_eq!(\n\n\t\t\tAuthority::schedule_dispatch(Origin::root(), DispatchTime::At(0), 0, true, Box::new(call.clone())),\n\n\t\t\tErr(ArithmeticError::Overflow.into())\n\n\t\t);\n\n\n\n\t\tassert_ok!(Authority::schedule_dispatch(\n\n\t\t\tOrigin::root(),\n\n\t\t\tDispatchTime::After(0),\n\n\t\t\t0,\n\n\t\t\ttrue,\n\n\t\t\tBox::new(call.clone())\n\n\t\t));\n", "file_path": "authority/src/tests.rs", "rank": 67, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn benchmarks_macro_works() {\n\n\t// Check benchmark creation for `set_value`.\n\n\tlet selected_benchmark = SelectedBenchmark::set_value;\n\n\n\n\tlet components = <SelectedBenchmark as BenchmarkingSetup<Test>>::components(&selected_benchmark);\n\n\tassert_eq!(components, vec![(BenchmarkParameter::b, 1, 1000)]);\n\n\n\n\tlet closure = <SelectedBenchmark as BenchmarkingSetup<Test>>::instance(\n\n\t\t&selected_benchmark,\n\n\t\t&[(BenchmarkParameter::b, 1)],\n\n\t\ttrue,\n\n\t)\n\n\t.expect(\"failed to create closure\");\n\n\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_eq!(closure(), Ok(()));\n\n\t});\n\n}\n\n\n", "file_path": "benchmarking/src/tests.rs", "rank": 68, "score": 120922.42743832488 }, { "content": 
"#[test]\n\nfn schedule_dispatch_at_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet ensure_root_call = Call::System(frame_system::Call::fill_block { ratio: Perbill::one() });\n\n\t\tlet call = Call::Authority(authority::Call::dispatch_as {\n\n\t\t\tas_origin: MockAsOriginId::Root,\n\n\t\t\tcall: Box::new(ensure_root_call),\n\n\t\t});\n\n\t\trun_to_block(1);\n\n\t\tassert_eq!(\n\n\t\t\tAuthority::schedule_dispatch(Origin::root(), DispatchTime::At(1), 0, true, Box::new(call.clone())),\n\n\t\t\tErr(Error::<Runtime>::FailedToSchedule.into())\n\n\t\t);\n\n\n\n\t\tassert_ok!(Authority::schedule_dispatch(\n\n\t\t\tOrigin::root(),\n\n\t\t\tDispatchTime::At(2),\n\n\t\t\t0,\n\n\t\t\ttrue,\n\n\t\t\tBox::new(call.clone())\n\n\t\t));\n", "file_path": "authority/src/tests.rs", "rank": 69, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn authorize_call_works() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\trun_to_block(1);\n\n\t\tlet ensure_root_call = Call::System(frame_system::Call::fill_block { ratio: Perbill::one() });\n\n\t\tlet call = Call::Authority(authority::Call::dispatch_as {\n\n\t\t\tas_origin: MockAsOriginId::Root,\n\n\t\t\tcall: Box::new(ensure_root_call),\n\n\t\t});\n\n\t\tlet hash = <Runtime as frame_system::Config>::Hashing::hash_of(&call);\n\n\n\n\t\t// works without account\n\n\t\tassert_ok!(Authority::authorize_call(Origin::root(), Box::new(call.clone()), None));\n\n\t\tassert_eq!(Authority::saved_calls(&hash), Some((call.clone(), None)));\n\n\t\tSystem::assert_last_event(mock::Event::Authority(Event::AuthorizedCall { hash, caller: None }));\n\n\n\n\t\t// works with account\n\n\t\tassert_ok!(Authority::authorize_call(\n\n\t\t\tOrigin::root(),\n\n\t\t\tBox::new(call.clone()),\n\n\t\t\tSome(1)\n\n\t\t));\n\n\t\tassert_eq!(Authority::saved_calls(&hash), Some((call.clone(), Some(1))));\n\n\t\tSystem::assert_last_event(mock::Event::Authority(Event::AuthorizedCall { hash, caller: Some(1) }));\n\n\t});\n\n}\n\n\n", 
"file_path": "authority/src/tests.rs", "rank": 70, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn call_size_limit() {\n\n\tassert!(\n\n\t\tcore::mem::size_of::<authority::Call::<Runtime>>() <= 200,\n\n\t\t\"size of Call is more than 200 bytes: some calls have too big arguments, use Box to \\\n\n\t\treduce the size of Call.\n\n\t\tIf the limit is too strong, maybe consider increasing the limit\",\n\n\t);\n\n}\n\n\n", "file_path": "authority/src/tests.rs", "rank": 71, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn change_member_should_work() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tOracleMembers::set(vec![2, 3, 4]);\n\n\t\t<ModuleOracle as ChangeMembers<AccountId>>::change_members_sorted(&[4], &[1], &[2, 3, 4]);\n\n\t\tassert_noop!(\n\n\t\t\tModuleOracle::feed_values(Origin::signed(1), vec![(50, 1000)]),\n\n\t\t\tError::<Test, _>::NoPermission,\n\n\t\t);\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(2), vec![(50, 1000)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(4), vec![(50, 1000)]));\n\n\t});\n\n}\n\n\n", "file_path": "oracle/src/tests.rs", "rank": 72, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn new_auction_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(AuctionModule::new_auction(10, Some(100)), 0);\n\n\t});\n\n}\n\n\n", "file_path": "auction/src/tests.rs", "rank": 73, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn call_event_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.one_hundred_for_alice_n_bob()\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tSystem::set_block_number(1);\n\n\n\n\t\t\tassert_ok!(Currencies::transfer(Some(ALICE).into(), BOB, X_TOKEN_ID, 50));\n\n\t\t\tassert_eq!(Currencies::free_balance(X_TOKEN_ID, &ALICE), 50);\n\n\t\t\tassert_eq!(Currencies::free_balance(X_TOKEN_ID, &BOB), 150);\n\n\t\t\tSystem::assert_last_event(Event::Tokens(orml_tokens::Event::Transfer {\n\n\t\t\t\tcurrency_id: X_TOKEN_ID,\n\n\t\t\t\tfrom: 
ALICE,\n\n\t\t\t\tto: BOB,\n\n\t\t\t\tamount: 50,\n\n\t\t\t}));\n\n\n\n\t\t\tassert_ok!(<Currencies as MultiCurrency<AccountId>>::transfer(\n\n\t\t\t\tX_TOKEN_ID, &ALICE, &BOB, 10\n\n\t\t\t));\n", "file_path": "currencies/src/tests.rs", "rank": 74, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn trigger_call_works() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\trun_to_block(1);\n\n\t\tlet ensure_root_call = Call::System(frame_system::Call::fill_block { ratio: Perbill::one() });\n\n\t\tlet call = Call::Authority(authority::Call::dispatch_as {\n\n\t\t\tas_origin: MockAsOriginId::Root,\n\n\t\t\tcall: Box::new(ensure_root_call),\n\n\t\t});\n\n\t\tlet hash = <Runtime as frame_system::Config>::Hashing::hash_of(&call);\n\n\n\n\t\tlet call_weight_bound = call.get_dispatch_info().weight;\n\n\n\n\t\t// call not authorized yet\n\n\t\tassert_noop!(\n\n\t\t\tAuthority::trigger_call(Origin::signed(1), hash, call_weight_bound),\n\n\t\t\tError::<Runtime>::CallNotAuthorized\n\n\t\t);\n\n\n\n\t\tassert_ok!(Authority::authorize_call(Origin::root(), Box::new(call.clone()), None));\n\n\n", "file_path": "authority/src/tests.rs", "rank": 75, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn auction_info_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(AuctionModule::new_auction(10, Some(100)), 0);\n\n\t\tassert_eq!(\n\n\t\t\tAuctionModule::auction_info(0),\n\n\t\t\tSome(AuctionInfo {\n\n\t\t\t\tbid: None,\n\n\t\t\t\tstart: 10,\n\n\t\t\t\tend: Some(100)\n\n\t\t\t})\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "auction/src/tests.rs", "rank": 76, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn withdraw_consequence_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_eq!(\n\n\t\t\tTokens::withdraw_consequence(\n\n\t\t\t\t&ALICE,\n\n\t\t\t\tDOT,\n\n\t\t\t\t0,\n\n\t\t\t\t&AccountData {\n\n\t\t\t\t\tfree: 1,\n\n\t\t\t\t\treserved: 0,\n\n\t\t\t\t\tfrozen: 
0\n\n\t\t\t\t}\n\n\t\t\t)\n\n\t\t\t.into_result(),\n\n\t\t\tOk(Zero::zero())\n\n\t\t);\n\n\n\n\t\t// total issuance underflow\n\n\t\tassert_ok!(Tokens::update_balance(DOT, &ALICE, 2));\n\n\t\tassert_eq!(Tokens::total_issuance(DOT), 2);\n", "file_path": "tokens/src/tests.rs", "rank": 77, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn get_all_values_should_work() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet eur: u32 = 1;\n\n\t\tlet jpy: u32 = 2;\n\n\n\n\t\tassert_eq!(ModuleOracle::get_all_values(), vec![]);\n\n\n\n\t\t// feed eur & jpy\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(1), vec![(eur, 1300)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(2), vec![(eur, 1000)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(3), vec![(jpy, 9000)]));\n\n\n\n\t\t// not enough eur & jpy prices\n\n\t\tassert_eq!(ModuleOracle::get(&eur), None);\n\n\t\tassert_eq!(ModuleOracle::get(&jpy), None);\n\n\t\tassert_eq!(ModuleOracle::get_all_values(), vec![]);\n\n\n\n\t\t// finalize block\n\n\t\tModuleOracle::on_finalize(1);\n\n\n", "file_path": "oracle/src/tests.rs", "rank": 78, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn deposit_consequence_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_eq!(\n\n\t\t\tTokens::deposit_consequence(\n\n\t\t\t\t&CHARLIE,\n\n\t\t\t\tDOT,\n\n\t\t\t\t0,\n\n\t\t\t\t&AccountData {\n\n\t\t\t\t\tfree: 1,\n\n\t\t\t\t\treserved: 0,\n\n\t\t\t\t\tfrozen: 0\n\n\t\t\t\t}\n\n\t\t\t)\n\n\t\t\t.into_result(),\n\n\t\t\tOk(())\n\n\t\t);\n\n\n\n\t\t// total issuance overflow\n\n\t\tassert_eq!(\n\n\t\t\tTokens::deposit_consequence(\n", "file_path": "tokens/src/tests.rs", "rank": 79, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn should_feed_values_from_root() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet root_feeder: AccountId = RootOperatorAccountId::get();\n\n\n\n\t\tassert_ok!(ModuleOracle::feed_values(\n\n\t\t\tOrigin::root(),\n\n\t\t\tvec![(50, 
1000), (51, 900), (52, 800)]\n\n\t\t));\n\n\n\n\t\tassert_eq!(\n\n\t\t\tModuleOracle::raw_values(&root_feeder, &50),\n\n\t\t\tSome(TimestampedValue {\n\n\t\t\t\tvalue: 1000,\n\n\t\t\t\ttimestamp: 12345,\n\n\t\t\t})\n\n\t\t);\n\n\n\n\t\tassert_eq!(\n\n\t\t\tModuleOracle::raw_values(&root_feeder, &51),\n\n\t\t\tSome(TimestampedValue {\n", "file_path": "oracle/src/tests.rs", "rank": 80, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn should_read_raw_values() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet key: u32 = 50;\n\n\n\n\t\tlet raw_values = ModuleOracle::read_raw_values(&key);\n\n\t\tassert_eq!(raw_values, vec![]);\n\n\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(1), vec![(key, 1000)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(2), vec![(key, 1200)]));\n\n\n\n\t\tlet raw_values = ModuleOracle::read_raw_values(&key);\n\n\t\tassert_eq!(\n\n\t\t\traw_values,\n\n\t\t\tvec![\n\n\t\t\t\tTimestampedValue {\n\n\t\t\t\t\tvalue: 1000,\n\n\t\t\t\t\ttimestamp: 12345,\n\n\t\t\t\t},\n\n\t\t\t\tTimestampedValue {\n\n\t\t\t\t\tvalue: 1200,\n\n\t\t\t\t\ttimestamp: 12345,\n\n\t\t\t\t},\n\n\t\t\t]\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "oracle/src/tests.rs", "rank": 81, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn multiple_calls_should_fail() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(1), vec![(50, 1300)]));\n\n\t\tassert_noop!(\n\n\t\t\tModuleOracle::feed_values(Origin::signed(1), vec![(50, 1300)]),\n\n\t\t\tError::<Test, _>::AlreadyFeeded,\n\n\t\t);\n\n\n\n\t\tModuleOracle::on_finalize(1);\n\n\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(1), vec![(50, 1300)]));\n\n\t});\n\n}\n\n\n", "file_path": "oracle/src/tests.rs", "rank": 82, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn values_are_updated_on_feed() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(1), vec![(50, 
900)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(2), vec![(50, 1000)]));\n\n\n\n\t\tassert_eq!(ModuleOracle::values(50), None);\n\n\n\n\t\t// Upon the third price feed, the value is updated immediately after `combine`\n\n\t\t// can produce valid result.\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(3), vec![(50, 1100)]));\n\n\t\tassert_eq!(\n\n\t\t\tModuleOracle::values(50),\n\n\t\t\tSome(TimestampedValue {\n\n\t\t\t\tvalue: 1000,\n\n\t\t\t\ttimestamp: 12345,\n\n\t\t\t})\n\n\t\t);\n\n\t});\n\n}\n", "file_path": "oracle/src/tests.rs", "rank": 83, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn set_balance_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\t// bad origin\n\n\t\t\tassert_noop!(Tokens::set_balance(Some(ALICE).into(), ALICE, DOT, 200, 100), BadOrigin);\n\n\n\n\t\t\t// total balance overflow\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTokens::set_balance(RawOrigin::Root.into(), ALICE, DOT, Balance::max_value(), 1),\n\n\t\t\t\tArithmeticError::Overflow\n\n\t\t\t);\n\n\n\n\t\t\t// total issurance overflow\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTokens::set_balance(RawOrigin::Root.into(), ALICE, DOT, Balance::max_value(), 0),\n\n\t\t\t\tArithmeticError::Overflow\n\n\t\t\t);\n\n\n", "file_path": "tokens/src/tests.rs", "rank": 84, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn cliff_vesting_works() {\n\n\tconst VESTING_AMOUNT: u64 = 12;\n\n\tconst VESTING_PERIOD: u64 = 20;\n\n\n\n\tExtBuilder::build().execute_with(|| {\n\n\t\tlet cliff_schedule = VestingSchedule {\n\n\t\t\tstart: VESTING_PERIOD - 1,\n\n\t\t\tperiod: 1,\n\n\t\t\tperiod_count: 1,\n\n\t\t\tper_period: VESTING_AMOUNT,\n\n\t\t};\n\n\n\n\t\tlet balance_lock = BalanceLock {\n\n\t\t\tid: VESTING_LOCK_ID,\n\n\t\t\tamount: VESTING_AMOUNT,\n\n\t\t\treasons: Reasons::All,\n\n\t\t};\n\n\n\n\t\tassert_eq!(PalletBalances::free_balance(BOB), 
0);\n\n\t\tassert_ok!(Vesting::vested_transfer(Origin::signed(ALICE), BOB, cliff_schedule));\n", "file_path": "vesting/src/tests.rs", "rank": 85, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn multi_currency_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.one_hundred_for_alice_n_bob()\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tassert_ok!(Currencies::transfer(Some(ALICE).into(), BOB, X_TOKEN_ID, 50));\n\n\t\t\tassert_eq!(Currencies::free_balance(X_TOKEN_ID, &ALICE), 50);\n\n\t\t\tassert_eq!(Currencies::free_balance(X_TOKEN_ID, &BOB), 150);\n\n\t\t});\n\n}\n\n\n", "file_path": "currencies/src/tests.rs", "rank": 86, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn genesis_issuance_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &ALICE), 100);\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &BOB), 100);\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &DustReceiver::get()), 0);\n\n\t\t\tassert_eq!(Tokens::total_issuance(DOT), 200);\n\n\t\t});\n\n}\n\n\n\n// *************************************************\n\n// tests for call\n\n// *************************************************\n\n\n", "file_path": "tokens/src/tests.rs", "rank": 87, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn native_currency_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.one_hundred_for_alice_n_bob()\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tassert_ok!(Currencies::transfer_native_currency(Some(ALICE).into(), BOB, 50));\n\n\t\t\tassert_eq!(NativeCurrency::free_balance(&ALICE), 50);\n\n\t\t\tassert_eq!(NativeCurrency::free_balance(&BOB), 150);\n\n\n\n\t\t\tassert_ok!(NativeCurrency::transfer(&ALICE, &BOB, 10));\n\n\t\t\tassert_eq!(NativeCurrency::free_balance(&ALICE), 40);\n\n\t\t\tassert_eq!(NativeCurrency::free_balance(&BOB), 160);\n\n\n\n\t\t\tassert_eq!(Currencies::slash(NATIVE_CURRENCY_ID, &ALICE, 
10), 0);\n\n\t\t\tassert_eq!(NativeCurrency::free_balance(&ALICE), 30);\n\n\t\t\tassert_eq!(NativeCurrency::total_issuance(), 190);\n\n\t\t});\n\n}\n\n\n", "file_path": "currencies/src/tests.rs", "rank": 88, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn cleanup_auction_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(AuctionModule::new_auction(10, Some(100)), 0);\n\n\t\tassert_eq!(AuctionModule::auctions_index(), 1);\n\n\t\tassert_ok!(AuctionModule::new_auction(10, Some(50)), 1);\n\n\t\tassert_eq!(AuctionModule::auctions_index(), 2);\n\n\t\tassert!(AuctionModule::auctions(0).is_some());\n\n\t\tassert!(AuctionModule::auctions(1).is_some());\n\n\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(0).count(), 0);\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(50).count(), 1);\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(100).count(), 1);\n\n\n\n\t\tAuctionModule::on_finalize(50);\n\n\t\tassert!(AuctionModule::auctions(0).is_some());\n\n\t\tassert!(!AuctionModule::auctions(1).is_some());\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(0).count(), 0);\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(50).count(), 0);\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(100).count(), 1);\n\n\n\n\t\tAuctionModule::on_finalize(100);\n\n\t\tassert!(!AuctionModule::auctions(0).is_some());\n\n\t\tassert!(!AuctionModule::auctions(1).is_some());\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(0).count(), 0);\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(50).count(), 0);\n\n\t\tassert_eq!(<AuctionEndTime<Runtime>>::iter_prefix(100).count(), 0);\n\n\t});\n\n}\n\n\n", "file_path": "auction/src/tests.rs", "rank": 89, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn vested_transfer_works() {\n\n\tExtBuilder::build().execute_with(|| {\n\n\t\tSystem::set_block_number(1);\n\n\n\n\t\tlet schedule = VestingSchedule {\n\n\t\t\tstart: 0u64,\n\n\t\t\tperiod: 
10u64,\n\n\t\t\tperiod_count: 1u32,\n\n\t\t\tper_period: 100u64,\n\n\t\t};\n\n\t\tassert_ok!(Vesting::vested_transfer(Origin::signed(ALICE), BOB, schedule.clone()));\n\n\t\tassert_eq!(Vesting::vesting_schedules(&BOB), vec![schedule.clone()]);\n\n\t\tSystem::assert_last_event(Event::Vesting(crate::Event::VestingScheduleAdded {\n\n\t\t\tfrom: ALICE,\n\n\t\t\tto: BOB,\n\n\t\t\tvesting_schedule: schedule,\n\n\t\t}));\n\n\t});\n\n}\n\n\n", "file_path": "vesting/src/tests.rs", "rank": 90, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn destroy_class_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::burn(&BOB, (CLASS_ID, TOKEN_ID)));\n\n\t\tassert_ok!(NonFungibleTokenModule::destroy_class(&ALICE, CLASS_ID));\n\n\t\tassert!(!Classes::<Runtime>::contains_key(CLASS_ID));\n\n\t\tassert!(!NextTokenId::<Runtime>::contains_key(CLASS_ID));\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 91, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn should_feed_values_from_member() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tSystem::set_block_number(1);\n\n\t\tlet account_id: AccountId = 1;\n\n\n\n\t\tassert_noop!(\n\n\t\t\tModuleOracle::feed_values(Origin::signed(5), vec![(50, 1000), (51, 900), (52, 800)]),\n\n\t\t\tError::<Test, _>::NoPermission,\n\n\t\t);\n\n\n\n\t\tassert_eq!(\n\n\t\t\tModuleOracle::feed_values(Origin::signed(account_id), vec![(50, 1000), (51, 900), (52, 800)])\n\n\t\t\t\t.unwrap()\n\n\t\t\t\t.pays_fee,\n\n\t\t\tPays::No\n\n\t\t);\n\n\t\tSystem::assert_last_event(Event::ModuleOracle(crate::Event::NewFeedData {\n\n\t\t\tsender: 1,\n\n\t\t\tvalues: vec![(50, 1000), (51, 900), (52, 800)],\n\n\t\t}));\n", "file_path": "oracle/src/tests.rs", "rank": 92, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn 
force_transfer_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(ALICE, DOT, 100), (BOB, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tassert!(Accounts::<Runtime>::contains_key(ALICE, DOT));\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &ALICE), 100);\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &BOB), 100);\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTokens::force_transfer(Some(ALICE).into(), ALICE, BOB, DOT, 100),\n\n\t\t\t\tBadOrigin\n\n\t\t\t);\n\n\n\n\t\t\t// imply AllowDeath\n\n\t\t\tassert_ok!(Tokens::force_transfer(RawOrigin::Root.into(), ALICE, BOB, DOT, 100));\n\n\t\t\tSystem::assert_last_event(Event::Tokens(crate::Event::Transfer {\n\n\t\t\t\tcurrency_id: DOT,\n\n\t\t\t\tfrom: ALICE,\n\n\t\t\t\tto: BOB,\n\n\t\t\t\tamount: 100,\n\n\t\t\t}));\n\n\t\t\tassert!(!Accounts::<Runtime>::contains_key(ALICE, DOT));\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &ALICE), 0);\n\n\t\t\tassert_eq!(Tokens::free_balance(DOT, &BOB), 200);\n\n\t\t});\n\n}\n\n\n", "file_path": "tokens/src/tests.rs", "rank": 93, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn fixedu128_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet update: GraduallyUpdate<StorageKeyBytes<Runtime>, StorageValueBytes<Runtime>> = GraduallyUpdate {\n\n\t\t\tkey: vec![1].try_into().unwrap(),\n\n\t\t\ttarget_value: FixedU128::saturating_from_rational(30, 1).encode().try_into().unwrap(),\n\n\t\t\tper_block: FixedU128::saturating_from_rational(1, 1).encode().try_into().unwrap(),\n\n\t\t};\n\n\t\tassert_ok!(GraduallyUpdateModule::gradually_update(Origin::root(), update.clone()));\n\n\t\tassert_eq!(storage_get(&update.key), Vec::<u8>::new());\n\n\t\tGraduallyUpdateModule::on_finalize(10);\n\n\t\tassert_eq!(\n\n\t\t\tstorage_get(&update.key),\n\n\t\t\tvec![0, 0, 232, 137, 4, 35, 199, 138, 0, 0, 0, 0, 0, 0, 0, 0]\n\n\t\t);\n\n\t\tGraduallyUpdateModule::on_finalize(15);\n\n\t\tassert_eq!(\n\n\t\t\tstorage_get(&update.key),\n\n\t\t\tvec![0, 0, 232, 137, 
4, 35, 199, 138, 0, 0, 0, 0, 0, 0, 0, 0]\n\n\t\t);\n\n\t\tGraduallyUpdateModule::on_finalize(20);\n", "file_path": "gradually-update/src/tests.rs", "rank": 94, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn permill_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet update: GraduallyUpdate<StorageKeyBytes<Runtime>, StorageValueBytes<Runtime>> = GraduallyUpdate {\n\n\t\t\tkey: vec![1].try_into().unwrap(),\n\n\t\t\ttarget_value: Permill::from_percent(30).encode().try_into().unwrap(),\n\n\t\t\tper_block: Permill::from_percent(1).encode().try_into().unwrap(),\n\n\t\t};\n\n\t\tassert_ok!(GraduallyUpdateModule::gradually_update(Origin::root(), update.clone()));\n\n\t\tassert_eq!(storage_get(&update.key), Vec::<u8>::new());\n\n\t\tGraduallyUpdateModule::on_finalize(10);\n\n\t\tassert_eq!(storage_get(&update.key), vec![160, 134, 1, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(15);\n\n\t\tassert_eq!(storage_get(&update.key), vec![160, 134, 1, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(20);\n\n\t\tassert_eq!(storage_get(&update.key), vec![64, 13, 3, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(40);\n\n\t\tassert_eq!(storage_get(&update.key), vec![224, 147, 4, 0]);\n\n\t});\n\n}\n\n\n", "file_path": "gradually-update/src/tests.rs", "rank": 95, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn endowed_account_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_eq!(System::providers(&ALICE), 0);\n\n\t\tassert!(!Accounts::<Runtime>::contains_key(ALICE, DOT));\n\n\t\tTokens::set_free_balance(DOT, &ALICE, 100);\n\n\t\tSystem::assert_last_event(Event::Tokens(crate::Event::Endowed {\n\n\t\t\tcurrency_id: DOT,\n\n\t\t\twho: ALICE,\n\n\t\t\tamount: 100,\n\n\t\t}));\n\n\t\tassert_eq!(System::providers(&ALICE), 1);\n\n\t\tassert!(Accounts::<Runtime>::contains_key(ALICE, DOT));\n\n\t});\n\n}\n\n\n", "file_path": "tokens/src/tests.rs", "rank": 96, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn 
u32_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet update: GraduallyUpdate<StorageKeyBytes<Runtime>, StorageValueBytes<Runtime>> = GraduallyUpdate {\n\n\t\t\tkey: vec![1].try_into().unwrap(),\n\n\t\t\ttarget_value: 30u32.encode().try_into().unwrap(),\n\n\t\t\tper_block: 1u32.encode().try_into().unwrap(),\n\n\t\t};\n\n\t\tassert_ok!(GraduallyUpdateModule::gradually_update(Origin::root(), update.clone()));\n\n\t\tassert_eq!(storage_get(&update.key), Vec::<u8>::new());\n\n\t\tGraduallyUpdateModule::on_finalize(10);\n\n\t\tassert_eq!(storage_get(&update.key), vec![10, 0, 0, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(15);\n\n\t\tassert_eq!(storage_get(&update.key), vec![10, 0, 0, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(20);\n\n\t\tassert_eq!(storage_get(&update.key), vec![20, 0, 0, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(40);\n\n\t\tassert_eq!(storage_get(&update.key), vec![30, 0, 0, 0]);\n\n\t});\n\n}\n\n\n", "file_path": "gradually-update/src/tests.rs", "rank": 97, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn destroy_class_should_fail() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tassert_ok!(NonFungibleTokenModule::create_class(&ALICE, vec![1], ()));\n\n\t\tassert_ok!(NonFungibleTokenModule::mint(&BOB, CLASS_ID, vec![1], ()));\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::destroy_class(&ALICE, CLASS_ID_NOT_EXIST),\n\n\t\t\tError::<Runtime>::ClassNotFound\n\n\t\t);\n\n\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::destroy_class(&BOB, CLASS_ID),\n\n\t\t\tError::<Runtime>::NoPermission\n\n\t\t);\n\n\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::destroy_class(&ALICE, CLASS_ID),\n\n\t\t\tError::<Runtime>::CannotDestroyClass\n\n\t\t);\n\n\n\n\t\tassert_ok!(NonFungibleTokenModule::burn(&BOB, (CLASS_ID, TOKEN_ID)));\n\n\t\tassert_ok!(NonFungibleTokenModule::destroy_class(&ALICE, CLASS_ID));\n\n\t\tassert!(!Classes::<Runtime>::contains_key(CLASS_ID));\n\n\t});\n\n}\n\n\n", 
"file_path": "nft/src/tests.rs", "rank": 98, "score": 120922.42743832488 }, { "content": "#[test]\n\nfn create_class_should_fail() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tNextClassId::<Runtime>::mutate(|id| *id = <Runtime as Config>::ClassId::max_value());\n\n\t\tassert_noop!(\n\n\t\t\tNonFungibleTokenModule::create_class(&ALICE, vec![1], ()),\n\n\t\t\tError::<Runtime>::NoAvailableClassId\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "nft/src/tests.rs", "rank": 99, "score": 120922.42743832488 } ]
Rust
alvr/common/src/audio.rs
zarik5/ALVR
7ed89fc8525647d058fa812af8be88f23f8a17a0
use crate::*; #[cfg(windows)] use std::ptr; #[cfg(windows)] use widestring::*; #[cfg(windows)] use winapi::{ shared::{winerror::*, wtypes::VT_LPWSTR}, um::{ combaseapi::*, coml2api::STGM_READ, functiondiscoverykeys_devpkey::PKEY_Device_FriendlyName, mmdeviceapi::*, objbase::CoInitialize, propidl::PROPVARIANT, propsys::IPropertyStore, }, Class, Interface, }; #[cfg(windows)] use wio::com::ComPtr; #[derive(serde::Serialize)] pub struct AudioDevicesDesc { pub list: Vec<(String, String)>, pub default: Option<String>, } #[cfg(windows)] fn get_device_name(mm_device: ComPtr<IMMDevice>) -> StrResult<String> { unsafe { let mut property_store_ptr: *mut IPropertyStore = ptr::null_mut(); let hr = mm_device.OpenPropertyStore(STGM_READ, &mut property_store_ptr as _); if FAILED(hr) { return trace_str!("IMMDevice::OpenPropertyStore failed: hr = 0x{:08x}", hr); } let property_store = ComPtr::from_raw(property_store_ptr); let mut prop_variant = PROPVARIANT::default(); let hr = property_store.GetValue(&PKEY_Device_FriendlyName, &mut prop_variant); if FAILED(hr) { return trace_str!("IPropertyStore::GetValue failed: hr = 0x{:08x}", hr); } if prop_variant.vt as u32 != VT_LPWSTR { return trace_str!( "PKEY_Device_FriendlyName variant type is {} - expected VT_LPWSTR", prop_variant.vt ); } let res = trace_err!(U16CStr::from_ptr_str(*prop_variant.data.pwszVal()).to_string()); let hr = PropVariantClear(&mut prop_variant); if FAILED(hr) { return trace_str!("PropVariantClear failed: hr = 0x{:08x}", hr); } res } } #[cfg(windows)] fn get_audio_device_id_and_name(device: ComPtr<IMMDevice>) -> StrResult<(String, String)> { let id_str = unsafe { let mut id_str_ptr = ptr::null_mut(); device.GetId(&mut id_str_ptr); let id_str = trace_err!(U16CStr::from_ptr_str(id_str_ptr).to_string())?; CoTaskMemFree(id_str_ptr as _); id_str }; Ok((id_str, get_device_name(device)?)) } #[cfg(not(windows))] pub fn output_audio_devices() -> StrResult<AudioDevicesDesc> { todo!() } #[cfg(windows)] pub fn 
output_audio_devices() -> StrResult<AudioDevicesDesc> { let mut device_list = vec![]; unsafe { CoInitialize(ptr::null_mut()); let mut mm_device_enumerator_ptr: *mut IMMDeviceEnumerator = ptr::null_mut(); let hr = CoCreateInstance( &MMDeviceEnumerator::uuidof(), ptr::null_mut(), CLSCTX_ALL, &IMMDeviceEnumerator::uuidof(), &mut mm_device_enumerator_ptr as *mut _ as _, ); if FAILED(hr) { return trace_str!( "CoCreateInstance(IMMDeviceEnumerator) failed: hr = 0x{:08x}", hr ); } let mm_device_enumerator = ComPtr::from_raw(mm_device_enumerator_ptr); let mut default_mm_device_ptr: *mut IMMDevice = ptr::null_mut(); let hr = mm_device_enumerator.GetDefaultAudioEndpoint( eRender, eConsole, &mut default_mm_device_ptr as *mut _, ); if hr == HRESULT_FROM_WIN32(ERROR_NOT_FOUND) { return trace_str!("No default audio endpoint found. No audio device?"); } if FAILED(hr) { return trace_str!( "IMMDeviceEnumerator::GetDefaultAudioEndpoint failed: hr = 0x{:08x}", hr ); } let default_mm_device = ComPtr::from_raw(default_mm_device_ptr); let (default_id, default_name) = get_audio_device_id_and_name(default_mm_device)?; device_list.push((default_id.clone(), default_name.clone())); let mut mm_device_collection_ptr: *mut IMMDeviceCollection = ptr::null_mut(); let hr = mm_device_enumerator.EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &mut mm_device_collection_ptr as _, ); if FAILED(hr) { return trace_str!( "IMMDeviceEnumerator::EnumAudioEndpoints failed: hr = 0x{:08x}", hr ); } let mm_device_collection = ComPtr::from_raw(mm_device_collection_ptr); #[allow(unused_mut)] let mut count = 0; let hr = mm_device_collection.GetCount(&count); if FAILED(hr) { return trace_str!("IMMDeviceCollection::GetCount failed: hr = 0x{:08x}", hr); } debug!("Active render endpoints found: {}", count); debug!("DefaultDevice:{} ID:{}", default_name, default_id); for i in 0..count { let mut mm_device_ptr: *mut IMMDevice = ptr::null_mut(); let hr = mm_device_collection.Item(i, &mut mm_device_ptr as _); if FAILED(hr) 
{ warn!("Crash!"); return trace_str!("IMMDeviceCollection::Item failed: hr = 0x{:08x}", hr); } let mm_device = ComPtr::from_raw(mm_device_ptr); let (id, name) = get_audio_device_id_and_name(mm_device)?; if id == default_id { continue; } debug!("Device{}:{} ID:{}", i, name, id); device_list.push((id, name)); } } let default = device_list.get(0).map(|dev| dev.0.clone()); let audio_devices_desc = AudioDevicesDesc { list: device_list, default, }; Ok(audio_devices_desc) }
use crate::*; #[cfg(windows)] use std::ptr; #[cfg(windows)] use widestring::*; #[cfg(windows)] use winapi::{ shared::{winerror::*, wtypes::VT_LPWSTR}, um::{ combaseapi::*, coml2api::STGM_READ, functiondiscoverykeys_devpkey::PKEY_Device_FriendlyName, mmdeviceapi::*, objbase::CoInitialize, propidl::PROPVARIANT, propsys::IPropertyStore, }, Class, Interface, }; #[cfg(windows)] use wio::com::ComPtr; #[derive(serde::Serialize)] pub struct AudioDevicesDesc { pub list: Vec<(String, String)>, pub default: Option<String>, } #[cfg(windows)] fn get_device_name(mm_device: ComPtr<IMMDevice>) -> StrResult<String> { unsafe { let mut property_store_ptr: *mut IPropertyStore = ptr::null_mut(); let hr = mm_device.OpenPropertyStore(STGM_READ, &mut property_store_ptr as _); if FAILED(hr) { return trace_str!("IMMDevice::OpenPropertyStore failed: hr = 0x{:08x}", hr); } let property_store = ComPtr::from_raw(property_store_ptr); let mut prop_variant = PROPVARIANT::default(); let hr = property_store.GetValue(&PKEY_Device_FriendlyName, &mut prop_variant); if FAILED(hr) { return trace_str!("IPropertyStore::GetValue failed: hr = 0x{:08x}", hr); } if prop_variant.vt as u32 != VT_LPWSTR { return trace_str!( "PKEY_Device_FriendlyName variant type is {} - expected VT_LPWSTR", prop_variant.vt ); } let res = trace_err!(U16CStr::from_ptr_str(*prop_variant.data.pwszVal()).to_string()); let hr = PropVariantClear(&mut prop_variant); if FAILED(hr) { return trace_str!("PropVariantClear failed: hr = 0x{:08x}", hr); } res } } #[cfg(windows)] fn get_audio_device_id_and_name(device: ComPtr<IMMDevice>) -> StrResult<(String, String)> { let id_str = unsafe { let mut id_str_ptr = ptr::null_mut(); device.GetId(&mut id_str_ptr); let id_str = trace_err!(U16CStr::from_ptr_str(id_str_ptr).to_string())?; CoTaskMemFree(id_str_ptr as _); id_str }; Ok((id_str, get_device_name(device)?)) } #[cfg(not(windows))] pub fn output_audio_devices() -> StrResult<AudioDevicesDesc> { todo!() } #[cfg(windows)] pub fn 
output_audio_devices() -> StrResult<AudioDevicesDesc> { let mut device_list = vec![]; unsafe { CoInitialize(ptr::null_mut()); let mut mm_device_enumerator_ptr: *mut IMMDeviceEnumerator = ptr::null_mut(); let hr = CoCreateInstance( &MMDeviceEnumerator::uuidof(), ptr::null_mut(), CLSCTX_ALL, &IMMDeviceEnumerator::uuidof(), &mut mm_device_enumerator_ptr as *mut _ as _, ); if FAILED(hr) { return trace_str!( "CoCreateInstance(IMMDeviceEnumerator) failed: hr = 0x{:08x}", hr ); } let mm_device_enumerator = ComPtr::from_raw(mm_device_enumerator_ptr); let mut default_mm_device_ptr: *mut IMMDevice = ptr::null_mut(); let hr = mm_device_enumerator.GetDefaultAudioEndpoint( eRender, eConsole, &mut default_mm_device_ptr as *mut _, ); if hr == HRESULT_FROM_WIN32(ERROR_NOT_FOUND) { return trace_str!("No default audio endpoint found. No audio device?"); } if FAILED(hr) { return trace_str!( "IMMDeviceEnumerator::GetDefaultAudioEndpoint failed: hr = 0x{:08x}", hr ); } let default_mm_device = ComPtr::from_raw(default_mm_device_ptr); let (default_id, default_name) = get_audio_device_id_and_name(default_mm_device)?; device_list.push((default_id.clone(), default_name.clone())); let mut mm_device_collection_ptr: *mut IMMDeviceCollection = ptr::null_mut(); let hr = mm_device_enumerator.EnumAudioEndpoints( eRender, DEVICE_STATE_ACTIVE, &mut mm_device_collection_ptr as _, ); if FAILED(hr) { return trace_str!( "IMMDeviceEnumerator::EnumAudioEndpoints failed: hr = 0x{:08x}", hr ); } let mm_device_collection = ComPtr::from_raw(mm_device_collection_ptr); #[allow(unused_mut)] let mut count = 0;
let hr = mm_device_collection.GetCount(&count); if FAILED(hr) { return trace_str!("IMMDeviceCollection::GetCount failed: hr = 0x{:08x}", hr); } debug!("Active render endpoints found: {}", count); debug!("DefaultDevice:{} ID:{}", default_name, default_id); for i in 0..count { let mut mm_device_ptr: *mut IMMDevice = ptr::null_mut(); let hr = mm_device_collection.Item(i, &mut mm_device_ptr as _); if FAILED(hr) { warn!("Crash!"); return trace_str!("IMMDeviceCollection::Item failed: hr = 0x{:08x}", hr); } let mm_device = ComPtr::from_raw(mm_device_ptr); let (id, name) = get_audio_device_id_and_name(mm_device)?; if id == default_id { continue; } debug!("Device{}:{} ID:{}", i, name, id); device_list.push((id, name)); } } let default = device_list.get(0).map(|dev| dev.0.clone()); let audio_devices_desc = AudioDevicesDesc { list: device_list, default, }; Ok(audio_devices_desc) }
function_block-function_prefix_line
[ { "content": "#[cfg(windows)]\n\npub fn get_windows_device_id(device: &AudioDevice) -> StrResult<String> {\n\n unsafe {\n\n let mm_device = get_windows_device(device)?;\n\n\n\n let mut id_str_ptr = ptr::null_mut();\n\n mm_device.GetId(&mut id_str_ptr);\n\n let id_str = trace_err!(U16CStr::from_ptr_str(id_str_ptr).to_string())?;\n\n CoTaskMemFree(id_str_ptr as _);\n\n\n\n Ok(id_str)\n\n }\n\n}\n\n\n\n// device must be an output device\n", "file_path": "alvr/audio/src/lib.rs", "rank": 0, "score": 311433.52852003183 }, { "content": "pub fn get_sample_rate(device: &AudioDevice) -> StrResult<u32> {\n\n let maybe_config_range = trace_err!(device.inner.supported_output_configs())?.next();\n\n let config = if let Some(config) = maybe_config_range {\n\n config\n\n } else {\n\n trace_none!(trace_err!(device.inner.supported_input_configs())?.next())?\n\n };\n\n\n\n // Assumption: device is in shared mode: this means that there is one and fixed sample rate,\n\n // format and channel count\n\n Ok(config.min_sample_rate().0)\n\n}\n\n\n\n#[cfg_attr(not(windows), allow(unused_variables))]\n\npub async fn record_audio_loop(\n\n device: AudioDevice,\n\n channels_count: u16,\n\n sample_rate: u32,\n\n mute: bool,\n\n mut sender: StreamSender<()>,\n", "file_path": "alvr/audio/src/lib.rs", "rank": 1, "score": 308145.5397336768 }, { "content": "#[cfg_attr(not(target_os = \"linux\"), allow(unused_variables))]\n\npub fn get_devices_list(linux_backend: LinuxAudioBackend) -> StrResult<AudioDevicesList> {\n\n #[cfg(target_os = \"linux\")]\n\n let host = match linux_backend {\n\n LinuxAudioBackend::Alsa => cpal::host_from_id(cpal::HostId::Alsa).unwrap(),\n\n LinuxAudioBackend::Jack => cpal::host_from_id(cpal::HostId::Jack).unwrap(),\n\n };\n\n #[cfg(not(target_os = \"linux\"))]\n\n let host = cpal::default_host();\n\n\n\n let output = trace_err!(host.output_devices())?\n\n .filter_map(|d| d.name().ok())\n\n .collect::<Vec<_>>();\n\n let input = trace_err!(host.input_devices())?\n\n 
.filter_map(|d| d.name().ok())\n\n .collect::<Vec<_>>();\n\n\n\n Ok(AudioDevicesList { output, input })\n\n}\n\n\n\npub enum AudioDeviceType {\n", "file_path": "alvr/audio/src/lib.rs", "rank": 2, "score": 301172.04889990634 }, { "content": "pub fn get_gpu_names() -> Vec<String> {\n\n GPU_ADAPTERS\n\n .iter()\n\n .map(|a| a.get_info().name)\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "alvr/server/src/graphics_info.rs", "rank": 3, "score": 271033.2910761467 }, { "content": "pub fn is_same_device(device1: &AudioDevice, device2: &AudioDevice) -> bool {\n\n if let (Ok(name1), Ok(name2)) = (device1.inner.name(), device2.inner.name()) {\n\n name1 == name2\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "alvr/audio/src/lib.rs", "rank": 4, "score": 265649.11589966284 }, { "content": "pub fn update_client_list(hostname: String, action: ClientListAction) {\n\n let mut client_connections = SESSION_MANAGER.lock().get().client_connections.clone();\n\n\n\n let maybe_client_entry = client_connections.entry(hostname);\n\n\n\n let mut updated = false;\n\n match action {\n\n ClientListAction::AddIfMissing { display_name } => {\n\n if let Entry::Vacant(new_entry) = maybe_client_entry {\n\n let client_connection_desc = ClientConnectionDesc {\n\n trusted: false,\n\n manual_ips: HashSet::new(),\n\n display_name,\n\n };\n\n new_entry.insert(client_connection_desc);\n\n\n\n updated = true;\n\n }\n\n }\n\n ClientListAction::TrustAndMaybeAddIp(maybe_ip) => {\n", "file_path": "alvr/server/src/lib.rs", "rank": 5, "score": 244506.3680088633 }, { "content": "pub fn create_default_sampler(device: &Device) -> Sampler {\n\n device.create_sampler(&SamplerDescriptor {\n\n address_mode_u: AddressMode::ClampToEdge,\n\n address_mode_v: AddressMode::ClampToEdge,\n\n mag_filter: FilterMode::Linear,\n\n min_filter: FilterMode::Linear,\n\n mipmap_filter: FilterMode::Linear,\n\n ..Default::default()\n\n })\n\n}\n\n\n\npub struct BindingDesc<'a> {\n\n pub index: u32,\n\n pub binding_type: 
BindingType,\n\n pub array_size: Option<usize>,\n\n pub resource: BindingResource<'a>,\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/lib.rs", "rank": 6, "score": 243778.89988623324 }, { "content": "pub fn crate_version<P: AsRef<Path>>(crate_path: P) -> String {\n\n let result = Command::new(format!(\"cargo\"))\n\n .args([\"pkgid\", \"--quiet\"])\n\n .current_dir(crate_path)\n\n .output()\n\n .unwrap()\n\n .stdout;\n\n let version_str = std::str::from_utf8(&result).unwrap().trim().to_string();\n\n if let Some((_, v)) = version_str.rsplit_once('#') {\n\n let mut x = v.trim().to_string();\n\n if let Some((_, xx)) = x.rsplit_once('@') {\n\n x = xx.trim().to_string();\n\n }\n\n if let Some((_, xx)) = x.rsplit_once(':') {\n\n x = xx.trim().to_string();\n\n }\n\n x\n\n } else {\n\n String::from(\"0.0.0\")\n\n }\n\n}\n", "file_path": "alvr/xtask/src/command.rs", "rank": 7, "score": 237122.47955427563 }, { "content": "type RequestHandler = dyn FnMut(String) -> StrResult<String>;\n", "file_path": "alvr/experiments/gui/src/dashboard/mod.rs", "rank": 8, "score": 236843.73087127856 }, { "content": "pub fn version() -> String {\n\n version_from_dir(\"common\")\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 9, "score": 229176.64484915335 }, { "content": "pub fn load_config_string() -> String {\n\n // todo\n\n \"\".into()\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/storage/desktop.rs", "rank": 10, "score": 228399.26263414056 }, { "content": "pub fn load_config_string() -> String {\n\n let vm_ptr = ndk_glue::native_activity().vm();\n\n let vm = unsafe { jni::JavaVM::from_raw(vm_ptr).unwrap() };\n\n let env = vm.attach_current_thread().unwrap();\n\n\n\n let shared_preferences = get_preferences_object(&env);\n\n\n\n let key = env.new_string(CONFIG_KEY).unwrap();\n\n let default = env.new_string(\"\").unwrap();\n\n\n\n let config = env\n\n .call_method(\n\n shared_preferences,\n\n \"getString\",\n\n 
\"(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;\",\n\n &[key.into(), default.into()],\n\n )\n\n .unwrap();\n\n\n\n env.get_string(config.l().unwrap().into()).unwrap().into()\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/storage/android.rs", "rank": 11, "score": 228399.26263414056 }, { "content": "pub fn alxr_version() -> String {\n\n version_from_dir(\"openxr-client/alxr-common\")\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 12, "score": 224651.19568575546 }, { "content": " class StringType = std::string, class BooleanType = bool,\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 13, "score": 223118.15402021538 }, { "content": " class String;\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/Variant.h", "rank": 14, "score": 222613.21045670816 }, { "content": "pub fn date_utc_yyyymmdd() -> String {\n\n let output = if cfg!(windows) {\n\n Command::new(\"powershell\")\n\n .arg(\"(Get-Date).ToUniversalTime().ToString(\\\"yyyy.MM.dd\\\")\")\n\n .output()\n\n .unwrap()\n\n } else {\n\n Command::new(\"date\")\n\n .args(&[\"-u\", \"+%Y.%m.%d\"])\n\n .output()\n\n .unwrap()\n\n };\n\n\n\n String::from_utf8_lossy(&output.stdout)\n\n .as_ref()\n\n .to_owned()\n\n .replace('\\r', \"\")\n\n .replace('\\n', \"\")\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 15, "score": 220361.66268698726 }, { "content": " class StringType, class BooleanType, class NumberIntegerType, \\\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 16, "score": 218471.3332001905 }, { "content": " //----------------------------------------------------------------------------------------------\n\n // helper String class\n\n //----------------------------------------------------------------------------------------------\n\n class AMFVariant::String\n\n {\n\n friend class AMFVariant;\n\n private:\n\n void Free()\n\n {\n\n if (m_Str != NULL)\n\n {\n\n AMFVariantFreeString(m_Str);\n\n 
m_Str = NULL;\n\n }\n\n }\n\n public:\n\n String() :m_Str(NULL){}\n\n String(const char* str) : m_Str(NULL)\n\n {\n\n m_Str = AMFVariantDuplicateString(str);\n\n }\n\n String(const String& p_other) : m_Str(NULL)\n\n {\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/Variant.h", "rank": 17, "score": 216962.03520643542 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn get_screen_size() -> StrResult<(u32, u32)> {\n\n Ok((0, 0))\n\n}\n", "file_path": "alvr/server/src/graphics_info.rs", "rank": 18, "score": 216334.86203454848 }, { "content": "// Consistent across architectures, might not be consistent across different compiler versions.\n\npub fn hash_string(string: &str) -> u64 {\n\n let mut hasher = DefaultHasher::new();\n\n string.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n\npub const HEAD_PATH: &str = \"/user/head\";\n\npub const LEFT_HAND_PATH: &str = \"/user/hand/left\";\n\npub const RIGHT_HAND_PATH: &str = \"/user/hand/right\";\n\npub const LEFT_CONTROLLER_HAPTIC_PATH: &str = \"/user/hand/left/output/haptic\";\n\npub const RIGHT_CONTROLLER_HAPTIC_PATH: &str = \"/user/hand/right/output/haptic\";\n\n\n\nlazy_static! 
{\n\n pub static ref HEAD_ID: u64 = hash_string(HEAD_PATH);\n\n pub static ref LEFT_HAND_ID: u64 = hash_string(LEFT_HAND_PATH);\n\n pub static ref RIGHT_HAND_ID: u64 = hash_string(RIGHT_HAND_PATH);\n\n pub static ref LEFT_CONTROLLER_HAPTIC_ID: u64 = hash_string(LEFT_CONTROLLER_HAPTIC_PATH);\n\n pub static ref RIGHT_CONTROLLER_HAPTIC_ID: u64 = hash_string(RIGHT_CONTROLLER_HAPTIC_PATH);\n\n}\n", "file_path": "alvr/common/src/lib.rs", "rank": 19, "score": 216182.96403705736 }, { "content": "pub fn store_config_string(config: String) {\n\n let vm_ptr = ndk_glue::native_activity().vm();\n\n let vm = unsafe { jni::JavaVM::from_raw(vm_ptr).unwrap() };\n\n let env = vm.attach_current_thread().unwrap();\n\n\n\n let shared_preferences = get_preferences_object(&env);\n\n\n\n let editor = env\n\n .call_method(\n\n shared_preferences,\n\n \"edit\",\n\n \"()Landroid/content/SharedPreferences$Editor;\",\n\n &[],\n\n )\n\n .unwrap()\n\n .l()\n\n .unwrap();\n\n\n\n let key = env.new_string(CONFIG_KEY).unwrap();\n\n let value = env.new_string(config).unwrap();\n", "file_path": "alvr/experiments/client/src/storage/android.rs", "rank": 20, "score": 215711.81023814605 }, { "content": "pub fn store_config_string(config: String) {\n\n // todo\n\n}\n", "file_path": "alvr/experiments/client/src/storage/desktop.rs", "rank": 21, "score": 215711.81023814605 }, { "content": "pub fn session_settings_default() -> SettingsDefault {\n\n SettingsDefault {\n\n video: VideoDescDefault {\n\n adapter_index: 0,\n\n render_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n\n },\n\n recommended_target_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n\n },\n", "file_path": "alvr/session/src/settings.rs", "rank": 22, "score": 212048.10856009703 }, { 
"content": "pub fn execute_default_pass(\n\n encoder: &mut CommandEncoder,\n\n pipeline: &RenderPipeline,\n\n bind_group: &BindGroup,\n\n push_constants: &[u8],\n\n output: &TextureView,\n\n) {\n\n let mut pass = encoder.begin_render_pass(&RenderPassDescriptor {\n\n color_attachments: &[RenderPassColorAttachment {\n\n view: output,\n\n resolve_target: None,\n\n ops: Operations {\n\n load: LoadOp::Clear(Color::BLACK),\n\n store: true,\n\n },\n\n }],\n\n ..Default::default()\n\n });\n\n\n\n pass.set_pipeline(pipeline);\n\n pass.set_bind_group(0, bind_group, &[]);\n\n pass.set_push_constants(ShaderStages::FRAGMENT, 0, push_constants);\n\n\n\n pass.draw(0..4, 0..1);\n\n\n\n // here the pass is dropped and applied to the command encoder\n\n}\n", "file_path": "alvr/experiments/graphics/src/lib.rs", "rank": 23, "score": 209107.9454740031 }, { "content": "#[inline]\n\npub fn get_next_frame_batch(\n\n sample_buffer: &mut VecDeque<f32>,\n\n channels_count: usize,\n\n batch_frames_count: usize,\n\n) -> Vec<f32> {\n\n if sample_buffer.len() / channels_count >= batch_frames_count {\n\n let mut batch = sample_buffer\n\n .drain(0..batch_frames_count * channels_count)\n\n .collect::<Vec<_>>();\n\n\n\n if sample_buffer.len() / channels_count < batch_frames_count {\n\n // Render fade-out. It is completely contained in the current batch\n\n for f in 0..batch_frames_count {\n\n let volume = 1. - f as f32 / batch_frames_count as f32;\n\n for c in 0..channels_count {\n\n batch[f * channels_count + c] *= volume;\n\n }\n\n }\n\n }\n\n // fade-ins and cross-fades are rendered in the receive loop directly inside sample_buffer.\n", "file_path": "alvr/audio/src/lib.rs", "rank": 24, "score": 209095.06393987132 }, { "content": "// Create wgpu-compatible Vulkan device. 
Corresponds to xrCreateVulkanDeviceKHR\n\npub fn create_vulkan_device(\n\n entry: ash::Entry,\n\n version: u32,\n\n instance: &ash::Instance,\n\n physical_device: vk::PhysicalDevice,\n\n create_info: &vk::DeviceCreateInfo,\n\n) -> StrResult<ash::Device> {\n\n let temp_adapter =\n\n get_temporary_hal_adapter(entry, version, instance.clone(), physical_device)?;\n\n\n\n let wgpu_extensions = temp_adapter\n\n .adapter\n\n .required_device_extensions(temp_adapter.features);\n\n let mut extensions_ptrs = wgpu_extensions\n\n .iter()\n\n .map(|x| x.as_ptr())\n\n .collect::<Vec<_>>();\n\n let mut enabled_phd_features = temp_adapter.adapter.physical_device_features(\n\n &wgpu_extensions,\n\n temp_adapter.features,\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 25, "score": 209072.0195019717 }, { "content": "// All bindings map to the bind group 0\n\npub fn create_default_render_pipeline(\n\n label: &str,\n\n device: &Device,\n\n fragment_shader: &str,\n\n bindings: Vec<BindingDesc>,\n\n push_constants_size: usize,\n\n) -> (RenderPipeline, BindGroup) {\n\n let quad_shader = quad_shader(device);\n\n\n\n let fragment_shader = device.create_shader_module(&ShaderModuleDescriptor {\n\n label: Some(label),\n\n source: ShaderSource::Wgsl(fragment_shader.into()),\n\n });\n\n\n\n let layout_entries = bindings\n\n .iter()\n\n .map(|binding| BindGroupLayoutEntry {\n\n binding: binding.index,\n\n visibility: ShaderStages::FRAGMENT,\n\n ty: binding.binding_type,\n", "file_path": "alvr/experiments/graphics/src/lib.rs", "rank": 26, "score": 205036.08772227395 }, { "content": "// Corresponds to xrGetVulkanGraphicsDeviceKHR\n\npub fn get_vulkan_graphics_device(\n\n instance: &ash::Instance,\n\n adapter_index: Option<usize>,\n\n) -> StrResult<vk::PhysicalDevice> {\n\n let mut physical_devices = unsafe { trace_err!(instance.enumerate_physical_devices())? 
};\n\n\n\n Ok(physical_devices.remove(adapter_index.unwrap_or(0)))\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 27, "score": 205000.32955031304 }, { "content": "pub fn split_string(source: &str, start_pattern: &str, end: char) -> (String, String, String) {\n\n let start_idx = source.find(start_pattern).unwrap() + start_pattern.len();\n\n let end_idx = start_idx + source[start_idx..].find(end).unwrap();\n\n\n\n (\n\n source[..start_idx].to_owned(),\n\n source[start_idx..end_idx].to_owned(),\n\n source[end_idx..].to_owned(),\n\n )\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 28, "score": 200659.76261736237 }, { "content": "pub fn dynlib_fname(name: &str) -> String {\n\n format!(\"{DLL_PREFIX}{name}{DLL_SUFFIX}\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 29, "score": 200389.1277160864 }, { "content": "pub fn exec_fname(name: &str) -> String {\n\n format!(\"{name}{EXE_SUFFIX}\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 30, "score": 200389.1277160864 }, { "content": "#[cfg(windows)]\n\nfn get_windows_device(device: &AudioDevice) -> StrResult<ComPtr<IMMDevice>> {\n\n let device_name = trace_err!(device.inner.name())?;\n\n\n\n unsafe {\n\n CoInitializeEx(ptr::null_mut(), COINIT_MULTITHREADED);\n\n\n\n let mut mm_device_enumerator_ptr: *mut IMMDeviceEnumerator = ptr::null_mut();\n\n let hr = CoCreateInstance(\n\n &MMDeviceEnumerator::uuidof(),\n\n ptr::null_mut(),\n\n CLSCTX_ALL,\n\n &IMMDeviceEnumerator::uuidof(),\n\n &mut mm_device_enumerator_ptr as *mut _ as _,\n\n );\n\n if FAILED(hr) {\n\n return fmt_e!(\"CoCreateInstance(IMMDeviceEnumerator) failed: hr = 0x{hr:08x}\",);\n\n }\n\n let mm_device_enumerator = ComPtr::from_raw(mm_device_enumerator_ptr);\n\n\n\n let mut mm_device_collection_ptr: *mut IMMDeviceCollection = ptr::null_mut();\n", "file_path": "alvr/audio/src/lib.rs", "rank": 31, "score": 195452.4146229534 }, { "content": "pub fn quad_shader(device: &Device) -> 
ShaderModule {\n\n device.create_shader_module(&ShaderModuleDescriptor {\n\n label: None,\n\n source: ShaderSource::Wgsl(QUAD_SHADER_WGSL.into()),\n\n })\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/lib.rs", "rank": 32, "score": 194189.87367080667 }, { "content": "#[cfg(windows)]\n\nfn set_mute_windows_device(device: &AudioDevice, mute: bool) -> StrResult {\n\n unsafe {\n\n let mm_device = get_windows_device(device)?;\n\n\n\n let mut endpoint_volume_ptr: *mut IAudioEndpointVolume = ptr::null_mut();\n\n let hr = mm_device.Activate(\n\n &IAudioEndpointVolume::uuidof(),\n\n CLSCTX_ALL,\n\n ptr::null_mut(),\n\n &mut endpoint_volume_ptr as *mut _ as _,\n\n );\n\n if FAILED(hr) {\n\n return fmt_e!(\n\n \"IMMDevice::Activate() for IAudioEndpointVolume failed: hr = 0x{hr:08x}\"\n\n );\n\n }\n\n let endpoint_volume = ComPtr::from_raw(endpoint_volume_ptr);\n\n\n\n let hr = endpoint_volume.SetMute(mute as _, ptr::null_mut());\n\n if FAILED(hr) {\n\n return fmt_e!(\"Failed to mute audio device: hr = 0x{hr:08x}\");\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/audio/src/lib.rs", "rank": 33, "score": 192976.62136487156 }, { "content": "pub fn init_logging(log_sender: Sender<String>, events_sender: Sender<String>) {\n\n let mut log_dispatch = Dispatch::new().format(move |out, message, record| {\n\n let maybe_event = format!(\"{message}\");\n\n if maybe_event.contains(\"#{\") {\n\n let event_data = maybe_event.replace(\"#{\", \"{\").replace(\"}#\", \"}\");\n\n events_sender.send(event_data).ok();\n\n } else {\n\n let severity = match record.level() {\n\n log::Level::Error => EventSeverity::Error,\n\n log::Level::Warn => EventSeverity::Warning,\n\n log::Level::Info => EventSeverity::Info,\n\n log::Level::Debug | log::Level::Trace => EventSeverity::Debug,\n\n };\n\n\n\n let event = ServerEvent::Raw(Raw {\n\n timestamp: chrono::Local::now().format(\"%H:%M:%S.%f\").to_string(),\n\n severity,\n\n content: message.to_string(),\n\n });\n\n\n", "file_path": 
"alvr/server/src/logging_backend.rs", "rank": 34, "score": 192127.57761280902 }, { "content": " //----------------------------------------------------------------------------------------------\n\n // helper WString class\n\n //----------------------------------------------------------------------------------------------\n\n class AMFVariant::WString\n\n {\n\n friend class AMFVariant;\n\n private:\n\n void Free()\n\n {\n\n if (m_Str != NULL)\n\n {\n\n AMFVariantFreeWString(m_Str);\n\n m_Str = NULL;\n\n }\n\n }\n\n public:\n\n WString() :m_Str(NULL){}\n\n WString(const wchar_t* str) : m_Str(NULL)\n\n {\n\n m_Str = AMFVariantDuplicateWString(str);\n\n }\n\n WString(const WString& p_other) : m_Str(NULL)\n\n {\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/Variant.h", "rank": 35, "score": 188991.78633783772 }, { "content": " class WString;\n\n\n\n public:\n\n AMFVariant() { AMFVariantInit(this); }\n\n explicit AMFVariant(const AMFVariantStruct& other) { AMFVariantInit(this); AMFVariantCopy(this, const_cast<AMFVariantStruct*>(&other)); }\n\n\n\n explicit AMFVariant(const AMFVariantStruct* pOther);\n\n template<typename T>\n\n explicit AMFVariant(const AMFInterfacePtr_T<T>& pValue);\n\n\n\n AMFVariant(const AMFVariant& other) { AMFVariantInit(this); AMFVariantCopy(this, const_cast<AMFVariantStruct*>(static_cast<const AMFVariantStruct*>(&other))); }\n\n\n\n explicit AMF_INLINE AMFVariant(amf_bool value) { AMFVariantInit(this); AMFVariantAssignBool(this, value); }\n\n explicit AMF_INLINE AMFVariant(amf_int64 value) { AMFVariantInit(this); AMFVariantAssignInt64(this, value); }\n\n explicit AMF_INLINE AMFVariant(amf_uint64 value) { AMFVariantInit(this); AMFVariantAssignInt64(this, (amf_int64)value); }\n\n explicit AMF_INLINE AMFVariant(amf_int32 value) { AMFVariantInit(this); AMFVariantAssignInt64(this, value); }\n\n explicit AMF_INLINE AMFVariant(amf_uint32 value) { AMFVariantInit(this); AMFVariantAssignInt64(this, value); }\n\n explicit AMF_INLINE 
AMFVariant(amf_double value) { AMFVariantInit(this); AMFVariantAssignDouble(this, value); }\n\n explicit AMF_INLINE AMFVariant(amf_float value) { AMFVariantInit(this); AMFVariantAssignFloat(this, value); }\n\n explicit AMF_INLINE AMFVariant(const AMFRect & value) { AMFVariantInit(this); AMFVariantAssignRect(this, &value); }\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/Variant.h", "rank": 36, "score": 187223.58834204788 }, { "content": "struct is_compatible_string_type\n\n : is_compatible_string_type_impl<BasicJsonType, ConstructibleStringType> {};\n\n\n\ntemplate <typename BasicJsonType, typename ConstructibleStringType,\n\n typename = void>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 37, "score": 187054.72598574514 }, { "content": "struct is_constructible_string_type\n\n : is_constructible_string_type_impl<BasicJsonType, ConstructibleStringType> {};\n\n\n\ntemplate <typename BasicJsonType, typename CompatibleArrayType, typename = void>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 38, "score": 187054.72598574514 }, { "content": " //----------------------------------------------------------------------------------------------\n\n // AMF_INLINE Variant helper class\n\n //----------------------------------------------------------------------------------------------\n\n class AMFVariant : public AMFVariantStruct\n\n {\n\n public:\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/Variant.h", "rank": 39, "score": 186256.35579429485 }, { "content": "pub fn get_registered_drivers() -> StrResult<Vec<PathBuf>> {\n\n Ok(crate::from_openvr_paths(trace_none!(\n\n crate::load_openvr_paths_json()?.get_mut(\"external_drivers\")\n\n )?))\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 40, "score": 185958.76545361851 }, { "content": "pub fn load_asset(fname: &str) -> Vec<u8> {\n\n let asset_manager = ndk_glue::native_activity().asset_manager();\n\n let 
fname_cstring = CString::new(fname).unwrap();\n\n let mut asset = asset_manager.open(fname_cstring.as_c_str()).unwrap();\n\n asset.get_buffer().unwrap().to_vec()\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/storage/android.rs", "rank": 41, "score": 184141.5236842222 }, { "content": "pub fn load_asset(fname: &str) -> Vec<u8> {\n\n // todo\n\n vec![]\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/storage/desktop.rs", "rank": 42, "score": 184141.5236842222 }, { "content": "struct is_constructible_string_type_impl <\n\n BasicJsonType, ConstructibleStringType,\n\n enable_if_t<is_detected_exact<typename BasicJsonType::string_t::value_type,\n\n value_type_t, ConstructibleStringType>::value >>\n\n{\n\n static constexpr auto value =\n\n std::is_constructible<ConstructibleStringType,\n\n typename BasicJsonType::string_t>::value;\n\n};\n\n\n\ntemplate <typename BasicJsonType, typename ConstructibleStringType>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 43, "score": 183194.61974614378 }, { "content": "struct is_compatible_string_type_impl <\n\n BasicJsonType, CompatibleStringType,\n\n enable_if_t<is_detected_exact<typename BasicJsonType::string_t::value_type,\n\n value_type_t, CompatibleStringType>::value >>\n\n{\n\n static constexpr auto value =\n\n std::is_constructible<typename BasicJsonType::string_t, CompatibleStringType>::value;\n\n};\n\n\n\ntemplate <typename BasicJsonType, typename ConstructibleStringType>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 44, "score": 183194.61974614378 }, { "content": "pub fn bump_version(maybe_version: Option<String>, is_nightly: bool) {\n\n let mut version = maybe_version.unwrap_or_else(version);\n\n\n\n if is_nightly {\n\n version = format!(\"{version}+nightly.{}\", date_utc_yyyymmdd());\n\n }\n\n\n\n for dir_name in [\n\n \"audio\",\n\n \"client\",\n\n \"commands\",\n\n \"common\",\n\n \"filesystem\",\n\n \"launcher\",\n\n \"server\",\n\n 
\"session\",\n\n \"sockets\",\n\n \"vrcompositor-wrapper\",\n\n \"vulkan-layer\",\n\n \"xtask\",\n\n ] {\n\n bump_cargo_version(dir_name, &version);\n\n }\n\n bump_client_gradle_version(&version, is_nightly);\n\n bump_rpm_spec_version(&version, is_nightly);\n\n bump_deb_control_version(&version);\n\n\n\n println!(\"Git tag:\\nv{version}\");\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 45, "score": 180081.71777712484 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\npub fn to_vulkan_images(textures: &[Texture]) -> Vec<vk::Image> {\n\n textures\n\n .iter()\n\n .map(|tex| unsafe {\n\n let mut handle = vk::Image::null();\n\n tex.as_hal::<hal::api::Vulkan, _>(|tex| {\n\n handle = tex.unwrap().raw_handle();\n\n });\n\n\n\n handle\n\n })\n\n .collect()\n\n}\n\n\n\npub enum SwapchainCreateData {\n\n // Used for the Vulkan layer and client\n\n External {\n\n images: Vec<vk::Image>,\n\n hal_usage: hal::TextureUses,\n\n drop_guard: Option<Arc<dyn Any + Send + Sync>>,\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 46, "score": 180058.41642738483 }, { "content": "struct wide_string_input_helper<WideStringType, 2>\n\n{\n\n // UTF-16\n\n static void fill_buffer(const WideStringType& str, size_t& current_wchar, std::array<std::char_traits<char>::int_type, 4>& utf8_bytes, size_t& utf8_bytes_index, size_t& utf8_bytes_filled)\n\n {\n\n utf8_bytes_index = 0;\n\n\n\n if (current_wchar == str.size())\n\n {\n\n utf8_bytes[0] = std::char_traits<char>::eof();\n\n utf8_bytes_filled = 1;\n\n }\n\n else\n\n {\n\n // get the current character\n\n const auto wc = static_cast<int>(str[current_wchar++]);\n\n\n\n // UTF-16 to UTF-8 encoding\n\n if (wc < 0x80)\n\n {\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 47, "score": 177822.29280368384 }, { "content": "struct is_compatible_string_type_impl : std::false_type {};\n\n\n\ntemplate <typename BasicJsonType, typename CompatibleStringType>\n", "file_path": 
"alvr/client/android/app/include/tinygltf/json.hpp", "rank": 48, "score": 177694.21260334182 }, { "content": "struct is_constructible_string_type_impl : std::false_type {};\n\n\n\ntemplate <typename BasicJsonType, typename ConstructibleStringType>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 49, "score": 177694.21260334182 }, { "content": "pub fn version_from_dir<P: AsRef<Path>>(dir: P) -> String {\n\n let manifest_path = packages_dir().join(dir).join(\"Cargo.toml\");\n\n println!(\"cargo:rerun-if-changed={}\", manifest_path.to_string_lossy());\n\n\n\n let manifest = fs::read_to_string(manifest_path).unwrap();\n\n let (_, version, _) = split_string(&manifest, \"version = \\\"\", '\\\"');\n\n\n\n version\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 50, "score": 176772.4418820693 }, { "content": "pub fn bump_alxr_version(maybe_version: Option<String>, is_nightly: bool) {\n\n let mut version = maybe_version.unwrap_or_else(alxr_version);\n\n if is_nightly {\n\n version = format!(\"{version}+nightly.{}\", date_utc_yyyymmdd());\n\n }\n\n\n\n let base_dir = PathBuf::from(\"openxr-client\");\n\n for dir_name in [\n\n \"alxr-engine-sys\",\n\n \"alxr-common\",\n\n \"alxr-client\",\n\n \"alxr-android-client\",\n\n \"alxr-android-client/pico-neo\",\n\n \"alxr-android-client/quest\",\n\n ]\n\n .into_iter()\n\n .map(|d| base_dir.join(&d).to_str().unwrap().to_owned())\n\n {\n\n bump_cargo_version(&dir_name, &version);\n\n }\n", "file_path": "alvr/xtask/src/version.rs", "rank": 51, "score": 176734.6588891036 }, { "content": "pub fn create_identity(hostname: Option<String>) -> StrResult<PrivateIdentity> {\n\n let hostname = hostname.unwrap_or(format!(\n\n \"{}{}{}{}.client.alvr\",\n\n rand::thread_rng().gen_range(0..10),\n\n rand::thread_rng().gen_range(0..10),\n\n rand::thread_rng().gen_range(0..10),\n\n rand::thread_rng().gen_range(0..10),\n\n ));\n\n\n\n #[cfg(target_os = \"android\")]\n\n let certificate = 
trace_err!(rcgen::generate_simple_self_signed([hostname.clone()]))?;\n\n\n\n #[cfg(not(target_os = \"android\"))]\n\n return Ok(PrivateIdentity {\n\n hostname,\n\n certificate_pem: String::new(),\n\n key_pem: String::new(),\n\n });\n\n\n\n #[cfg(target_os = \"android\")]\n", "file_path": "alvr/sockets/src/lib.rs", "rank": 52, "score": 176734.6588891036 }, { "content": "#[derive(Debug)]\n\nstruct StringError(String);\n\n\n\nimpl Display for StringError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(&self.0)\n\n }\n\n}\n\n\n\nimpl Error for StringError {}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 53, "score": 176589.41632766766 }, { "content": "#[proc_macro_derive(SettingsSchema, attributes(schema))]\n\npub fn create_settings_schema_fn_and_default_ty(input: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n match schema(input) {\n\n Ok(tokens) => tokens.into(),\n\n Err(e) => e,\n\n }\n\n}\n", "file_path": "alvr/settings-schema-derive/src/lib.rs", "rank": 54, "score": 175905.5275351678 }, { "content": "fn get_case(attrs: Vec<Attribute>) -> Result<Option<String>, TokenStream> {\n\n for attr in schema_attrs(attrs, \"serde\") {\n\n let parsed_attr = attr\n\n .parse_meta()\n\n .map_err(|e| e.to_compile_error().into_token_stream())?;\n\n if let Meta::List(args_list) = parsed_attr {\n\n for arg in args_list.nested {\n\n if let NestedMeta::Meta(Meta::NameValue(name_value_arg)) = arg {\n\n if let Some(arg_ident) = name_value_arg.path.get_ident() {\n\n if arg_ident == \"rename_all\" {\n\n if let Lit::Str(lit_str) = name_value_arg.lit {\n\n return Ok(Some(lit_str.value()));\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "alvr/settings-schema-derive/src/lib.rs", "rank": 55, "score": 167186.14182463498 }, { "content": " enum class PhysicalDeviceType\n\n {\n\n eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,\n\n eIntegratedGpu = 
VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,\n\n eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,\n\n eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,\n\n eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )\n\n {\n\n switch ( value )\n\n {\n\n case PhysicalDeviceType::eOther : return \"Other\";\n\n case PhysicalDeviceType::eIntegratedGpu : return \"IntegratedGpu\";\n\n case PhysicalDeviceType::eDiscreteGpu : return \"DiscreteGpu\";\n\n case PhysicalDeviceType::eVirtualGpu : return \"VirtualGpu\";\n\n case PhysicalDeviceType::eCpu : return \"Cpu\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 56, "score": 165609.84768437213 }, { "content": "struct has_non_default_from_json : std::false_type {};\n\n\n\ntemplate<typename BasicJsonType, typename T>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 57, "score": 164786.76789864927 }, { "content": " enum class DeviceEventTypeEXT\n\n {\n\n eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )\n\n {\n\n switch ( value )\n\n {\n\n case DeviceEventTypeEXT::eDisplayHotplug : return \"DisplayHotplug\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 58, "score": 162424.67110065155 }, { "content": " class amf_list<AMFInterfacePtr_T<_Interf> >\n\n : public std::list<AMFInterfacePtr_TAdapted<_Interf>, amf_allocator<AMFInterfacePtr_TAdapted<_Interf> > >\n\n {};\n\n#if defined(__GNUC__)\n\n // restore gcc warnings\n\n #pragma GCC diagnostic 
pop\n\n#endif\n\n}\n\n//-------------------------------------------------------------------------------------------------\n\n// string classes\n\n//-------------------------------------------------------------------------------------------------\n\n\n\ntypedef std::basic_string<char, std::char_traits<char>, amf::amf_allocator<char> > amf_string;\n\ntypedef std::basic_string<wchar_t, std::char_traits<wchar_t>, amf::amf_allocator<wchar_t> > amf_wstring;\n\n\n\nnamespace amf\n\n{\n\n //-------------------------------------------------------------------------------------------------\n\n // string conversion\n\n //-------------------------------------------------------------------------------------------------\n", "file_path": "alvr/server/cpp/platform/win32/amf/common/AMFSTL.h", "rank": 59, "score": 161647.49185778605 }, { "content": "pub fn shutdown_runtime() {\n\n alvr_session::log_event(ServerEvent::ServerQuitting);\n\n\n\n if let Some(window) = MAYBE_WINDOW.lock().take() {\n\n window.close();\n\n }\n\n\n\n SHUTDOWN_NOTIFIER.notify_waiters();\n\n\n\n if let Some(runtime) = RUNTIME.lock().take() {\n\n runtime.shutdown_background();\n\n // shutdown_background() is non blocking and it does not guarantee that every internal\n\n // thread is terminated in a timely manner. 
Using shutdown_background() instead of just\n\n // dropping the runtime has the benefit of giving SteamVR a chance to clean itself as\n\n // much as possible before the process is killed because of alvr_launcher timeout.\n\n }\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 60, "score": 161516.2125296807 }, { "content": "#[cfg_attr(target_os = \"android\", ndk_glue::main)]\n\npub fn main() {\n\n env_logger::init();\n\n log::error!(\"enter main\");\n\n\n\n show_err(run());\n\n\n\n #[cfg(target_os = \"android\")]\n\n ndk_glue::native_activity().finish();\n\n}\n\n\n", "file_path": "alvr/experiments/client/src/lib.rs", "rank": 61, "score": 161516.2125296807 }, { "content": "pub fn invoke_installer() {\n\n try_close_steamvr_gracefully();\n\n\n\n spawn_no_window(Command::new(afs::installer_path()).arg(\"-q\"));\n\n\n\n // delete crash_log.txt (take advantage of the occasion to do some routine cleaning)\n\n fs::remove_file(\n\n afs::filesystem_layout_from_launcher_exe(&env::current_exe().unwrap()).crash_log(),\n\n )\n\n .ok();\n\n}\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 62, "score": 161516.2125296807 }, { "content": "pub fn build_server(\n\n is_release: bool,\n\n experiments: bool,\n\n fetch_crates: bool,\n\n bundle_ffmpeg: bool,\n\n no_nvidia: bool,\n\n gpl: bool,\n\n root: Option<String>,\n\n reproducible: bool,\n\n) {\n\n // Always use CustomRoot for contructing the build directory. 
The actual runtime layout is respected\n\n let layout = Layout::new(&afs::server_build_dir());\n\n\n\n let build_type = if is_release { \"release\" } else { \"debug\" };\n\n\n\n let build_flags = format!(\n\n \"{} {}\",\n\n if is_release { \"--release\" } else { \"\" },\n\n if reproducible {\n\n \"--offline --locked\"\n", "file_path": "alvr/xtask/src/main.rs", "rank": 63, "score": 161516.2125296807 }, { "content": "pub fn run_as_shell_in(\n\n workdir: &Path,\n\n shell: &str,\n\n shell_flag: &str,\n\n cmd: &str,\n\n) -> Result<(), Box<dyn Error>> {\n\n println!(\"\\n> {cmd}\");\n\n\n\n let output = Command::new(shell)\n\n .args(&[shell_flag, cmd])\n\n .stdout(Stdio::inherit())\n\n .current_dir(workdir)\n\n .spawn()?\n\n .wait_with_output()?;\n\n\n\n if output.status.success() {\n\n Ok(())\n\n } else {\n\n Err(Box::new(StringError(format!(\n\n \"Command failed: {}\",\n\n String::from_utf8_lossy(&output.stderr)\n\n ))))\n\n }\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 64, "score": 161516.2125296807 }, { "content": "pub fn fix_steamvr() {\n\n // If ALVR driver does not start use a more destructive approach: delete openvrpaths.vrpath then recreate it\n\n if let Ok(path) = alvr_commands::openvr_source_file_path() {\n\n fs::remove_file(path).ok();\n\n\n\n maybe_launch_steamvr();\n\n thread::sleep(Duration::from_secs(5));\n\n kill_steamvr();\n\n thread::sleep(Duration::from_secs(5));\n\n }\n\n\n\n unblock_alvr_addon().ok();\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 65, "score": 161516.2125296807 }, { "content": "// this will not kill the child process \"ALVR launcher\"\n\npub fn kill_steamvr() {\n\n let mut system = System::new_with_specifics(\n\n RefreshKind::new().with_processes(ProcessRefreshKind::everything()),\n\n );\n\n system.refresh_processes();\n\n\n\n // first kill vrmonitor, then kill vrserver if it is hung.\n\n\n\n for process in system.processes_by_name(&afs::exec_fname(\"vrmonitor\")) {\n\n #[cfg(not(windows))]\n\n 
process.kill_with(Signal::Term);\n\n #[cfg(windows)]\n\n kill_process(process.pid().as_u32());\n\n }\n\n\n\n thread::sleep(Duration::from_secs(1));\n\n\n\n for process in system.processes_by_name(&afs::exec_fname(\"vrserver\")) {\n\n #[cfg(not(windows))]\n\n process.kill_with(Signal::Term);\n\n #[cfg(windows)]\n\n kill_process(process.pid().as_u32());\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 66, "score": 161516.2125296807 }, { "content": "pub fn restart_steamvr() {\n\n try_close_steamvr_gracefully();\n\n\n\n if alvr_common::show_err(maybe_register_alvr_driver()).is_some() {\n\n maybe_launch_steamvr();\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 67, "score": 161516.2125296807 }, { "content": "pub fn publish_server(is_nightly: bool, root: Option<String>, reproducible: bool, gpl: bool) {\n\n let bundle_ffmpeg = cfg!(target_os = \"linux\");\n\n build_server(\n\n true,\n\n false,\n\n false,\n\n bundle_ffmpeg,\n\n false,\n\n gpl,\n\n root,\n\n reproducible,\n\n );\n\n\n\n // Add licenses\n\n let licenses_dir = afs::server_build_dir().join(\"licenses\");\n\n fs::create_dir_all(&licenses_dir).unwrap();\n\n fs::copy(\n\n afs::workspace_dir().join(\"LICENSE\"),\n\n licenses_dir.join(\"ALVR.txt\"),\n\n )\n", "file_path": "alvr/xtask/src/packaging.rs", "rank": 68, "score": 160175.2782070251 }, { "content": " class NumberUnsignedType, class NumberFloatType, \\\n\n template<typename> class AllocatorType, \\\n\n template<typename, typename = void> class JSONSerializer>\n\n\n\n#define NLOHMANN_BASIC_JSON_TPL \\\n\n basic_json<ObjectType, ArrayType, StringType, BooleanType, \\\n\n NumberIntegerType, NumberUnsignedType, NumberFloatType, \\\n\n AllocatorType, JSONSerializer>\n\n\n\n// #include <nlohmann/detail/meta/cpp_future.hpp>\n\n\n\n\n\n#include <ciso646> // not\n\n#include <cstddef> // size_t\n\n#include <type_traits> // conditional, enable_if, false_type, integral_constant, is_constructible, is_integral, is_same, 
remove_cv, remove_reference, true_type\n\n\n\nnamespace nlohmann\n\n{\n\nnamespace detail\n\n{\n\n// alias templates to reduce boilerplate\n\ntemplate<bool B, typename T = void>\n\nusing enable_if_t = typename std::enable_if<B, T>::type;\n\n\n\ntemplate<typename T>\n\nusing uncvref_t = typename std::remove_cv<typename std::remove_reference<T>::type>::type;\n\n\n\n// implementation of C++14 index_sequence and affiliates\n\n// source: https://stackoverflow.com/a/32223343\n\ntemplate<std::size_t... Ints>\n", "file_path": "alvr/client/android/app/include/tinygltf/json.hpp", "rank": 69, "score": 159413.84325501166 }, { "content": "pub fn build_alxr_uwp(root: Option<String>, arch: UWPArch, flags: AlxBuildFlags) {\n\n if let Some(root) = root {\n\n env::set_var(\"ALVR_ROOT_DIR\", root);\n\n }\n\n\n\n let build_flags = flags.make_build_string();\n\n let target_dir = afs::target_dir();\n\n let build_type = if flags.is_release { \"release\" } else { \"debug\" };\n\n let target_type = format!(\"{arch}-uwp-windows-msvc\");\n\n let artifacts_dir = target_dir.join(&target_type).join(build_type);\n\n\n\n let alxr_client_build_dir = afs::alxr_uwp_build_dir(build_type);\n\n //fs::remove_dir_all(&alxr_client_build_dir).ok();\n\n fs::create_dir_all(&alxr_client_build_dir).unwrap();\n\n\n\n if flags.fetch_crates {\n\n command::run(\"cargo update\").unwrap();\n\n }\n\n\n\n let alxr_client_dir = afs::workspace_dir().join(\"alvr/openxr-client/alxr-client/uwp\");\n", "file_path": "alvr/xtask/src/main.rs", "rank": 70, "score": 158713.8545473805 }, { "content": "pub fn build_alxr_client(root: Option<String>, ffmpeg_version: &str, flags: AlxBuildFlags) {\n\n if let Some(root) = root {\n\n env::set_var(\"ALVR_ROOT_DIR\", root);\n\n }\n\n\n\n let build_flags = flags.make_build_string();\n\n let target_dir = afs::target_dir();\n\n let build_type = if flags.is_release { \"release\" } else { \"debug\" };\n\n let artifacts_dir = target_dir.join(build_type);\n\n\n\n let alxr_client_build_dir = 
afs::alxr_client_build_dir(build_type, !flags.no_nvidia);\n\n fs::remove_dir_all(&alxr_client_build_dir).ok();\n\n fs::create_dir_all(&alxr_client_build_dir).unwrap();\n\n\n\n let bundle_ffmpeg_enabled = cfg!(target_os = \"linux\") && flags.bundle_ffmpeg;\n\n if bundle_ffmpeg_enabled {\n\n assert!(!ffmpeg_version.is_empty(), \"ffmpeg-version is empty!\");\n\n\n\n let ffmpeg_build_dir = &alxr_client_build_dir;\n\n dependencies::build_ffmpeg_linux_install(\n", "file_path": "alvr/xtask/src/main.rs", "rank": 71, "score": 158713.8545473805 }, { "content": "pub fn notify_shutdown_driver() {\n\n thread::spawn(|| {\n\n RESTART_NOTIFIER.notify_waiters();\n\n\n\n // give time to the control loop to send the restart packet (not crucial)\n\n thread::sleep(Duration::from_millis(100));\n\n\n\n shutdown_runtime();\n\n\n\n unsafe { ShutdownSteamvr() };\n\n });\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 72, "score": 158500.1056354901 }, { "content": "pub fn notify_application_update() {\n\n notify_shutdown_driver();\n\n\n\n alvr_commands::invoke_application_update(&FILESYSTEM_LAYOUT.launcher_exe()).ok();\n\n}\n\n\n\npub enum ClientListAction {\n\n AddIfMissing { display_name: String },\n\n TrustAndMaybeAddIp(Option<IpAddr>),\n\n RemoveIpOrEntry(Option<IpAddr>),\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 73, "score": 158500.1056354901 }, { "content": "pub fn set_panic_hook() {\n\n std::panic::set_hook(Box::new(|panic_info| {\n\n let message = panic_info\n\n .payload()\n\n .downcast_ref::<&str>()\n\n .unwrap_or(&\"Unavailable\");\n\n let err_str = format!(\n\n \"Message: {message:?}\\nBacktrace:\\n{:?}\",\n\n backtrace::Backtrace::new()\n\n );\n\n\n\n log::error!(\"{err_str}\");\n\n\n\n #[cfg(windows)]\n\n std::thread::spawn(move || {\n\n msgbox::create(\"ALVR panicked\", &err_str, msgbox::IconType::Error).ok();\n\n });\n\n }))\n\n}\n\n\n", "file_path": "alvr/common/src/logging.rs", "rank": 74, "score": 158500.1056354901 }, { "content": "pub fn 
notify_restart_driver() {\n\n notify_shutdown_driver();\n\n\n\n alvr_commands::restart_steamvr(&FILESYSTEM_LAYOUT.launcher_exe()).ok();\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 75, "score": 158500.1056354901 }, { "content": "// Avoid Oculus link popups when debugging the client\n\npub fn kill_oculus_processes() {\n\n command::run_without_shell(\n\n \"powershell\",\n\n &[\n\n \"Start-Process\",\n\n \"taskkill\",\n\n \"-ArgumentList\",\n\n \"\\\"/F /IM OVR* /T\\\"\",\n\n \"-Verb\",\n\n \"runAs\",\n\n ],\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 76, "score": 158500.1056354901 }, { "content": "pub fn maybe_launch_steamvr() {\n\n let mut system = System::new_with_specifics(\n\n RefreshKind::new().with_processes(ProcessRefreshKind::everything()),\n\n );\n\n system.refresh_processes();\n\n\n\n if system\n\n .processes_by_name(&afs::exec_fname(\"vrserver\"))\n\n .count()\n\n == 0\n\n {\n\n #[cfg(windows)]\n\n spawn_no_window(Command::new(\"cmd\").args(&[\"/C\", \"start\", \"steam://rungameid/250820\"]));\n\n #[cfg(not(windows))]\n\n spawn_no_window(Command::new(\"steam\").args(&[\"steam://rungameid/250820\"]));\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 77, "score": 158500.1056354901 }, { "content": "pub fn find_resolved_so_paths(\n\n bin_or_so: &std::path::Path,\n\n depends_so: &str,\n\n) -> Vec<std::path::PathBuf> {\n\n let cmdline = format!(\n\n \"ldd {} | cut -d '>' -f 2 | awk \\'{{print $1}}\\' | grep {}\",\n\n bin_or_so.display(),\n\n depends_so\n\n );\n\n std::process::Command::new(\"sh\")\n\n .args(&[\"-c\", &cmdline])\n\n .stdout(std::process::Stdio::piped())\n\n .spawn()\n\n .map_or(vec![], |mut child| {\n\n let mut result = std::io::BufReader::new(child.stdout.take().unwrap())\n\n .lines()\n\n .filter(|line| line.is_ok())\n\n .map(|line| std::path::PathBuf::from(line.unwrap()).canonicalize()) // canonicalize resolves symlinks\n\n .filter(|result| result.is_ok())\n\n .map(|pp| 
pp.unwrap())\n\n .collect::<Vec<_>>();\n\n result.dedup();\n\n result\n\n })\n\n}\n", "file_path": "alvr/xtask/src/dependencies.rs", "rank": 78, "score": 158500.1056354901 }, { "content": "pub fn build_alxr_android(\n\n root: Option<String>,\n\n client_flavor: AndroidFlavor,\n\n flags: AlxBuildFlags,\n\n) {\n\n let build_type = if flags.is_release { \"release\" } else { \"debug\" };\n\n let build_flags = flags.make_build_string();\n\n\n\n if let Some(root) = root {\n\n env::set_var(\"ALVR_ROOT_DIR\", root);\n\n }\n\n\n\n if flags.fetch_crates {\n\n command::run(\"cargo update\").unwrap();\n\n }\n\n install_alxr_depends();\n\n\n\n let alxr_client_build_dir = afs::alxr_android_build_dir(build_type);\n\n //fs::remove_dir_all(&alxr_client_build_dir).ok();\n\n fs::create_dir_all(&alxr_client_build_dir).unwrap();\n", "file_path": "alvr/xtask/src/main.rs", "rank": 79, "score": 158500.1056354901 }, { "content": "pub fn init_logging() {\n\n #[cfg(target_os = \"android\")]\n\n android_logger::init_once(\n\n android_logger::Config::default()\n\n .with_tag(\"[ALVR NATIVE-RUST]\")\n\n .with_min_level(log::Level::Info),\n\n );\n\n\n\n alvr_common::set_panic_hook();\n\n}\n", "file_path": "alvr/client/src/logging_backend.rs", "rank": 80, "score": 158500.1056354901 }, { "content": "pub fn remove_build_dir() {\n\n let build_dir = afs::build_dir();\n\n fs::remove_dir_all(&build_dir).ok();\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 81, "score": 158500.1056354901 }, { "content": " enum class DeviceMemoryReportEventTypeEXT\n\n {\n\n eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT,\n\n eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT,\n\n eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT,\n\n eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT,\n\n eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )\n\n {\n\n switch ( value )\n\n 
{\n\n case DeviceMemoryReportEventTypeEXT::eAllocate : return \"Allocate\";\n\n case DeviceMemoryReportEventTypeEXT::eFree : return \"Free\";\n\n case DeviceMemoryReportEventTypeEXT::eImport : return \"Import\";\n\n case DeviceMemoryReportEventTypeEXT::eUnimport : return \"Unimport\";\n\n case DeviceMemoryReportEventTypeEXT::eAllocationFailed : return \"AllocationFailed\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 82, "score": 156466.50608714233 }, { "content": "class extension_list {\n\n public:\n\n extension_list(const util::allocator &allocator);\n\n\n\n extension_list(const extension_list &rhs) = delete;\n\n const extension_list &operator=(const extension_list &rhs) = delete;\n\n\n\n /**\n\n * @brief Obtain a vector of #VkExtensionProperties equivalent to this extension_list object.\n\n */\n\n const util::vector<VkExtensionProperties> &get_extension_props() const { return m_ext_props; }\n\n\n\n /**\n\n * @brief Get the allocator used to manage the memory of this object.\n\n */\n\n const util::allocator get_allocator() const { return m_alloc; }\n\n\n\n /**\n\n * @brief Append pointers to extension strings to the given vector.\n\n *\n", "file_path": "alvr/vulkan-layer/util/extension_list.hpp", "rank": 83, "score": 156052.1120525076 }, { "content": "pub fn build_alxr_app_image(_root: Option<String>, _ffmpeg_version: &str, _flags: AlxBuildFlags) {\n\n println!(\"Not Implemented!\");\n\n // setup_cargo_appimage();\n\n\n\n // // let target_dir = afs::target_dir();\n\n\n\n // // let bundle_ffmpeg_enabled = cfg!(target_os = \"linux\") && flags.bundle_ffmpeg;\n\n // // if bundle_ffmpeg_enabled {\n\n // // assert!(!ffmpeg_version.is_empty(), \"ffmpeg-version is empty!\");\n\n\n\n // // let ffmpeg_lib_dir = &alxr_client_build_dir;\n\n // // dependencies::build_ffmpeg_linux_install(true, 
ffmpeg_version, /*enable_decoders=*/true, &ffmpeg_lib_dir);\n\n\n\n // // assert!(ffmpeg_lib_dir.exists());\n\n // // env::set_var(\"ALXR_BUNDLE_FFMPEG_INSTALL_PATH\", ffmpeg_lib_dir.to_str().unwrap());\n\n // // }\n\n\n\n // if let Some(root) = root {\n\n // env::set_var(\"ALVR_ROOT_DIR\", root);\n\n // }\n", "file_path": "alvr/xtask/src/main.rs", "rank": 84, "score": 156029.61695837718 }, { "content": "class TrackedDevice {\n\n public:\n\n uint64_t device_path;\n\n vr::TrackedDeviceIndex_t object_id = vr::k_unTrackedDeviceIndexInvalid;\n\n vr::PropertyContainerHandle_t prop_container = vr::k_ulInvalidPropertyContainer;\n\n\n\n void set_prop(OpenvrProperty prop);\n\n\n\n TrackedDevice(uint64_t device_path) : device_path(device_path) {}\n\n};", "file_path": "alvr/server/cpp/alvr_server/TrackedDevice.h", "rank": 85, "score": 155994.43413862857 }, { "content": "pub fn create_texture_set(\n\n device: &Device,\n\n data: SwapchainCreateData,\n\n info: SwapchainCreateInfo,\n\n) -> Vec<Texture> {\n\n let wgpu_usage = {\n\n let mut wgpu_usage = TextureUsages::empty();\n\n\n\n if info.usage.contains(SwapchainUsageFlags::SAMPLED) {\n\n wgpu_usage |= TextureUsages::TEXTURE_BINDING;\n\n }\n\n if info.usage.contains(SwapchainUsageFlags::COLOR_ATTACHMENT) {\n\n wgpu_usage |= TextureUsages::RENDER_ATTACHMENT;\n\n }\n\n if info\n\n .usage\n\n .contains(SwapchainUsageFlags::DEPTH_STENCIL_ATTACHMENT)\n\n {\n\n wgpu_usage |= TextureUsages::RENDER_ATTACHMENT;\n\n }\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 86, "score": 155641.2313926672 }, { "content": "pub fn build_ffmpeg_linux_install(\n\n nvenc_flag: bool,\n\n version_tag: &str,\n\n enable_decoders: bool,\n\n install_path: &std::path::Path,\n\n) -> std::path::PathBuf {\n\n /* dependencies: build-essential pkg-config nasm libva-dev libdrm-dev libvulkan-dev\n\n libx264-dev libx265-dev libffmpeg-nvenc-dev nvidia-cuda-toolkit\n\n */\n\n\n\n let download_path = afs::deps_dir().join(\"linux\");\n\n let 
ffmpeg_path = download_path.join(format!(\"FFmpeg-{}\", version_tag));\n\n if !ffmpeg_path.exists() {\n\n download_and_extract_zip(\n\n format!(\n\n \"https://codeload.github.com/FFmpeg/FFmpeg/zip/{}\",\n\n version_tag\n\n )\n\n .as_str(),\n\n &download_path,\n", "file_path": "alvr/xtask/src/dependencies.rs", "rank": 87, "score": 155641.2313926672 }, { "content": "// Create wgpu-compatible Vulkan instance. Corresponds to xrCreateVulkanInstanceKHR\n\npub fn create_vulkan_instance(\n\n entry: &ash::Entry,\n\n info: &vk::InstanceCreateInfo,\n\n) -> StrResult<ash::Instance> {\n\n let mut extensions_ptrs =\n\n get_vulkan_instance_extensions(entry, unsafe { (*info.p_application_info).api_version })?\n\n .iter()\n\n .map(|x| x.as_ptr())\n\n .collect::<Vec<_>>();\n\n\n\n extensions_ptrs.extend_from_slice(unsafe {\n\n slice::from_raw_parts(\n\n info.pp_enabled_extension_names,\n\n info.enabled_extension_count as _,\n\n )\n\n });\n\n\n\n let layers = vec![CStr::from_bytes_with_nul(b\"VK_LAYER_KHRONOS_validation\\0\").unwrap()];\n\n let layers_ptrs = layers.iter().map(|x| x.as_ptr()).collect::<Vec<_>>();\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 88, "score": 155641.2313926672 }, { "content": "pub fn split(\n\n graphics_context: Arc<GraphicsContext>,\n\n codec_type: CodecType,\n\n csd_0: &[u8],\n\n extra_options: &[(String, MediacodecDataType)],\n\n output_texture: Arc<Texture>,\n\n output_size: UVec2,\n\n slice_index: u32,\n\n) -> StrResult<(\n\n VideoDecoderEnqueuer,\n\n VideoDecoderDequeuer,\n\n VideoDecoderFrameGrabber,\n\n)> {\n\n let mut swapchain = trace_err!(ImageReader::new_with_usage(\n\n 1,\n\n 1,\n\n ImageFormat::PRIVATE,\n\n HardwareBufferUsage::GPU_SAMPLED_IMAGE,\n\n 4, // 2 concurrent locks on application side, 1 render surface for Mediacodec, 1 for safety\n\n ))?;\n", "file_path": "alvr/experiments/client/src/video_decoder/mediacodec.rs", "rank": 89, "score": 155641.2313926672 }, { "content": "pub fn shutdown() {\n\n 
ON_PAUSE_NOTIFIER.notify_waiters();\n\n drop(RUNTIME.lock().take());\n\n}\n\n\n\npub unsafe extern \"C\" fn path_string_to_hash(path: *const ::std::os::raw::c_char) -> u64 {\n\n alvr_common::hash_string(CStr::from_ptr(path).to_str().unwrap())\n\n}\n\n\n\npub extern \"C\" fn input_send(data_ptr: *const TrackingInfo) {\n\n #[inline(always)]\n\n fn from_tracking_quat(quat: &TrackingQuat) -> Quat {\n\n Quat::from_xyzw(quat.x, quat.y, quat.z, quat.w)\n\n }\n\n #[inline(always)]\n\n fn from_tracking_quat_val(quat: TrackingQuat) -> Quat {\n\n from_tracking_quat(&quat)\n\n }\n\n #[inline(always)]\n\n fn from_tracking_vector3(vec: &TrackingVector3) -> Vec3 {\n", "file_path": "alvr/openxr-client/alxr-common/src/lib.rs", "rank": 90, "score": 155641.2313926672 }, { "content": "// Hal adapter used to get required device extensions and features\n\npub fn get_temporary_hal_adapter(\n\n entry: ash::Entry,\n\n version: u32,\n\n instance: ash::Instance,\n\n physical_device: vk::PhysicalDevice,\n\n) -> StrResult<hal::ExposedAdapter<hal::api::Vulkan>> {\n\n let instance_extensions = get_vulkan_instance_extensions(&entry, version)?;\n\n\n\n let mut flags = hal::InstanceFlags::empty();\n\n if cfg!(debug_assertions) {\n\n flags |= hal::InstanceFlags::VALIDATION;\n\n flags |= hal::InstanceFlags::DEBUG;\n\n };\n\n\n\n let hal_instance = unsafe {\n\n trace_err!(<hal::api::Vulkan as hal::Api>::Instance::from_raw(\n\n entry,\n\n instance,\n\n version,\n\n instance_extensions,\n\n flags,\n\n false,\n\n None, // <-- the instance is not destroyed on drop\n\n ))?\n\n };\n\n\n\n trace_none!(hal_instance.expose_adapter(physical_device))\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 91, "score": 152940.91917425083 }, { "content": "#[cfg_attr(target_os = \"android\", ndk_glue::main(backtrace = \"on\"))]\n\npub fn main() {\n\n println!(\"{:?}\", *APP_CONFIG);\n\n let mut app = AppData {\n\n destroy_requested: false,\n\n resumed: false,\n\n };\n\n run(&mut 
app).unwrap();\n\n println!(\"successfully shutdown.\");\n\n // the ndk_glue api does not automatically call this and without\n\n // it main will hang on exit, currently there seems to be no plans to\n\n // make it automatic, refer to:\n\n // https://github.com/rust-windowing/android-ndk-rs/issues/154\n\n ndk_glue::native_activity().finish();\n\n}\n\n\n", "file_path": "alvr/openxr-client/alxr-android-client/src/lib.rs", "rank": 92, "score": 152927.6070947308 }, { "content": "// Get extensions needed by wgpu. Corresponds to xrGetVulkanInstanceExtensionsKHR\n\npub fn get_vulkan_instance_extensions(\n\n entry: &ash::Entry,\n\n version: u32,\n\n) -> StrResult<Vec<&'static CStr>> {\n\n let mut flags = hal::InstanceFlags::empty();\n\n if cfg!(debug_assertions) {\n\n flags |= hal::InstanceFlags::VALIDATION;\n\n flags |= hal::InstanceFlags::DEBUG;\n\n }\n\n\n\n trace_err!(<hal::api::Vulkan as hal::Api>::Instance::required_extensions(entry, flags))\n\n}\n\n\n", "file_path": "alvr/experiments/graphics/src/convert.rs", "rank": 93, "score": 152927.6070947308 }, { "content": "pub fn is_nightly() -> bool {\n\n ALVR_VERSION.build.contains(\"nightly\")\n\n}\n\n\n", "file_path": "alvr/common/src/lib.rs", "rank": 94, "score": 152731.3529188827 }, { "content": "pub fn is_stable() -> bool {\n\n ALVR_VERSION.pre == Prerelease::EMPTY && !is_nightly()\n\n}\n\n\n", "file_path": "alvr/common/src/lib.rs", "rank": 95, "score": 152731.3529188827 }, { "content": "class output_string_adapter : public output_adapter_protocol<CharType>\n\n{\n\n public:\n\n explicit output_string_adapter(StringType& s) noexcept\n\n : str(s)\n\n {}\n\n\n\n void write_character(CharType c) override\n\n {\n\n str.push_back(c);\n\n }\n\n\n\n void write_characters(const CharType* s, std::size_t length) override\n\n {\n\n str.append(s, length);\n\n }\n\n\n\n private:\n\n StringType& str;\n\n};\n\n\n\ntemplate<typename CharType, typename StringType = std::basic_string<CharType>>\n", "file_path": 
"alvr/client/android/app/include/tinygltf/json.hpp", "rank": 96, "score": 151996.48197690427 }, { "content": "pub fn create_swapchain(\n\n device: &Device,\n\n session: &xr::Session<xr::Vulkan>,\n\n size: UVec2,\n\n) -> OpenxrSwapchain {\n\n const FORMAT: vk::Format = vk::Format::R8G8B8A8_SRGB;\n\n\n\n let usage = xr::SwapchainUsageFlags::COLOR_ATTACHMENT | xr::SwapchainUsageFlags::SAMPLED;\n\n // This corresponds to USAGE\n\n let hal_usage = hal::TextureUses::COLOR_TARGET | hal::TextureUses::RESOURCE;\n\n\n\n let swapchain = session\n\n .create_swapchain(&xr::SwapchainCreateInfo {\n\n create_flags: xr::SwapchainCreateFlags::EMPTY,\n\n usage_flags: usage,\n\n format: FORMAT.as_raw() as _,\n\n sample_count: 1,\n\n width: size.x,\n\n height: size.y,\n\n face_count: 1,\n", "file_path": "alvr/experiments/client/src/xr/openxr/graphics_interop.rs", "rank": 97, "score": 150348.43747365923 }, { "content": "#[cfg(windows)]\n\nfn kill_process(pid: u32) {\n\n use std::os::windows::process::CommandExt;\n\n Command::new(\"taskkill.exe\")\n\n .args(&[\"/PID\", &pid.to_string(), \"/F\"])\n\n .creation_flags(CREATE_NO_WINDOW)\n\n .output()\n\n .ok();\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 98, "score": 149786.65119767498 }, { "content": "use widestring::U16CStr;\n\n#[cfg(windows)]\n\nuse winapi::{\n\n shared::{winerror::FAILED, wtypes::VT_LPWSTR},\n\n um::{\n\n combaseapi::{CoCreateInstance, CoInitializeEx, CoTaskMemFree, CLSCTX_ALL},\n\n coml2api::STGM_READ,\n\n endpointvolume::IAudioEndpointVolume,\n\n functiondiscoverykeys_devpkey::PKEY_Device_FriendlyName,\n\n mmdeviceapi::{\n\n eAll, IMMDevice, IMMDeviceCollection, IMMDeviceEnumerator, MMDeviceEnumerator,\n\n DEVICE_STATE_ACTIVE,\n\n },\n\n objbase::COINIT_MULTITHREADED,\n\n propidl::{PropVariantClear, PROPVARIANT},\n\n propsys::IPropertyStore,\n\n },\n\n Class, Interface,\n\n};\n\n#[cfg(windows)]\n", "file_path": "alvr/audio/src/lib.rs", "rank": 99, "score": 58.497671938823174 } ]
Rust
tests/tests.rs
zhiburt/bumpalo
38054c706cda77a07a07c3eda27bbeb6ee93a706
use bumpalo::Bump; use std::alloc::Layout; use std::mem; use std::usize; #[test] fn can_iterate_over_allocated_things() { let mut bump = Bump::new(); const MAX: u64 = 131_072; let mut chunk_ends = vec![]; let mut last = None; for i in 0..MAX { let this = bump.alloc(i); assert_eq!(*this, i); let this = this as *const _ as usize; if match last { Some(last) if last - mem::size_of::<u64>() == this => false, _ => true, } { let chunk_end = this + mem::size_of::<u64>(); println!("new chunk ending @ 0x{:x}", chunk_end); assert!( !chunk_ends.contains(&chunk_end), "should not have already allocated this chunk" ); chunk_ends.push(chunk_end); } last = Some(this); } let mut seen = vec![false; MAX as usize]; chunk_ends.reverse(); for ch in bump.iter_allocated_chunks() { let chunk_end = ch.as_ptr() as usize + ch.len(); println!("iter chunk ending @ {:#x}", chunk_end); assert_eq!( chunk_ends.pop().unwrap(), chunk_end, "should iterate over each chunk once, in order they were allocated in" ); let (before, mid, after) = unsafe { ch.align_to::<u64>() }; assert!(before.is_empty()); assert!(after.is_empty()); for i in mid { assert!(*i < MAX, "{} < {} (aka {:x} < {:x})", i, MAX, i, MAX); seen[*i as usize] = true; } } assert!(seen.iter().all(|s| *s)); } #[test] #[should_panic(expected = "out of memory")] fn oom_instead_of_bump_pointer_overflow() { let bump = Bump::new(); let x = bump.alloc(0_u8); let p = x as *mut u8 as usize; let size = usize::MAX - p + 1; let align = 1; let layout = match Layout::from_size_align(size, align) { Err(e) => { eprintln!("Layout::from_size_align errored: {}", e); return; } Ok(l) => l, }; bump.alloc_layout(layout); } #[test] fn force_new_chunk_fits_well() { let b = Bump::new(); b.alloc_layout(Layout::from_size_align(1, 1).unwrap()); b.alloc_layout(Layout::from_size_align(100_001, 1).unwrap()); b.alloc_layout(Layout::from_size_align(100_003, 1).unwrap()); } #[test] fn alloc_with_strong_alignment() { let b = Bump::new(); 
b.alloc_layout(Layout::from_size_align(4096, 64).unwrap()); } #[test] fn alloc_slice_copy() { let b = Bump::new(); let src: &[u16] = &[0xFEED, 0xFACE, 0xA7, 0xCAFE]; let dst = b.alloc_slice_copy(src); assert_eq!(src, dst); } #[test] fn alloc_slice_clone() { let b = Bump::new(); let src = vec![vec![0], vec![1, 2], vec![3, 4, 5], vec![6, 7, 8, 9]]; let dst = b.alloc_slice_clone(&src); assert_eq!(src, dst); } #[test] fn small_size_and_large_align() { let b = Bump::new(); let layout = std::alloc::Layout::from_size_align(1, 0x1000).unwrap(); b.alloc_layout(layout); } fn with_capacity_helper<I, T>(iter: I) where T: Copy + Eq, I: Clone + Iterator<Item = T>, { for &initial_size in &[0, 1, 8, 11, 0x1000, 0x12345] { let mut b = Bump::with_capacity(initial_size); for v in iter.clone() { b.alloc(v); } let pushed_values = b.iter_allocated_chunks().flat_map(|c| { let (before, mid, after) = unsafe { c.align_to::<T>() }; assert!(before.is_empty()); assert!(after.is_empty()); mid.iter().rev().copied() }); assert!(pushed_values.eq(iter.clone())); } } #[test] fn with_capacity_test() { with_capacity_helper(0u8..255); with_capacity_helper(0u16..10000); with_capacity_helper(0u32..10000); with_capacity_helper(0u64..10000); with_capacity_helper(0u128..10000); } #[test] fn test_reset() { let mut b = Bump::new(); for i in 0u64..10_000 { b.alloc(i); } assert!(b.iter_allocated_chunks().count() > 1); let last_chunk = b.iter_allocated_chunks().last().unwrap(); let start = last_chunk.as_ptr() as usize; let end = start + last_chunk.len(); dbg!((start, end)); b.reset(); assert_eq!(end - mem::size_of::<u64>(), b.alloc(0u64) as *const u64 as usize); assert_eq!(b.iter_allocated_chunks().count(), 1); }
use bumpalo::Bump; use std::alloc::Layout; use std::mem; use std::usize; #[test] fn can_iterate_over_allocated_things() { let mut bump = Bump::new(); const MAX: u64 = 131_072; let mut chunk_ends = vec![]; let mut last = None; for i in 0..MAX { let this = bump.alloc(i); assert_eq!(*this, i); let this = this as *const _ as usize; if match last { Some(last) if last - mem::size_of::<u64>() == this => false, _ => true, } { let chunk_end = this + mem::size_of::<u64>(); println!("new chunk ending @ 0x{:x}", chunk_end); assert!( !chunk_ends.contains(&chunk_end), "should not have already allocated this chunk" ); chunk_ends.push(chunk_end); } last = Some(this); } let mut seen = vec![false; MAX as usize]; chunk_ends.reverse(); for ch in bump.iter_allocated_chunks() { let chunk_end = ch.as_ptr() as usize + ch.len(); println!("iter chunk ending @ {:#x}", chunk_end); assert_eq!( chunk_ends.pop().unwrap(), chunk_end, "should iterate over each chunk once, in order they were allocated in" ); let (before, mid, after) = unsafe { ch.align_to::<u64>() }; assert!(before.is_empty()); assert!(after.is_empty()); for i in mid { assert!(*i < MAX, "{} < {} (aka {:x} < {:x})", i, MAX, i, MAX); seen[*i as usize] = true; } } assert!(seen.iter().all(|s| *s)); } #[test] #[should_panic(expected = "out of memory")] fn oom_instead_of_bump_pointer_overflow() { let bump = Bump::new(); let x = bump.alloc(0_u8); let p = x as *mut u8 as usize; let size = usize::MAX - p + 1; let align = 1; let layout = match Layout::from_size_al
().copied() }); assert!(pushed_values.eq(iter.clone())); } } #[test] fn with_capacity_test() { with_capacity_helper(0u8..255); with_capacity_helper(0u16..10000); with_capacity_helper(0u32..10000); with_capacity_helper(0u64..10000); with_capacity_helper(0u128..10000); } #[test] fn test_reset() { let mut b = Bump::new(); for i in 0u64..10_000 { b.alloc(i); } assert!(b.iter_allocated_chunks().count() > 1); let last_chunk = b.iter_allocated_chunks().last().unwrap(); let start = last_chunk.as_ptr() as usize; let end = start + last_chunk.len(); dbg!((start, end)); b.reset(); assert_eq!(end - mem::size_of::<u64>(), b.alloc(0u64) as *const u64 as usize); assert_eq!(b.iter_allocated_chunks().count(), 1); }
ign(size, align) { Err(e) => { eprintln!("Layout::from_size_align errored: {}", e); return; } Ok(l) => l, }; bump.alloc_layout(layout); } #[test] fn force_new_chunk_fits_well() { let b = Bump::new(); b.alloc_layout(Layout::from_size_align(1, 1).unwrap()); b.alloc_layout(Layout::from_size_align(100_001, 1).unwrap()); b.alloc_layout(Layout::from_size_align(100_003, 1).unwrap()); } #[test] fn alloc_with_strong_alignment() { let b = Bump::new(); b.alloc_layout(Layout::from_size_align(4096, 64).unwrap()); } #[test] fn alloc_slice_copy() { let b = Bump::new(); let src: &[u16] = &[0xFEED, 0xFACE, 0xA7, 0xCAFE]; let dst = b.alloc_slice_copy(src); assert_eq!(src, dst); } #[test] fn alloc_slice_clone() { let b = Bump::new(); let src = vec![vec![0], vec![1, 2], vec![3, 4, 5], vec![6, 7, 8, 9]]; let dst = b.alloc_slice_clone(&src); assert_eq!(src, dst); } #[test] fn small_size_and_large_align() { let b = Bump::new(); let layout = std::alloc::Layout::from_size_align(1, 0x1000).unwrap(); b.alloc_layout(layout); } fn with_capacity_helper<I, T>(iter: I) where T: Copy + Eq, I: Clone + Iterator<Item = T>, { for &initial_size in &[0, 1, 8, 11, 0x1000, 0x12345] { let mut b = Bump::with_capacity(initial_size); for v in iter.clone() { b.alloc(v); } let pushed_values = b.iter_allocated_chunks().flat_map(|c| { let (before, mid, after) = unsafe { c.align_to::<T>() }; assert!(before.is_empty()); assert!(after.is_empty()); mid.iter().rev
random
[ { "content": "fn size_align<T>() -> (usize, usize) {\n\n (mem::size_of::<T>(), mem::align_of::<T>())\n\n}\n\n\n\n/// The `AllocErr` error indicates an allocation failure\n\n/// that may be due to resource exhaustion or to\n\n/// something wrong when combining the given input arguments with this\n\n/// allocator.\n\n// #[unstable(feature = \"allocator_api\", issue = \"32838\")]\n\n#[derive(Clone, PartialEq, Eq, Debug)]\n\npub struct AllocErr;\n\n\n\n// (we need this for downstream impl of trait Error)\n\n// #[unstable(feature = \"allocator_api\", issue = \"32838\")]\n\nimpl fmt::Display for AllocErr {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"memory allocation failed\")\n\n }\n\n}\n\n\n", "file_path": "src/alloc.rs", "rank": 0, "score": 142058.8208647711 }, { "content": "pub fn handle_alloc_error(layout: Layout) -> ! {\n\n panic!(\"encountered allocation error: {:?}\", layout)\n\n}\n\n\n", "file_path": "src/alloc.rs", "rank": 4, "score": 123099.45622975155 }, { "content": "#[inline]\n\nfn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {\n\n if mem::size_of::<usize>() < 8 && alloc_size > ::core::isize::MAX as usize {\n\n Err(CapacityOverflow)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/collections/raw_vec.rs", "rank": 6, "score": 115815.19492347758 }, { "content": "#[cfg(feature = \"collections\")]\n\nfn format_realloc(bump: &bumpalo::Bump, n: usize) {\n\n let n = criterion::black_box(n);\n\n let s = bumpalo::format!(in bump, \"Hello {:.*}\", n, \"World! 
\");\n\n criterion::black_box(s);\n\n}\n\n\n\nconst ALLOCATIONS: usize = 10_000;\n\n\n", "file_path": "benches/benches.rs", "rank": 8, "score": 109069.89134298867 }, { "content": "#[test]\n\nfn recursive_vecs() {\n\n // The purpose of this test is to see if the data structures with\n\n // self references are allowed without causing a compile error\n\n // because of the dropck\n\n let b = Bump::new();\n\n\n\n struct Node<'a> {\n\n myself: Cell<Option<&'a Node<'a>>>,\n\n edges: Cell<Vec<'a, &'a Node<'a>>>,\n\n }\n\n\n\n let node1: &Node = b.alloc(Node {\n\n myself: Cell::new(None),\n\n edges: Cell::new(Vec::new_in(&b)),\n\n });\n\n let node2: &Node = b.alloc(Node {\n\n myself: Cell::new(None),\n\n edges: Cell::new(Vec::new_in(&b)),\n\n });\n\n\n\n node1.myself.set(Some(node1));\n\n node1.edges.set(bumpalo::vec![in &b; node1, node1, node2]);\n\n\n\n node2.myself.set(Some(node2));\n\n node2.edges.set(bumpalo::vec![in &b; node1, node2]);\n\n}\n", "file_path": "tests/vec.rs", "rank": 9, "score": 108897.66864539678 }, { "content": "#[test]\n\nfn alloc_with_large_array() {\n\n let b = Bump::new();\n\n\n\n b.alloc_with(|| [4u8; 10_000_000]);\n\n}\n\n\n", "file_path": "tests/alloc_with.rs", "rank": 12, "score": 105972.81030527822 }, { "content": "#[test]\n\nfn alloc_with_large_tuple() {\n\n let b = Bump::new();\n\n\n\n b.alloc_with(|| {\n\n (\n\n 1u32,\n\n LargeStruct {\n\n small: 2,\n\n big1: [3; 20_000_000],\n\n big2: [4; 20_000_000],\n\n big3: [5; 20_000_000],\n\n },\n\n )\n\n });\n\n}\n\n\n", "file_path": "tests/alloc_with.rs", "rank": 13, "score": 105972.81030527822 }, { "content": "#[test]\n\nfn alloc_with_large_enum() {\n\n let b = Bump::new();\n\n\n\n b.alloc_with(|| LargeEnum::Small);\n\n}\n", "file_path": "tests/alloc_with.rs", "rank": 14, "score": 105972.81030527822 }, { "content": "#[test]\n\nfn alloc_with_large_struct() {\n\n let b = Bump::new();\n\n\n\n b.alloc_with(|| LargeStruct {\n\n small: 1,\n\n big1: [2; 20_000_000],\n\n big2: [3; 20_000_000],\n\n big3: 
[4; 20_000_000],\n\n });\n\n}\n\n\n", "file_path": "tests/alloc_with.rs", "rank": 15, "score": 105972.81030527822 }, { "content": "fn bench_alloc_with(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"alloc-with\");\n\n group.throughput(Throughput::Elements(ALLOCATIONS as u64));\n\n group.bench_function(\"small\", |b| b.iter(|| alloc_with::<Small>(ALLOCATIONS)));\n\n group.bench_function(\"big\", |b| b.iter(|| alloc_with::<Big>(ALLOCATIONS)));\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 16, "score": 103349.87709484312 }, { "content": "fn bench_alloc(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"alloc\");\n\n group.throughput(Throughput::Elements(ALLOCATIONS as u64));\n\n group.bench_function(\"small\", |b| b.iter(|| alloc::<Small>(ALLOCATIONS)));\n\n group.bench_function(\"big\", |b| b.iter(|| alloc::<Big>(ALLOCATIONS)));\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 17, "score": 103349.87709484312 }, { "content": "fn alloc<T: Default>(n: usize) {\n\n let arena = bumpalo::Bump::with_capacity(n * std::mem::size_of::<T>());\n\n for _ in 0..n {\n\n let arena = black_box(&arena);\n\n let val: &mut T = arena.alloc(black_box(Default::default()));\n\n black_box(val);\n\n }\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 18, "score": 101231.38534855613 }, { "content": "fn new_layout_err() -> LayoutErr {\n\n Layout::from_size_align(1, 3).unwrap_err()\n\n}\n\n\n", "file_path": "src/alloc.rs", "rank": 20, "score": 96290.14635701379 }, { "content": "fn partition_dedup_by<T, F>(s: &mut [T], mut same_bucket: F) -> (&mut [T], &mut [T])\n\nwhere\n\n F: FnMut(&mut T, &mut T) -> bool,\n\n{\n\n // Although we have a mutable reference to `s`, we cannot make\n\n // *arbitrary* changes. 
The `same_bucket` calls could panic, so we\n\n // must ensure that the slice is in a valid state at all times.\n\n //\n\n // The way that we handle this is by using swaps; we iterate\n\n // over all the elements, swapping as we go so that at the end\n\n // the elements we wish to keep are in the front, and those we\n\n // wish to reject are at the back. We can then split the slice.\n\n // This operation is still O(n).\n\n //\n\n // Example: We start in this state, where `r` represents \"next\n\n // read\" and `w` represents \"next_write`.\n\n //\n\n // r\n\n // +---+---+---+---+---+---+\n\n // | 0 | 1 | 1 | 2 | 3 | 3 |\n", "file_path": "src/collections/vec.rs", "rank": 21, "score": 94564.56233858677 }, { "content": "#[test]\n\nfn push_a_bunch_of_items() {\n\n let b = Bump::new();\n\n let mut v = Vec::new_in(&b);\n\n for x in 0..10_000 {\n\n v.push(x);\n\n }\n\n}\n\n\n", "file_path": "tests/vec.rs", "rank": 22, "score": 94478.65833582269 }, { "content": "fn range<T>(t: &T) -> (usize, usize) {\n\n let start = t as *const _ as usize;\n\n let end = start + mem::size_of::<T>();\n\n (start, end)\n\n}\n\n\n\nquickcheck! 
{\n\n fn can_allocate_big_values(values: Vec<BigValue>) -> () {\n\n let bump = Bump::new();\n\n let mut alloced = vec![];\n\n\n\n for vals in values.iter().cloned() {\n\n alloced.push(bump.alloc(vals));\n\n }\n\n\n\n for (vals, alloc) in values.iter().zip(alloced.into_iter()) {\n\n assert_eq!(vals, alloc);\n\n }\n\n }\n\n\n", "file_path": "tests/quickchecks.rs", "rank": 23, "score": 92031.1240774926 }, { "content": "#[inline]\n\npub fn utf8_char_width(b: u8) -> usize {\n\n UTF8_CHAR_WIDTH[b as usize] as usize\n\n}\n", "file_path": "src/collections/str/mod.rs", "rank": 26, "score": 87725.57363190521 }, { "content": "fn alloc_with<T: Default>(n: usize) {\n\n let arena = bumpalo::Bump::with_capacity(n * std::mem::size_of::<T>());\n\n for _ in 0..n {\n\n let arena = black_box(&arena);\n\n let val: &mut T = arena.alloc_with(|| black_box(Default::default()));\n\n black_box(val);\n\n }\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 27, "score": 86331.78781580087 }, { "content": "#[inline(never)]\n\nfn allocation_size_overflow<T>() -> T {\n\n panic!(\"requested allocation size overflowed\")\n\n}\n\n\n\nimpl Bump {\n\n fn default_chunk_layout() -> Layout {\n\n unsafe { layout_from_size_align(DEFAULT_CHUNK_SIZE_WITH_FOOTER, DEFAULT_CHUNK_ALIGN) }\n\n }\n\n\n\n /// Construct a new arena to bump allocate into.\n\n ///\n\n /// ## Example\n\n ///\n\n /// ```\n\n /// let bump = bumpalo::Bump::new();\n\n /// # let _ = bump;\n\n /// ```\n\n pub fn new() -> Bump {\n\n let chunk_footer = Self::new_chunk(None);\n\n Bump {\n", "file_path": "src/lib.rs", "rank": 28, "score": 84485.11717949974 }, { "content": "fn overlap((a1, a2): (usize, usize), (b1, b2): (usize, usize)) -> bool {\n\n assert!(a1 < a2);\n\n assert!(b1 < b2);\n\n a1 < b2 && b1 < a2\n\n}\n\n\n", "file_path": "tests/quickchecks.rs", "rank": 29, "score": 84336.77547742744 }, { "content": "fn bench_format_realloc(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"format-realloc\");\n\n\n\n for n in 
(1..5).map(|n| n * n * n * 10) {\n\n group.throughput(Throughput::Elements(n as u64));\n\n group.bench_with_input(BenchmarkId::new(\"format-realloc\", n), &n, |b, n| {\n\n let mut bump = bumpalo::Bump::new();\n\n b.iter(|| {\n\n bump.reset();\n\n format_realloc(&bump, *n);\n\n });\n\n });\n\n }\n\n}\n\n\n\ncriterion_group!(benches, bench_alloc, bench_alloc_with, bench_format_realloc);\n\ncriterion_main!(benches);\n", "file_path": "benches/benches.rs", "rank": 30, "score": 75645.02519914247 }, { "content": "#[test]\n\nfn format_a_bunch_of_strings() {\n\n let b = Bump::new();\n\n let mut s = String::from_str_in(\"hello\", &b);\n\n for i in 0..1000 {\n\n write!(&mut s, \" {}\", i).unwrap();\n\n }\n\n}\n", "file_path": "tests/string.rs", "rank": 31, "score": 69400.57576590047 }, { "content": "#[test]\n\nfn cargo_readme_up_to_date() {\n\n println!(\"Checking that `cargo readme > README.md` is up to date...\");\n\n\n\n let expected = Command::new(\"cargo\")\n\n .arg(\"readme\")\n\n .current_dir(env!(\"CARGO_MANIFEST_DIR\"))\n\n .output()\n\n .expect(\"should run `cargo readme` OK\")\n\n .stdout;\n\n let expected = String::from_utf8_lossy(&expected);\n\n\n\n let actual = fs::read_to_string(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/README.md\"))\n\n .expect(\"should read README.md OK\");\n\n\n\n if actual != expected {\n\n panic!(\"Run `cargo readme > README.md` to update README.md\");\n\n }\n\n}\n", "file_path": "tests/readme_up_to_date.rs", "rank": 32, "score": 66394.13396468632 }, { "content": "// One central function responsible for reporting capacity overflows. This'll\n\n// ensure that the code generation related to these panics is minimal as there's\n\n// only one location which panics rather than a bunch throughout the module.\n\nfn capacity_overflow() -> ! 
{\n\n panic!(\"capacity overflow\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn reserve_does_not_overallocate() {\n\n let bump = Bump::new();\n\n {\n\n let mut v: RawVec<u32> = RawVec::new_in(&bump);\n\n // First `reserve` allocates like `reserve_exact`\n\n v.reserve(0, 9);\n\n assert_eq!(9, v.cap());\n\n }\n\n\n\n {\n\n let mut v: RawVec<u32> = RawVec::new_in(&bump);\n", "file_path": "src/collections/raw_vec.rs", "rank": 33, "score": 64834.080162158236 }, { "content": "#![cfg(not(debug_assertions))]\n\n\n\n// All of these alloc_with tests will fail with \"fatal runtime error: stack overflow\" unless LLVM\n\n// manages to optimize the stack writes away.\n\n//\n\n// We only run them when debug_assertions are not set, as we expect them to fail outside release\n\n// mode.\n\n\n\nuse bumpalo::Bump;\n\n\n\n#[test]\n", "file_path": "tests/alloc_with.rs", "rank": 34, "score": 64805.90582546 }, { "content": "#![cfg(feature = \"collections\")]\n\nuse bumpalo::{collections::Vec, Bump};\n\nuse std::cell::Cell;\n\n\n\n#[test]\n", "file_path": "tests/vec.rs", "rank": 35, "score": 63088.06736083298 }, { "content": "enum LargeEnum {\n\n Small,\n\n #[allow(dead_code)]\n\n Large([u8; 10_000_000]),\n\n}\n\n\n", "file_path": "tests/alloc_with.rs", "rank": 36, "score": 56584.49649013883 }, { "content": "#[allow(dead_code)]\n\nstruct LargeStruct {\n\n small: usize,\n\n big1: [u8; 20_000_000],\n\n big2: [u8; 20_000_000],\n\n big3: [u8; 20_000_000],\n\n}\n\n\n", "file_path": "tests/alloc_with.rs", "rank": 37, "score": 56584.49649013883 }, { "content": "pub trait UnstableLayoutMethods {\n\n fn padding_needed_for(&self, align: usize) -> usize;\n\n fn repeat(&self, n: usize) -> Result<(Layout, usize), LayoutErr>;\n\n fn array<T>(n: usize) -> Result<Layout, LayoutErr>;\n\n}\n\n\n\nimpl UnstableLayoutMethods for Layout {\n\n fn padding_needed_for(&self, align: usize) -> usize {\n\n let len = self.size();\n\n\n\n // Rounded up value is:\n\n // 
len_rounded_up = (len + align - 1) & !(align - 1);\n\n // and then we return the padding difference: `len_rounded_up - len`.\n\n //\n\n // We use modular arithmetic throughout:\n\n //\n\n // 1. align is guaranteed to be > 0, so align - 1 is always\n\n // valid.\n\n //\n\n // 2. `len + align - 1` can overflow by at most `align - 1`,\n", "file_path": "src/alloc.rs", "rank": 38, "score": 50591.2430529751 }, { "content": "#[derive(Default)]\n\nstruct Small(u8);\n\n\n", "file_path": "benches/benches.rs", "rank": 39, "score": 48671.325003632235 }, { "content": "#[inline(never)]\n\n#[cold]\n\nfn oom() -> ! {\n\n panic!(\"out of memory\")\n\n}\n\n\n\nunsafe impl<'a> alloc::Alloc for &'a Bump {\n\n #[inline(always)]\n\n unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, alloc::AllocErr> {\n\n Ok(self.alloc_layout(layout))\n\n }\n\n\n\n #[inline]\n\n unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {\n\n // If the pointer is the last allocation we made, we can reuse the bytes,\n\n // otherwise they are simply leaked -- at least until somebody calls reset().\n\n if self.is_last_allocation(ptr) {\n\n let ptr = NonNull::new_unchecked(ptr.as_ptr().add(layout.size()));\n\n self.current_chunk_footer.get().as_ref().ptr.set(ptr);\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 40, "score": 46288.36944498806 }, { "content": " fn big_allocations_never_overlap(values: Vec<BigValue>) -> () {\n\n let bump = Bump::new();\n\n let mut alloced = vec![];\n\n\n\n for v in values {\n\n let a = bump.alloc(v);\n\n let start = a as *const _ as usize;\n\n let end = unsafe { (a as *const BigValue).offset(1) as usize };\n\n let range = (start, end);\n\n\n\n for r in &alloced {\n\n assert!(!overlap(*r, range));\n\n }\n\n\n\n alloced.push(range);\n\n }\n\n }\n\n\n\n fn can_allocate_heterogeneous_things_and_they_dont_overlap(things: Vec<Elems<u8, u64>>) -> () {\n\n let bump = Bump::new();\n", "file_path": "tests/quickchecks.rs", "rank": 44, "score": 32567.42914419957 
}, { "content": " let mut ranges = vec![];\n\n\n\n for t in things {\n\n let r = match t {\n\n Elems::OneT(a) => {\n\n range(bump.alloc(a))\n\n },\n\n Elems::TwoT(a, b) => {\n\n range(bump.alloc([a, b]))\n\n },\n\n Elems::FourT(a, b, c, d) => {\n\n range(bump.alloc([a, b, c, d]))\n\n },\n\n Elems::OneU(a) => {\n\n range(bump.alloc(a))\n\n },\n\n Elems::TwoU(a, b) => {\n\n range(bump.alloc([a, b]))\n\n },\n\n Elems::FourU(a, b, c, d) => {\n", "file_path": "tests/quickchecks.rs", "rank": 45, "score": 32556.347353414745 }, { "content": "#![cfg(feature = \"collections\")]\n\nuse bumpalo::{collections::String, Bump};\n\nuse std::fmt::Write;\n\n\n\n#[test]\n", "file_path": "tests/string.rs", "rank": 46, "score": 32551.256768465813 }, { "content": " range(bump.alloc([a, b, c, d]))\n\n },\n\n };\n\n\n\n for s in &ranges {\n\n assert!(!overlap(r, *s));\n\n }\n\n\n\n ranges.push(r);\n\n }\n\n }\n\n}\n", "file_path": "tests/quickchecks.rs", "rank": 47, "score": 32551.13090490611 }, { "content": " T::arbitrary(g),\n\n T::arbitrary(g),\n\n T::arbitrary(g),\n\n T::arbitrary(g),\n\n ),\n\n 3 => Elems::OneU(U::arbitrary(g)),\n\n 4 => Elems::TwoU(U::arbitrary(g), U::arbitrary(g)),\n\n 5 => Elems::FourU(\n\n U::arbitrary(g),\n\n U::arbitrary(g),\n\n U::arbitrary(g),\n\n U::arbitrary(g),\n\n ),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {\n\n match self {\n\n Elems::OneT(_) => Box::new(vec![].into_iter()),\n", "file_path": "tests/quickchecks.rs", "rank": 48, "score": 32548.36648837994 }, { "content": " Elems::TwoT(a, b) => {\n\n Box::new(vec![Elems::OneT(a.clone()), Elems::OneT(b.clone())].into_iter())\n\n }\n\n Elems::FourT(a, b, c, d) => Box::new(\n\n vec![\n\n Elems::TwoT(a.clone(), b.clone()),\n\n Elems::TwoT(a.clone(), c.clone()),\n\n Elems::TwoT(a.clone(), d.clone()),\n\n Elems::TwoT(b.clone(), c.clone()),\n\n Elems::TwoT(b.clone(), d.clone()),\n\n ]\n\n .into_iter(),\n\n ),\n\n Elems::OneU(_) => 
Box::new(vec![].into_iter()),\n\n Elems::TwoU(a, b) => {\n\n Box::new(vec![Elems::OneU(a.clone()), Elems::OneU(b.clone())].into_iter())\n\n }\n\n Elems::FourU(a, b, c, d) => Box::new(\n\n vec![\n\n Elems::TwoU(a.clone(), b.clone()),\n", "file_path": "tests/quickchecks.rs", "rank": 49, "score": 32547.71567979427 }, { "content": "use bumpalo::Bump;\n\nuse quickcheck::{quickcheck, Arbitrary, Gen};\n\nuse std::mem;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "tests/quickchecks.rs", "rank": 50, "score": 32547.700436730087 }, { "content": " Elems::TwoU(a.clone(), c.clone()),\n\n Elems::TwoU(a.clone(), d.clone()),\n\n Elems::TwoU(b.clone(), c.clone()),\n\n Elems::TwoU(b.clone(), d.clone()),\n\n ]\n\n .into_iter(),\n\n ),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/quickchecks.rs", "rank": 51, "score": 32543.43873506217 }, { "content": " ///\n\n /// * `layout` must *fit* that block of memory,\n\n ///\n\n /// * In addition to fitting the block of memory `layout`, the\n\n /// alignment of the `layout` must match the alignment used\n\n /// to allocate that block of memory.\n\n unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);\n\n\n\n // == ALLOCATOR-SPECIFIC QUANTITIES AND LIMITS ==\n\n // usable_size\n\n\n\n /// Returns bounds on the guaranteed usable size of a successful\n\n /// allocation created with the specified `layout`.\n\n ///\n\n /// In particular, if one has a memory block allocated via a given\n\n /// allocator `a` and layout `k` where `a.usable_size(k)` returns\n\n /// `(l, u)`, then one can pass that block to `a.dealloc()` with a\n\n /// layout in the size range [l, u].\n\n ///\n\n /// (All implementors of `usable_size` must ensure that\n", "file_path": "src/alloc.rs", "rank": 52, "score": 32279.892394620918 }, { "content": " unsafe {\n\n // self.align is already known to be valid and alloc_size has been\n\n // padded already.\n\n Ok((\n\n Layout::from_size_align_unchecked(alloc_size, self.align()),\n\n padded_size,\n\n ))\n\n }\n\n 
}\n\n\n\n fn array<T>(n: usize) -> Result<Layout, LayoutErr> {\n\n Layout::new::<T>().repeat(n).map(|(k, offs)| {\n\n debug_assert!(offs == mem::size_of::<T>());\n\n k\n\n })\n\n }\n\n}\n\n\n\n/// Represents the combination of a starting address and\n\n/// a total capacity of the returned block.\n\n// #[unstable(feature = \"allocator_api\", issue = \"32838\")]\n\n#[derive(Debug)]\n\npub struct Excess(pub NonNull<u8>, pub usize);\n\n\n", "file_path": "src/alloc.rs", "rank": 53, "score": 32278.269910331877 }, { "content": "/// What it means for a layout to \"fit\" a memory block means (or\n\n/// equivalently, for a memory block to \"fit\" a layout) is that the\n\n/// following two conditions must hold:\n\n///\n\n/// 1. The block's starting address must be aligned to `layout.align()`.\n\n///\n\n/// 2. The block's size must fall in the range `[use_min, use_max]`, where:\n\n///\n\n/// * `use_min` is `self.usable_size(layout).0`, and\n\n///\n\n/// * `use_max` is the capacity that was (or would have been)\n\n/// returned when (if) the block was allocated via a call to\n\n/// `alloc_excess` or `realloc_excess`.\n\n///\n\n/// Note that:\n\n///\n\n/// * the size of the layout most recently used to allocate the block\n\n/// is guaranteed to be in the range `[use_min, use_max]`, and\n\n///\n\n/// * a lower-bound on `use_max` can be safely approximated by a call to\n", "file_path": "src/alloc.rs", "rank": 54, "score": 32275.778931323 }, { "content": " layout: Layout,\n\n new_size: usize,\n\n ) -> Result<Excess, AllocErr> {\n\n let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());\n\n let usable_size = self.usable_size(&new_layout);\n\n self.realloc(ptr, layout, new_size)\n\n .map(|p| Excess(p, usable_size.1))\n\n }\n\n\n\n /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.\n\n ///\n\n /// If this returns `Ok`, then the allocator has asserted that the\n\n /// memory block referenced by `ptr` now fits `new_size`, and thus can\n\n 
/// be used to carry data of a layout of that size and same alignment as\n\n /// `layout`. (The allocator is allowed to\n\n /// expend effort to accomplish this, such as extending the memory block to\n\n /// include successor blocks, or virtual memory tricks.)\n\n ///\n\n /// Regardless of what this method returns, ownership of the\n\n /// memory block referenced by `ptr` has not been transferred, and\n", "file_path": "src/alloc.rs", "rank": 55, "score": 32275.340589123964 }, { "content": " /// Returning `Err` indicates that either `[T; n]` or the given\n\n /// memory block does not meet allocator's size or alignment\n\n /// constraints.\n\n ///\n\n /// Always returns `Err` on arithmetic overflow.\n\n unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>\n\n where\n\n Self: Sized,\n\n {\n\n match Layout::array::<T>(n) {\n\n Ok(k) if k.size() > 0 => {\n\n self.dealloc(ptr.cast(), k);\n\n Ok(())\n\n }\n\n _ => Err(AllocErr),\n\n }\n\n }\n\n}\n", "file_path": "src/alloc.rs", "rank": 56, "score": 32274.748584392106 }, { "content": " &mut self,\n\n ptr: NonNull<u8>,\n\n layout: Layout,\n\n new_size: usize,\n\n ) -> Result<NonNull<u8>, AllocErr> {\n\n let old_size = layout.size();\n\n\n\n if new_size >= old_size {\n\n if let Ok(()) = self.grow_in_place(ptr, layout, new_size) {\n\n return Ok(ptr);\n\n }\n\n } else if new_size < old_size {\n\n if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) {\n\n return Ok(ptr);\n\n }\n\n }\n\n\n\n // otherwise, fall back on alloc + copy + dealloc.\n\n let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());\n\n let result = self.alloc(new_layout);\n", "file_path": "src/alloc.rs", "rank": 57, "score": 32273.349758939454 }, { "content": " ///\n\n /// Clients wishing to abort computation in response to a\n\n /// reallocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n\n ///\n\n /// 
[`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n unsafe fn realloc_array<T>(\n\n &mut self,\n\n ptr: NonNull<T>,\n\n n_old: usize,\n\n n_new: usize,\n\n ) -> Result<NonNull<T>, AllocErr>\n\n where\n\n Self: Sized,\n\n {\n\n match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {\n\n (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => {\n\n debug_assert!(k_old.align() == k_new.align());\n\n self.realloc(ptr.cast(), k_old.clone(), k_new.size())\n\n .map(NonNull::cast)\n", "file_path": "src/alloc.rs", "rank": 58, "score": 32272.85711879941 }, { "content": " /// This function is unsafe for the same reasons that `alloc` is.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returning `Err` indicates that either memory is exhausted or\n\n /// `layout` does not meet allocator's size or alignment\n\n /// constraints, just as in `alloc`.\n\n ///\n\n /// Clients wishing to abort computation in response to an\n\n /// allocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n\n ///\n\n /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {\n\n let usable_size = self.usable_size(&layout);\n\n self.alloc(layout).map(|p| Excess(p, usable_size.1))\n\n }\n\n\n\n /// Behaves like `realloc`, but also returns the whole size of\n\n /// the returned block. For some `layout` inputs, like arrays, this\n", "file_path": "src/alloc.rs", "rank": 59, "score": 32271.211365796207 }, { "content": " /// contents of the memory block are unaltered.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure all of the following:\n\n ///\n\n /// * `ptr` must be currently allocated via this allocator,\n\n ///\n\n /// * `layout` must *fit* the `ptr` (see above). 
(The `new_size`\n\n /// argument need not fit it.)\n\n ///\n\n /// * `new_size` must be greater than zero.\n\n ///\n\n /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`,\n\n /// must not overflow (i.e. the rounded value must be less than `usize::MAX`).\n\n ///\n\n /// (Extension subtraits might provide more specific bounds on\n\n /// behavior, e.g. guarantee a sentinel address or a null pointer\n\n /// in response to a zero-size allocation request.)\n", "file_path": "src/alloc.rs", "rank": 60, "score": 32270.74656612645 }, { "content": " /// on another reallocation method before resorting to an abort.\n\n unsafe fn shrink_in_place(\n\n &mut self,\n\n ptr: NonNull<u8>,\n\n layout: Layout,\n\n new_size: usize,\n\n ) -> Result<(), CannotReallocInPlace> {\n\n let _ = ptr; // this default implementation doesn't care about the actual address.\n\n debug_assert!(new_size <= layout.size());\n\n let (l, _u) = self.usable_size(&layout);\n\n // layout.size() <= _u [guaranteed by usable_size()]\n\n // new_layout.size() <= layout.size() [required by this method]\n\n if l <= new_size {\n\n Ok(())\n\n } else {\n\n Err(CannotReallocInPlace)\n\n }\n\n }\n\n\n\n // == COMMON USAGE PATTERNS ==\n", "file_path": "src/alloc.rs", "rank": 61, "score": 32270.32367363564 }, { "content": " }\n\n\n\n /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.\n\n ///\n\n /// If this returns `Ok`, then the allocator has asserted that the\n\n /// memory block referenced by `ptr` now fits `new_size`, and\n\n /// thus can only be used to carry data of that smaller\n\n /// layout. (The allocator is allowed to take advantage of this,\n\n /// carving off portions of the block for reuse elsewhere.) 
The\n\n /// truncated contents of the block within the smaller layout are\n\n /// unaltered, and ownership of block has not been transferred.\n\n ///\n\n /// If this returns `Err`, then the memory block is considered to\n\n /// still represent the original (larger) `layout`. None of the\n\n /// block has been carved off for reuse elsewhere, ownership of\n\n /// the memory block has not been transferred, and the contents of\n\n /// the memory block are unaltered.\n\n ///\n\n /// # Safety\n\n ///\n", "file_path": "src/alloc.rs", "rank": 62, "score": 32270.28971932188 }, { "content": " /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error`\n\n /// function; clients are expected either to be able to recover from\n\n /// `grow_in_place` failures without aborting, or to fall back on\n\n /// another reallocation method before resorting to an abort.\n\n unsafe fn grow_in_place(\n\n &mut self,\n\n ptr: NonNull<u8>,\n\n layout: Layout,\n\n new_size: usize,\n\n ) -> Result<(), CannotReallocInPlace> {\n\n let _ = ptr; // this default implementation doesn't care about the actual address.\n\n debug_assert!(new_size >= layout.size());\n\n let (_l, u) = self.usable_size(&layout);\n\n // _l <= layout.size() [guaranteed by usable_size()]\n\n // layout.size() <= new_layout.size() [required by this method]\n\n if new_size <= u {\n\n Ok(())\n\n } else {\n\n Err(CannotReallocInPlace)\n\n }\n", "file_path": "src/alloc.rs", "rank": 63, "score": 32270.24803398511 }, { "content": " // so the &-mask wth `!(align - 1)` will ensure that in the\n\n // case of overflow, `len_rounded_up` will itself be 0.\n\n // Thus the returned padding, when added to `len`, yields 0,\n\n // which trivially satisfies the alignment `align`.\n\n //\n\n // (Of course, attempts to allocate blocks of memory whose\n\n // size and padding overflow in the above manner should cause\n\n // the allocator to yield an error anyway.)\n\n\n\n let len_rounded_up = 
len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);\n\n len_rounded_up.wrapping_sub(len)\n\n }\n\n\n\n fn repeat(&self, n: usize) -> Result<(Layout, usize), LayoutErr> {\n\n let padded_size = self\n\n .size()\n\n .checked_add(self.padding_needed_for(self.align()))\n\n .ok_or_else(new_layout_err)?;\n\n let alloc_size = padded_size.checked_mul(n).ok_or_else(new_layout_err)?;\n\n\n", "file_path": "src/alloc.rs", "rank": 64, "score": 32269.928791590562 }, { "content": " /// may include extra storage usable for additional data.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe for the same reasons that `realloc` is.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returning `Err` indicates that either memory is exhausted or\n\n /// `layout` does not meet allocator's size or alignment\n\n /// constraints, just as in `realloc`.\n\n ///\n\n /// Clients wishing to abort computation in response to a\n\n /// reallocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n\n ///\n\n /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n unsafe fn realloc_excess(\n\n &mut self,\n\n ptr: NonNull<u8>,\n", "file_path": "src/alloc.rs", "rank": 65, "score": 32269.84326785086 }, { "content": " ///\n\n /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n fn alloc_array<T>(&mut self, n: usize) -> Result<NonNull<T>, AllocErr>\n\n where\n\n Self: Sized,\n\n {\n\n match Layout::array::<T>(n) {\n\n Ok(layout) if layout.size() > 0 => unsafe { self.alloc(layout).map(|p| p.cast()) },\n\n _ => Err(AllocErr),\n\n }\n\n }\n\n\n\n /// Reallocates a block previously suitable for holding `n_old`\n\n /// instances of `T`, returning a block suitable for holding\n\n /// `n_new` instances of `T`.\n\n ///\n\n /// Captures a common usage pattern for allocators.\n\n ///\n\n /// The returned block is suitable for passing to the\n\n /// `alloc`/`realloc` methods of 
this allocator.\n", "file_path": "src/alloc.rs", "rank": 66, "score": 32269.83117456711 }, { "content": " /// the contents of the memory block are unaltered.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure all of the following:\n\n ///\n\n /// * `ptr` must be currently allocated via this allocator,\n\n ///\n\n /// * `layout` must *fit* the `ptr` (see above); note the\n\n /// `new_size` argument need not fit it,\n\n ///\n\n /// * `new_size` must not be less than `layout.size()`,\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns `Err(CannotReallocInPlace)` when the allocator is\n\n /// unable to assert that the memory block referenced by `ptr`\n\n /// could fit `layout`.\n\n ///\n", "file_path": "src/alloc.rs", "rank": 67, "score": 32269.722805038422 }, { "content": " /// Clients wishing to abort computation in response to an\n\n /// allocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n\n ///\n\n /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {\n\n let size = layout.size();\n\n let p = self.alloc(layout);\n\n if let Ok(p) = p {\n\n ptr::write_bytes(p.as_ptr(), 0, size);\n\n }\n\n p\n\n }\n\n\n\n /// Behaves like `alloc`, but also returns the whole size of\n\n /// the returned block. For some `layout` inputs, like arrays, this\n\n /// may include extra storage usable for additional data.\n\n ///\n\n /// # Safety\n\n ///\n", "file_path": "src/alloc.rs", "rank": 68, "score": 32268.624170301744 }, { "content": "/// `usable_size`.\n\n///\n\n/// * if a layout `k` fits a memory block (denoted by `ptr`)\n\n/// currently allocated via an allocator `a`, then it is legal to\n\n/// use that layout to deallocate it, i.e. 
`a.dealloc(ptr, k);`.\n\n///\n\n/// # Unsafety\n\n///\n\n/// The `Alloc` trait is an `unsafe` trait for a number of reasons, and\n\n/// implementors must ensure that they adhere to these contracts:\n\n///\n\n/// * Pointers returned from allocation functions must point to valid memory and\n\n/// retain their validity until at least the instance of `Alloc` is dropped\n\n/// itself.\n\n///\n\n/// * `Layout` queries and calculations in general must be correct. Callers of\n\n/// this trait are allowed to rely on the contracts defined on each method,\n\n/// and implementors must ensure such contracts remain true.\n\n///\n\n/// Note that this list may get tweaked over time as clarifications are made in\n", "file_path": "src/alloc.rs", "rank": 69, "score": 32268.326638322025 }, { "content": " /// `l <= k.size() <= u`)\n\n ///\n\n /// Both the lower- and upper-bounds (`l` and `u` respectively)\n\n /// are provided, because an allocator based on size classes could\n\n /// misbehave if one attempts to deallocate a block without\n\n /// providing a correct value for its size (i.e., one within the\n\n /// range `[l, u]`).\n\n ///\n\n /// Clients who wish to make use of excess capacity are encouraged\n\n /// to use the `alloc_excess` and `realloc_excess` instead, as\n\n /// this method is constrained to report conservative values that\n\n /// serve as valid bounds for *all possible* allocation method\n\n /// calls.\n\n ///\n\n /// However, for clients that do not wish to track the capacity\n\n /// returned by `alloc_excess` locally, this method is likely to\n\n /// produce useful results.\n\n #[inline]\n\n fn usable_size(&self, layout: &Layout) -> (usize, usize) {\n\n (layout.size(), layout.size())\n", "file_path": "src/alloc.rs", "rank": 70, "score": 32268.267398240598 }, { "content": " if let Ok(new_ptr) = result {\n\n ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size));\n\n self.dealloc(ptr, layout);\n\n }\n\n result\n\n }\n\n\n\n /// 
Behaves like `alloc`, but also ensures that the contents\n\n /// are set to zero before being returned.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe for the same reasons that `alloc` is.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returning `Err` indicates that either memory is exhausted or\n\n /// `layout` does not meet allocator's size or alignment\n\n /// constraints, just as in `alloc`.\n\n ///\n", "file_path": "src/alloc.rs", "rank": 71, "score": 32268.243016434135 }, { "content": " /// Returning `Err` indicates that either memory is exhausted or\n\n /// `T` does not meet allocator's size or alignment constraints.\n\n ///\n\n /// For zero-sized `T`, may return either of `Ok` or `Err`, but\n\n /// will *not* yield undefined behavior.\n\n ///\n\n /// Clients wishing to abort computation in response to an\n\n /// allocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n\n ///\n\n /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n fn alloc_one<T>(&mut self) -> Result<NonNull<T>, AllocErr>\n\n where\n\n Self: Sized,\n\n {\n\n let k = Layout::new::<T>();\n\n if k.size() > 0 {\n\n unsafe { self.alloc(k).map(|p| p.cast()) }\n\n } else {\n\n Err(AllocErr)\n", "file_path": "src/alloc.rs", "rank": 72, "score": 32268.059075330842 }, { "content": " /// a strict requirement. 
(Specifically: it is *legal* to\n\n /// implement this trait atop an underlying native allocation\n\n /// library that aborts on memory exhaustion.)\n\n ///\n\n /// Clients wishing to abort computation in response to an\n\n /// allocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n\n ///\n\n /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr>;\n\n\n\n /// Deallocate the memory referenced by `ptr`.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure all of the following:\n\n ///\n\n /// * `ptr` must denote a block of memory currently allocated via\n\n /// this allocator,\n", "file_path": "src/alloc.rs", "rank": 73, "score": 32267.56790705482 }, { "content": " /// behavior, e.g. to ensure initialization to particular sets of\n\n /// bit patterns.)\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure that `layout` has non-zero size.\n\n ///\n\n /// (Extension subtraits might provide more specific bounds on\n\n /// behavior, e.g. guarantee a sentinel address or a null pointer\n\n /// in response to a zero-size allocation request.)\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returning `Err` indicates that either memory is exhausted or\n\n /// `layout` does not meet allocator's size or alignment\n\n /// constraints.\n\n ///\n\n /// Implementations are encouraged to return `Err` on memory\n\n /// exhaustion rather than panicking or aborting, but this is not\n", "file_path": "src/alloc.rs", "rank": 74, "score": 32267.409295437923 }, { "content": " }\n\n\n\n // == METHODS FOR MEMORY REUSE ==\n\n // realloc. 
alloc_excess, realloc_excess\n\n\n\n /// Returns a pointer suitable for holding data described by\n\n /// a new layout with `layout`’s alignment and a size given\n\n /// by `new_size`. To\n\n /// accomplish this, this may extend or shrink the allocation\n\n /// referenced by `ptr` to fit the new layout.\n\n ///\n\n /// If this returns `Ok`, then ownership of the memory block\n\n /// referenced by `ptr` has been transferred to this\n\n /// allocator. The memory may or may not have been freed, and\n\n /// should be considered unusable (unless of course it was\n\n /// transferred back to the caller again via the return value of\n\n /// this method).\n\n ///\n\n /// If this method returns `Err`, then ownership of the memory\n\n /// block has not been transferred to this allocator, and the\n", "file_path": "src/alloc.rs", "rank": 75, "score": 32267.199515565957 }, { "content": " ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure all of the following:\n\n ///\n\n /// * `ptr` must be currently allocated via this allocator,\n\n ///\n\n /// * the layout of `[T; n_old]` must *fit* that block of memory.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returning `Err` indicates that either memory is exhausted or\n\n /// `[T; n_new]` does not meet allocator's size or alignment\n\n /// constraints.\n\n ///\n\n /// For zero-sized `T` or `n_new == 0`, may return either of `Ok` or\n\n /// `Err`, but will *not* yield undefined behavior.\n\n ///\n\n /// Always returns `Err` on arithmetic overflow.\n", "file_path": "src/alloc.rs", "rank": 76, "score": 32267.111802177384 }, { "content": " /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure all of the following:\n\n ///\n\n /// * `ptr` must be currently allocated via this allocator,\n\n ///\n\n /// * `layout` must *fit* the `ptr` (see above); note the\n\n /// `new_size` argument need not fit it,\n\n ///\n\n 
/// * `new_size` must not be greater than `layout.size()`\n\n /// (and must be greater than zero),\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns `Err(CannotReallocInPlace)` when the allocator is\n\n /// unable to assert that the memory block referenced by `ptr`\n\n /// could fit `layout`.\n\n ///\n\n /// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error`\n\n /// function; clients are expected either to be able to recover from\n\n /// `shrink_in_place` failures without aborting, or to fall back\n", "file_path": "src/alloc.rs", "rank": 77, "score": 32266.958217034768 }, { "content": " }\n\n }\n\n\n\n /// Deallocates a block suitable for holding an instance of `T`.\n\n ///\n\n /// The given block must have been produced by this allocator,\n\n /// and must be suitable for storing a `T` (in terms of alignment\n\n /// as well as minimum and maximum size); otherwise yields\n\n /// undefined behavior.\n\n ///\n\n /// Captures a common usage pattern for allocators.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure both:\n\n ///\n\n /// * `ptr` must denote a block of memory currently allocated via this allocator\n\n ///\n\n /// * the layout of `T` must *fit* that block of memory.\n", "file_path": "src/alloc.rs", "rank": 78, "score": 32266.476774884213 }, { "content": " ///\n\n /// # Errors\n\n ///\n\n /// Returns `Err` only if the new layout\n\n /// does not meet the allocator's size\n\n /// and alignment constraints of the allocator, or if reallocation\n\n /// otherwise fails.\n\n ///\n\n /// Implementations are encouraged to return `Err` on memory\n\n /// exhaustion rather than panicking or aborting, but this is not\n\n /// a strict requirement. 
(Specifically: it is *legal* to\n\n /// implement this trait atop an underlying native allocation\n\n /// library that aborts on memory exhaustion.)\n\n ///\n\n /// Clients wishing to abort computation in response to a\n\n /// reallocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n\n ///\n\n /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html\n\n unsafe fn realloc(\n", "file_path": "src/alloc.rs", "rank": 79, "score": 32265.871418550705 }, { "content": " unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)\n\n where\n\n Self: Sized,\n\n {\n\n let k = Layout::new::<T>();\n\n if k.size() > 0 {\n\n self.dealloc(ptr.cast(), k);\n\n }\n\n }\n\n\n\n /// Allocates a block suitable for holding `n` instances of `T`.\n\n ///\n\n /// Captures a common usage pattern for allocators.\n\n ///\n\n /// The returned block is suitable for passing to the\n\n /// `alloc`/`realloc` methods of this allocator.\n\n ///\n\n /// Note to implementors: If this returns `Ok(ptr)`, then `ptr`\n\n /// must be considered \"currently allocated\" and must be\n\n /// acceptable input to methods such as `realloc` or `dealloc`,\n", "file_path": "src/alloc.rs", "rank": 80, "score": 32264.39014592206 }, { "content": "}\n\n\n\n/// An implementation of `Alloc` can allocate, reallocate, and\n\n/// deallocate arbitrary blocks of data described via `Layout`.\n\n///\n\n/// Some of the methods require that a memory block be *currently\n\n/// allocated* via an allocator. 
This means that:\n\n///\n\n/// * the starting address for that memory block was previously\n\n/// returned by a previous call to an allocation method (`alloc`,\n\n/// `alloc_zeroed`, `alloc_excess`, `alloc_one`, `alloc_array`) or\n\n/// reallocation method (`realloc`, `realloc_excess`, or\n\n/// `realloc_array`), and\n\n///\n\n/// * the memory block has not been subsequently deallocated, where\n\n/// blocks are deallocated either by being passed to a deallocation\n\n/// method (`dealloc`, `dealloc_one`, `dealloc_array`) or by being\n\n/// passed to a reallocation method (see above) that returns `Ok`.\n\n///\n\n/// A note regarding zero-sized types and zero-sized layouts: many\n", "file_path": "src/alloc.rs", "rank": 81, "score": 32264.01554896397 }, { "content": "/// the future.\n\n// #[unstable(feature = \"allocator_api\", issue = \"32838\")]\n\npub unsafe trait Alloc {\n\n // (Note: some existing allocators have unspecified but well-defined\n\n // behavior in response to a zero size allocation request ;\n\n // e.g. in C, `malloc` of 0 will either return a null pointer or a\n\n // unique pointer, but will not have arbitrary undefined\n\n // behavior.\n\n // However in jemalloc for example,\n\n // `mallocx(0)` is documented as undefined behavior.)\n\n\n\n /// Returns a pointer meeting the size and alignment guarantees of\n\n /// `layout`.\n\n ///\n\n /// If this method returns an `Ok(addr)`, then the `addr` returned\n\n /// will be non-null address pointing to a block of storage\n\n /// suitable for holding an instance of `layout`.\n\n ///\n\n /// The returned block of storage may or may not have its contents\n\n /// initialized. 
(Extension subtraits might restrict this\n", "file_path": "src/alloc.rs", "rank": 82, "score": 32263.357060388025 }, { "content": " }\n\n _ => Err(AllocErr),\n\n }\n\n }\n\n\n\n /// Deallocates a block suitable for holding `n` instances of `T`.\n\n ///\n\n /// Captures a common usage pattern for allocators.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because undefined behavior can result\n\n /// if the caller does not ensure both:\n\n ///\n\n /// * `ptr` must denote a block of memory currently allocated via this allocator\n\n ///\n\n /// * the layout of `[T; n]` must *fit* that block of memory.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/alloc.rs", "rank": 83, "score": 32263.33600270502 }, { "content": "// Copyright 2015 The Rust Project Developers. See the COPYRIGHT\n\n// file at the top-level directory of this distribution and at\n\n// http://rust-lang.org/COPYRIGHT.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n#![allow(unstable_name_collisions)]\n\n#![allow(dead_code)]\n\n\n\n//! 
Memory allocation APIs\n\n\n\nuse core::cmp;\n\nuse core::fmt;\n\nuse core::mem;\n\nuse core::ptr::{self, NonNull};\n\nuse core::usize;\n\n\n\npub use core::alloc::{Layout, LayoutErr};\n\n\n", "file_path": "src/alloc.rs", "rank": 84, "score": 32263.319636152424 }, { "content": "/// The `CannotReallocInPlace` error is used when `grow_in_place` or\n\n/// `shrink_in_place` were unable to reuse the given memory block for\n\n/// a requested layout.\n\n// #[unstable(feature = \"allocator_api\", issue = \"32838\")]\n\n#[derive(Clone, PartialEq, Eq, Debug)]\n\npub struct CannotReallocInPlace;\n\n\n\n// #[unstable(feature = \"allocator_api\", issue = \"32838\")]\n\nimpl CannotReallocInPlace {\n\n pub fn description(&self) -> &str {\n\n \"cannot reallocate allocator's memory in place\"\n\n }\n\n}\n\n\n\n// (we need this for downstream impl of trait Error)\n\n// #[unstable(feature = \"allocator_api\", issue = \"32838\")]\n\nimpl fmt::Display for CannotReallocInPlace {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.description())\n\n }\n", "file_path": "src/alloc.rs", "rank": 85, "score": 32262.79977921949 }, { "content": "/// methods in the `Alloc` trait state that allocation requests\n\n/// must be non-zero size, or else undefined behavior can result.\n\n///\n\n/// * However, some higher-level allocation methods (`alloc_one`,\n\n/// `alloc_array`) are well-defined on zero-sized types and can\n\n/// optionally support them: it is left up to the implementor\n\n/// whether to return `Err`, or to return `Ok` with some pointer.\n\n///\n\n/// * If an `Alloc` implementation chooses to return `Ok` in this\n\n/// case (i.e. the pointer denotes a zero-sized inaccessible block)\n\n/// then that returned pointer must be considered \"currently\n\n/// allocated\". 
On such an allocator, *all* methods that take\n\n/// currently-allocated pointers as inputs must accept these\n\n/// zero-sized pointers, *without* causing undefined behavior.\n\n///\n\n/// * In other words, if a zero-sized pointer can flow out of an\n\n/// allocator, then that allocator must likewise accept that pointer\n\n/// flowing back into its deallocation and reallocation methods.\n\n///\n\n/// Some of the methods require that a layout *fit* a memory block.\n", "file_path": "src/alloc.rs", "rank": 86, "score": 32262.058919863302 }, { "content": " /// *even if* `T` is a zero-sized type. In other words, if your\n\n /// `Alloc` implementation overrides this method in a manner\n\n /// that can return a zero-sized `ptr`, then all reallocation and\n\n /// deallocation methods need to be similarly overridden to accept\n\n /// such values as input.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returning `Err` indicates that either memory is exhausted or\n\n /// `[T; n]` does not meet allocator's size or alignment\n\n /// constraints.\n\n ///\n\n /// For zero-sized `T` or `n == 0`, may return either of `Ok` or\n\n /// `Err`, but will *not* yield undefined behavior.\n\n ///\n\n /// Always returns `Err` on arithmetic overflow.\n\n ///\n\n /// Clients wishing to abort computation in response to an\n\n /// allocation error are encouraged to call the [`handle_alloc_error`] function,\n\n /// rather than directly invoking `panic!` or similar.\n", "file_path": "src/alloc.rs", "rank": 87, "score": 32261.8459882146 }, { "content": " // alloc_one, dealloc_one, alloc_array, realloc_array. 
dealloc_array\n\n\n\n /// Allocates a block suitable for holding an instance of `T`.\n\n ///\n\n /// Captures a common usage pattern for allocators.\n\n ///\n\n /// The returned block is suitable for passing to the\n\n /// `alloc`/`realloc` methods of this allocator.\n\n ///\n\n /// Note to implementors: If this returns `Ok(ptr)`, then `ptr`\n\n /// must be considered \"currently allocated\" and must be\n\n /// acceptable input to methods such as `realloc` or `dealloc`,\n\n /// *even if* `T` is a zero-sized type. In other words, if your\n\n /// `Alloc` implementation overrides this method in a manner\n\n /// that can return a zero-sized `ptr`, then all reallocation and\n\n /// deallocation methods need to be similarly overridden to accept\n\n /// such values as input.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/alloc.rs", "rank": 88, "score": 32257.736057816623 }, { "content": "use std::fs;\n\nuse std::process::Command;\n\n\n\n#[test]\n", "file_path": "tests/readme_up_to_date.rs", "rank": 89, "score": 30348.525885708368 }, { "content": "#[repr(C)]\n\n#[derive(Debug)]\n\nstruct ChunkFooter {\n\n // Pointer to the start of this chunk allocation. 
This footer is always at\n\n // the end of the chunk.\n\n data: NonNull<u8>,\n\n\n\n // The layout of this chunk's allocation.\n\n layout: Layout,\n\n\n\n // Link to the next chunk, if any.\n\n next: Cell<Option<NonNull<ChunkFooter>>>,\n\n\n\n // Bump allocation finger that is always in the range `self.data..=self`.\n\n ptr: Cell<NonNull<u8>>,\n\n}\n\n\n\nimpl Default for Bump {\n\n fn default() -> Bump {\n\n Bump::new()\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 90, "score": 28845.14286349317 }, { "content": "#[derive(Default)]\n\nstruct Big([usize; 32]);\n\n\n", "file_path": "benches/benches.rs", "rank": 91, "score": 28763.90359512642 }, { "content": " } else {\n\n unsafe { offset_from(self.end, self.ptr) as usize }\n\n };\n\n (exact, Some(exact))\n\n }\n\n\n\n #[inline]\n\n fn count(self) -> usize {\n\n self.len()\n\n }\n\n}\n\n\n\nimpl<'bump, T: 'bump> DoubleEndedIterator for IntoIter<T> {\n\n #[inline]\n\n fn next_back(&mut self) -> Option<T> {\n\n unsafe {\n\n if self.end == self.ptr {\n\n None\n\n } else if mem::size_of::<T>() == 0 {\n\n // See above for why 'ptr.offset' isn't used\n", "file_path": "src/collections/vec.rs", "rank": 92, "score": 28496.36981552833 }, { "content": " /// In this example, the vector gets expanded from zero to four items\n\n /// without any memory allocations occurring, resulting in vector\n\n /// values of unallocated memory:\n\n ///\n\n /// ```\n\n /// use bumpalo::{Bump, collections::Vec};\n\n ///\n\n /// let b = Bump::new();\n\n ///\n\n /// let mut vec: Vec<char> = Vec::new_in(&b);\n\n ///\n\n /// unsafe {\n\n /// vec.set_len(4);\n\n /// }\n\n /// ```\n\n #[inline]\n\n pub unsafe fn set_len(&mut self, new_len: usize) {\n\n self.len = new_len;\n\n }\n\n\n", "file_path": "src/collections/vec.rs", "rank": 93, "score": 28493.3581919656 }, { "content": " /// vec.push(3);\n\n /// assert_eq!(vec, [1, 2, 3]);\n\n /// ```\n\n #[inline]\n\n pub fn push(&mut self, value: T) {\n\n // This will panic or abort if we would allocate > 
isize::MAX bytes\n\n // or if the length increment would overflow for zero-sized types.\n\n if self.len == self.buf.cap() {\n\n self.reserve(1);\n\n }\n\n unsafe {\n\n let end = self.as_mut_ptr().add(self.len);\n\n ptr::write(end, value);\n\n self.len += 1;\n\n }\n\n }\n\n\n\n /// Removes the last element from a vector and returns it, or [`None`] if it\n\n /// is empty.\n\n ///\n", "file_path": "src/collections/vec.rs", "rank": 94, "score": 28493.08502649648 }, { "content": " /// assert_eq!(into_iter.as_slice(), &['b', 'c']);\n\n /// ```\n\n pub fn as_slice(&self) -> &[T] {\n\n unsafe { slice::from_raw_parts(self.ptr, self.len()) }\n\n }\n\n\n\n /// Returns the remaining items of this iterator as a mutable slice.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use bumpalo::{Bump, collections::Vec};\n\n ///\n\n /// let b = Bump::new();\n\n ///\n\n /// let vec = bumpalo::vec![in &b; 'a', 'b', 'c'];\n\n /// let mut into_iter = vec.into_iter();\n\n /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);\n\n /// into_iter.as_mut_slice()[2] = 'z';\n\n /// assert_eq!(into_iter.next().unwrap(), 'a');\n", "file_path": "src/collections/vec.rs", "rank": 95, "score": 28492.53566983345 }, { "content": " ptr::copy_nonoverlapping(src, dst, 1);\n\n }\n\n }\n\n None\n\n }\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n (0, Some(self.old_len - self.idx))\n\n }\n\n}\n\n\n\nimpl<'a, 'bump, T, F> Drop for DrainFilter<'a, 'bump, T, F>\n\nwhere\n\n F: FnMut(&mut T) -> bool,\n\n{\n\n fn drop(&mut self) {\n\n self.for_each(drop);\n\n unsafe {\n\n self.vec.set_len(self.old_len - self.del);\n\n }\n\n }\n\n}\n", "file_path": "src/collections/vec.rs", "rank": 96, "score": 28492.231162797143 }, { "content": " /// [`None`]: https://doc.rust-lang.org/nightly/std/option/enum.Option.html#variant.None\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use bumpalo::{Bump, collections::Vec};\n\n ///\n\n /// let b = Bump::new();\n\n ///\n\n /// let mut vec = 
bumpalo::vec![in &b; 1, 2, 3];\n\n /// assert_eq!(vec.pop(), Some(3));\n\n /// assert_eq!(vec, [1, 2]);\n\n /// ```\n\n #[inline]\n\n pub fn pop(&mut self) -> Option<T> {\n\n if self.len == 0 {\n\n None\n\n } else {\n\n unsafe {\n\n self.len -= 1;\n", "file_path": "src/collections/vec.rs", "rank": 97, "score": 28492.219341748427 }, { "content": "unsafe impl<'a, 'bump, T: Send> Send for Drain<'a, 'bump, T> {}\n\n\n\nimpl<'a, 'bump, T> Iterator for Drain<'a, 'bump, T> {\n\n type Item = T;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<T> {\n\n self.iter\n\n .next()\n\n .map(|elt| unsafe { ptr::read(elt as *const _) })\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.iter.size_hint()\n\n }\n\n}\n\n\n\nimpl<'a, 'bump, T> DoubleEndedIterator for Drain<'a, 'bump, T> {\n\n #[inline]\n\n fn next_back(&mut self) -> Option<T> {\n", "file_path": "src/collections/vec.rs", "rank": 98, "score": 28492.10647628813 }, { "content": " /// assert_eq!(into_iter.next().unwrap(), 'b');\n\n /// assert_eq!(into_iter.next().unwrap(), 'z');\n\n /// ```\n\n pub fn as_mut_slice(&mut self) -> &mut [T] {\n\n unsafe { slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) }\n\n }\n\n}\n\n\n\nunsafe impl<T: Send> Send for IntoIter<T> {}\n\nunsafe impl<T: Sync> Sync for IntoIter<T> {}\n\n\n\nimpl<'bump, T: 'bump> Iterator for IntoIter<T> {\n\n type Item = T;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<T> {\n\n unsafe {\n\n if self.ptr as *const _ == self.end {\n\n None\n\n } else if mem::size_of::<T>() == 0 {\n", "file_path": "src/collections/vec.rs", "rank": 99, "score": 28491.78677691387 } ]
Rust
artichoke-backend/src/convert/fixnum.rs
Talljoe/artichoke
36ed5eba078a9fbf3cb4d5c8f7407d0a773d2d6e
use std::convert::TryFrom; use crate::convert::{BoxIntoRubyError, UnboxRubyError}; use crate::core::{Convert, TryConvert}; use crate::exception::Exception; use crate::sys; use crate::types::{Int, Ruby, Rust}; use crate::value::Value; use crate::Artichoke; impl Convert<u8, Value> for Artichoke { #[inline] fn convert(&self, value: u8) -> Value { self.convert(Int::from(value)) } } impl Convert<u16, Value> for Artichoke { #[inline] fn convert(&self, value: u16) -> Value { self.convert(Int::from(value)) } } impl Convert<u32, Value> for Artichoke { #[inline] fn convert(&self, value: u32) -> Value { self.convert(Int::from(value)) } } impl TryConvert<u64, Value> for Artichoke { type Error = Exception; fn try_convert(&self, value: u64) -> Result<Value, Self::Error> { if let Ok(value) = Int::try_from(value) { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Ok(Value::from(fixnum)) } else { Err(Exception::from(BoxIntoRubyError::new( Rust::UnsignedInt, Ruby::Fixnum, ))) } } } impl TryConvert<usize, Value> for Artichoke { type Error = Exception; fn try_convert(&self, value: usize) -> Result<Value, Self::Error> { if let Ok(value) = Int::try_from(value) { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Ok(Value::from(fixnum)) } else { Err(Exception::from(BoxIntoRubyError::new( Rust::UnsignedInt, Ruby::Fixnum, ))) } } } impl Convert<i8, Value> for Artichoke { #[inline] fn convert(&self, value: i8) -> Value { self.convert(Int::from(value)) } } impl Convert<i16, Value> for Artichoke { #[inline] fn convert(&self, value: i16) -> Value { self.convert(Int::from(value)) } } impl Convert<i32, Value> for Artichoke { #[inline] fn convert(&self, value: i32) -> Value { self.convert(Int::from(value)) } } impl TryConvert<isize, Value> for Artichoke { type Error = Exception; fn try_convert(&self, value: isize) -> Result<Value, Self::Error> { if let Ok(value) = Int::try_from(value) { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Ok(Value::from(fixnum)) } else { 
Err(Exception::from(BoxIntoRubyError::new( Rust::SignedInt, Ruby::Fixnum, ))) } } } impl Convert<Int, Value> for Artichoke { #[inline] fn convert(&self, value: Int) -> Value { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Value::from(fixnum) } } impl TryConvert<Value, Int> for Artichoke { type Error = Exception; fn try_convert(&self, value: Value) -> Result<Int, Self::Error> { if let Ruby::Fixnum = value.ruby_type() { let inner = value.inner(); Ok(unsafe { sys::mrb_sys_fixnum_to_cint(inner) }) } else { Err(Exception::from(UnboxRubyError::new( &value, Rust::SignedInt, ))) } } } impl TryConvert<Value, u32> for Artichoke { type Error = Exception; fn try_convert(&self, value: Value) -> Result<u32, Self::Error> { if let Ruby::Fixnum = value.ruby_type() { let inner = value.inner(); let num = unsafe { sys::mrb_sys_fixnum_to_cint(inner) }; let num = u32::try_from(num).map_err(|_| UnboxRubyError::new(&value, Rust::UnsignedInt))?; Ok(num) } else { Err(Exception::from(UnboxRubyError::new( &value, Rust::SignedInt, ))) } } } impl TryConvert<Value, usize> for Artichoke { type Error = Exception; fn try_convert(&self, value: Value) -> Result<usize, Self::Error> { if let Ruby::Fixnum = value.ruby_type() { let inner = value.inner(); let num = unsafe { sys::mrb_sys_fixnum_to_cint(inner) }; let num = usize::try_from(num).map_err(|_| UnboxRubyError::new(&value, Rust::UnsignedInt))?; Ok(num) } else { Err(Exception::from(UnboxRubyError::new( &value, Rust::SignedInt, ))) } } } #[cfg(test)] mod tests { use quickcheck_macros::quickcheck; use crate::test::prelude::*; #[test] fn fail_convert() { let mut interp = crate::interpreter().unwrap(); let value = interp.eval(b"Object.new").unwrap(); let result = value.try_into::<Int>(&interp); assert!(result.is_err()); } #[quickcheck] fn convert_to_fixnum(i: Int) -> bool { let interp = crate::interpreter().unwrap(); let value = interp.convert(i); value.ruby_type() == Ruby::Fixnum } #[quickcheck] fn fixnum_with_value(i: Int) -> bool { let 
interp = crate::interpreter().unwrap(); let value = interp.convert(i); let inner = value.inner(); let cint = unsafe { sys::mrb_sys_fixnum_to_cint(inner) }; cint == i } #[quickcheck] fn roundtrip(i: Int) -> bool { let interp = crate::interpreter().unwrap(); let value = interp.convert(i); let value = value.try_into::<Int>(&interp).unwrap(); value == i } #[quickcheck] fn roundtrip_err(b: bool) -> bool { let interp = crate::interpreter().unwrap(); let value = interp.convert(b); let value = value.try_into::<Int>(&interp); value.is_err() } #[test] fn fixnum_to_usize() { let interp = crate::interpreter().unwrap(); let value = Convert::<_, Value>::convert(&interp, 100); let value = value.try_into::<usize>(&interp).unwrap(); assert_eq!(100, value); let value = Convert::<_, Value>::convert(&interp, -100); let value = value.try_into::<usize>(&interp); assert!(value.is_err()); } }
use std::convert::TryFrom; use crate::convert::{BoxIntoRubyError, UnboxRubyError}; use crate::core::{Convert, TryConvert}; use crate::exception::Exception; use crate::sys; use crate::types::{Int, Ruby, Rust}; use crate::value::Value; use crate::Artichoke; impl Convert<u8, Value> for Artichoke { #[inline] fn convert(&self, value: u8) -> Value { self.convert(Int::from(value)) } } impl Convert<u16, Value> for Artichoke { #[inline] fn convert(&self, value: u16) -> Value { self.convert(Int::from(value)) } } impl Convert<u32, Value> for Artichoke { #[inline] fn convert(&self, value: u32) -> Value { self.convert(Int::from(value)) } } impl TryConvert<u64, Value> for Artichoke { type Error = Exception; fn try_convert(&self, value: u64) -> Result<Value, Self::Error> { if let Ok(value) = Int::try_from(value) { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Ok(Value::from(fixnum)) } else { Err(Exception::from(BoxIntoRubyError::new( Rust::UnsignedInt, Ruby::Fixnum, ))) } } } impl TryConvert<usize, Value> for Artichoke { type Error = Exception; fn try_convert(&self, value: usize) -> Result<Value, Self::Error> { if let Ok(value) = Int::try_from(value) { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Ok(Value::from(fixnum)) } else { Err(Exception::from(BoxIntoRubyError::new( Rust::UnsignedInt, Ruby::Fixnum, ))) } } } impl Convert<i8, Value> for Artichoke { #[inline] fn convert(&self, value: i8) -> Value { self.convert(Int::from(value)) } } impl Convert<i16, Value> for Artichoke { #[inline] fn convert(&self, value: i16) -> Value { self.convert(Int::from(value)) } } impl Convert<i32, Value> for Artichoke { #[inline] fn convert(&self, value: i32) -> Value { self.convert(Int::from(value)) } } impl TryConvert<isize, Value> for Artichoke { type Error = Exception; fn try_convert(&self, value: isize) -> Result<Value, Self::Error> { if let Ok(value) = Int::try_from(value) { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Ok(Value::from(fixnum)) } else { 
Err(Exception::from(BoxIntoRubyError::new( Rust::SignedInt, Ruby::Fixnum, ))) } } } impl Convert<Int, Value> for Artichoke { #[inline] fn convert(&self, value: Int) -> Value { let fixnum = unsafe { sys::mrb_sys_fixnum_value(value) }; Value::from(fixnum) } } impl TryConvert<Value, Int> for Artichoke { type Error = Exception; fn try_convert(&self, value: Value) -> Result<Int, Self::Error> { if let Ruby::Fixnum = value.ruby_type() { let inner = value.inner(); Ok(unsafe { sys::mrb_sys_fixnum_to_cint(inner) }) } else { Err(Exception::from(UnboxRubyError::new( &value, Rust::SignedInt, ))) } } } impl TryConvert<Value, u32> for Artichoke { type Error = Exception; fn try_convert(&self, value: Value) -> Result<u32, Self::Error> { if let Ruby::Fixnum = value.ruby_type() { let inner = value.inner(); let num = unsafe { sys::mrb_sys_fixnum_to_cint(inner) }; let num = u32::try_from(num).map_err(|_| UnboxRubyError::new(&value, Rust::UnsignedInt))?; Ok(num) } else { Err(Exception::from(UnboxRubyError::new( &value, Rust::SignedInt, ))) } } } impl TryConvert<Value, usize> for Artichoke { type Error = Exception; fn try_convert(&self, value: Value) -> Result<usize, Self::Error> { if let Ruby::Fixnum = value.ruby_type() { let inner = value.inner(); let num = unsafe { sys::mrb_sys_fixnum_to_cint(inner) }; let num = usize::try_from(num).map_err(|_| UnboxRubyError::new(&value, Rust::UnsignedInt))?; Ok(num) } else { Err(Exception::from(UnboxRubyError::new( &value, Rust::SignedInt, ))) } } } #[cfg(test)] mod tests { use quickcheck_macros::quickcheck; use crate::test::prelude::*; #[test] fn fail_convert() { let mut interp = crate::interpreter().unwrap(); let value = interp.eval(b"Object.new").unwrap(); let result = value.try_into::<Int>(&interp); assert!(result.is_err()); } #[quickcheck] fn convert_to_fixnum(i: Int) -> bool { let interp = crate::interpreter().unwrap(); let value = interp.convert(i); value.ruby_type() == Ruby::Fixnum } #[quickcheck] fn fixnum_with_value(i: Int) -> bool { let 
interp = crate::interpreter().unwrap(); let value = interp.convert(i); let inner = value.inner(); let cint = unsafe { sys::mrb_sys_fixnum_to_cint(inner) }; cint == i } #[quickcheck] fn roundtrip(i: Int) -> bool { let interp = crate::interpreter().unwrap(); let value = interp.convert(i); let value = value.try_into::<Int>(&interp).unwrap(); value == i } #[quickcheck] fn roundtrip_err(b: bool) -> bool { let interp = crate::interpreter().unwrap(); let value = interp.convert(b); let value = value.try_into::<Int>(&interp); value.is_err() } #[test]
}
fn fixnum_to_usize() { let interp = crate::interpreter().unwrap(); let value = Convert::<_, Value>::convert(&interp, 100); let value = value.try_into::<usize>(&interp).unwrap(); assert_eq!(100, value); let value = Convert::<_, Value>::convert(&interp, -100); let value = value.try_into::<usize>(&interp); assert!(value.is_err()); }
function_block-full_function
[ { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn frexp(interp: &mut Artichoke, value: Value) -> Result<(Fp, Int), Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let (fraction, exponent) = libm::frexp(value);\n\n Ok((fraction, exponent.into()))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 0, "score": 477069.3001098992 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn lgamma(interp: &mut Artichoke, value: Value) -> Result<(Fp, Int), Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_infinite() && value.is_sign_negative() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"lgamma\"\"#).into())\n\n } else {\n\n let (result, sign) = libm::lgamma_r(value);\n\n Ok((result, Int::from(sign)))\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 1, "score": 477069.3001098992 }, { "content": "/// Transform a `Exception` Ruby `Value` into an [`Exception`].\n\n///\n\n/// # Errors\n\n///\n\n/// This function makes funcalls on the interpreter which are fallible.\n\npub fn last_error(interp: &mut Artichoke, exception: Value) -> Result<Exception, Exception> {\n\n let mut arena = interp.create_arena_savepoint();\n\n // Clear the current exception from the mruby interpreter so subsequent\n\n // calls to the mruby VM are not tainted by an error they did not\n\n // generate.\n\n //\n\n // We must clear the pointer at the beginning of this function so we can\n\n // use the mruby VM to inspect the exception once we turn it into an\n\n // `mrb_value`. 
`Value::funcall` handles errors by calling this\n\n // function, so not clearing the exception results in a stack overflow.\n\n\n\n // Generate exception metadata in by executing the Ruby code:\n\n //\n\n // ```ruby\n\n // clazz = exception.class.name\n\n // message = exception.message\n\n // ```\n\n\n\n // Sometimes when hacking on extn/core it is possible to enter a\n\n // crash loop where an exception is captured by this handler, but\n", "file_path": "artichoke-backend/src/exception_handler.rs", "rank": 2, "score": 438091.6744774017 }, { "content": "pub fn load(interp: &mut Artichoke, filename: Value) -> Result<bool, Exception> {\n\n let filename = filename.implicitly_convert_to_string(interp)?;\n\n if filename.find_byte(b'\\0').is_some() {\n\n return Err(ArgumentError::from(\"path name contains null byte\").into());\n\n }\n\n let file = ffi::bytes_to_os_str(filename)?;\n\n let pathbuf;\n\n let mut path = Path::new(file);\n\n if path.is_relative() {\n\n pathbuf = Path::new(RUBY_LOAD_PATH).join(file);\n\n path = pathbuf.as_path();\n\n }\n\n if !interp.source_is_file(path)? 
{\n\n let mut message = b\"cannot load such file -- \".to_vec();\n\n message.extend_from_slice(filename);\n\n return Err(LoadError::from(message).into());\n\n }\n\n let context = Context::new(ffi::os_str_to_bytes(path.as_os_str())?.to_vec())\n\n .ok_or_else(|| ArgumentError::from(\"path name contains null byte\"))?;\n\n interp.push_context(context)?;\n\n let result = interp.load_source(path);\n\n let _ = interp.pop_context()?;\n\n result\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/kernel/require.rs", "rank": 3, "score": 436540.6522978223 }, { "content": "pub fn srand(interp: &mut Artichoke, seed: Seed) -> Result<Int, Exception> {\n\n let old_seed = interp.prng_seed()?;\n\n interp.prng_reseed(seed.to_reseed())?;\n\n #[allow(clippy::cast_possible_wrap)]\n\n Ok(old_seed as Int)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/mod.rs", "rank": 4, "score": 436265.469995944 }, { "content": "fn value_to_float(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n match value.ruby_type() {\n\n Ruby::Float => value.try_into(interp),\n\n Ruby::Fixnum => value.try_into::<Integer>(interp).map(Integer::as_f64),\n\n Ruby::Nil => Err(TypeError::from(\"can't convert nil into Float\").into()),\n\n _ => {\n\n // TODO: This should use `numeric::coerce`\n\n let class_of_numeric = interp\n\n .class_of::<Numeric>()?\n\n .ok_or_else(|| NotDefinedError::class(\"Numeric\"))?;\n\n let is_a_numeric = value.funcall(interp, \"is_a?\", &[class_of_numeric], None)?;\n\n let is_a_numeric = interp.try_convert(is_a_numeric);\n\n if let Ok(true) = is_a_numeric {\n\n if value.respond_to(interp, \"to_f\")? 
{\n\n let coerced = value.funcall(interp, \"to_f\", &[], None)?;\n\n if let Ruby::Float = coerced.ruby_type() {\n\n coerced.try_into::<Fp>(interp)\n\n } else {\n\n let mut message = String::from(\"can't convert \");\n\n message.push_str(value.pretty_name(interp));\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 5, "score": 433597.1920658608 }, { "content": "pub fn len(interp: &mut Artichoke, mut ary: Value) -> Result<usize, Exception> {\n\n let array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n Ok(array.len())\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 6, "score": 425079.32349605515 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn erf(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = libm::erf(value);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 7, "score": 422350.3202676763 }, { "content": "pub fn acos(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.acos();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"acos\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 8, "score": 422350.3202676762 }, { "content": "pub fn asin(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.asin();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"asin\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 9, "score": 422350.3202676762 }, { 
"content": "pub fn tanh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.tanh();\n\n Ok(result)\n\n}\n\n\n\n#[derive(Default, Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct DomainError(Cow<'static, str>);\n\n\n\nimpl From<String> for DomainError {\n\n fn from(message: String) -> Self {\n\n Self(message.into())\n\n }\n\n}\n\n\n\nimpl From<&'static str> for DomainError {\n\n fn from(message: &'static str) -> Self {\n\n Self(message.into())\n\n }\n\n}\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 10, "score": 422350.3202676762 }, { "content": "pub fn atan(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.atan();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 11, "score": 422350.3202676762 }, { "content": "pub fn sin(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.sin();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 12, "score": 422350.3202676762 }, { "content": "pub fn atanh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.atanh();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"atanh\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 13, "score": 422350.3202676763 }, { "content": "pub fn acosh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.acosh();\n\n if result.is_nan() 
{\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"acosh\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 14, "score": 422350.3202676763 }, { "content": "pub fn asinh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.asinh();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 15, "score": 422350.3202676762 }, { "content": "pub fn sqrt(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.sqrt();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"sqrt\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 16, "score": 422350.3202676762 }, { "content": "pub fn exp(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.exp();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 17, "score": 422350.3202676762 }, { "content": "pub fn cos(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.cos();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 18, "score": 422350.3202676762 }, { "content": "pub fn log2(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.log2();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"log2\"\"#).into())\n\n } else {\n\n Ok(result)\n\n 
}\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 19, "score": 422350.3202676762 }, { "content": "pub fn cbrt(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.cbrt();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 20, "score": 422350.32026767626 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn gamma(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n use crate::extn::core::float;\n\n use std::convert::TryFrom;\n\n use std::num::FpCategory;\n\n\n\n let value = value_to_float(interp, value)?;\n\n // `gamma(n)` is the same as `n!` for integer n > 0. `gamma` returns float\n\n // and might be an approximation so include a lookup table for as many `n`\n\n // as can fit in the float manitssa.\n\n let factorial_table = [\n\n 1.0_f64, // fact(0)\n\n 1.0, // fact(1)\n\n 2.0, // fact(2)\n\n 6.0, // fact(3)\n\n 24.0, // fact(4)\n\n 120.0, // fact(5)\n\n 720.0, // fact(6)\n\n 5_040.0, // fact(7)\n\n 40_320.0, // fact(8)\n\n 362_880.0, // fact(9)\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 21, "score": 422350.32026767626 }, { "content": "pub fn sinh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.sinh();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 22, "score": 422350.3202676762 }, { "content": "pub fn cosh(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.cosh();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 23, "score": 422350.3202676762 }, { "content": "pub fn log10(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n 
if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = value.log10();\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"log10\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 24, "score": 422350.3202676762 }, { "content": "pub fn tan(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = value.tan();\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 25, "score": 422350.3202676762 }, { "content": "#[cfg(feature = \"core-math-extra\")]\n\npub fn erfc(interp: &mut Artichoke, value: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let result = libm::erfc(value);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 26, "score": 422350.3202676762 }, { "content": "pub fn hypot(interp: &mut Artichoke, value: Value, other: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let other = value_to_float(interp, other)?;\n\n let result = value.hypot(other);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 27, "score": 414980.28785174 }, { "content": "pub fn atan2(interp: &mut Artichoke, value: Value, other: Value) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n let other = value_to_float(interp, other)?;\n\n let result = value.atan2(other);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 28, "score": 414980.28785174 }, { "content": "/// If `y` is the same type as `x`, returns an array `[y, x]`. 
Otherwise,\n\n/// returns an array with both `y` and `x` represented as `Float` objects.\n\n///\n\n/// This coercion mechanism is used by Ruby to handle mixed-type numeric\n\n/// operations: it is intended to find a compatible common type between the two\n\n/// operands of the operator.\n\n///\n\n/// See [`Numeric#coerce`][numeric].\n\n///\n\n/// # Coercion enum\n\n///\n\n/// Artichoke represents the `[y, x]` tuple Array as the [`Coercion`] enum, which\n\n/// orders its values `Coercion::Integer(x, y)`.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use artichoke_backend::prelude::core::*;\n\n/// # use artichoke_backend::extn::core::numeric::{self, Coercion};\n\n/// # fn main() -> Result<(), Box<std::error::Error>> {\n\n/// # let mut interp = artichoke_backend::interpreter()?;\n\n/// let x = interp.convert(1_i64);\n\n/// let y = interp.convert_mut(2.5_f64);\n\n/// assert_eq!(Coercion::Float(1.0, 2.5), numeric::coerce(&mut interp, x, y)?);\n\n/// let x = interp.convert_mut(1.2_f64);\n\n/// let y = interp.convert(3_i64);\n\n/// assert_eq!(Coercion::Float(1.2, 3.0), numeric::coerce(&mut interp, x, y)?);\n\n/// let x = interp.convert(1_i64);\n\n/// let y = interp.convert(2_i64);\n\n/// assert_eq!(Coercion::Integer(1, 2), numeric::coerce(&mut interp, x, y)?);\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n///\n\n/// [numeric]: https://ruby-doc.org/core-2.6.3/Numeric.html#method-i-coerce\n\npub fn coerce(interp: &mut Artichoke, x: Value, y: Value) -> Result<Coercion, Exception> {\n\n fn do_coerce(\n\n interp: &mut Artichoke,\n\n x: Value,\n\n y: Value,\n\n depth: u8,\n\n ) -> Result<Coercion, Exception> {\n\n if depth > MAX_COERCE_DEPTH {\n\n return Err(SystemStackError::from(\"stack level too deep\").into());\n\n }\n\n match (x.ruby_type(), y.ruby_type()) {\n\n (Ruby::Float, Ruby::Float) => {\n\n Ok(Coercion::Float(x.try_into(interp)?, y.try_into(interp)?))\n\n }\n\n (Ruby::Float, Ruby::Fixnum) => {\n\n let y = y.try_into::<Integer>(interp)?;\n\n 
Ok(Coercion::Float(x.try_into(interp)?, y.as_f64()))\n\n }\n\n (Ruby::Fixnum, Ruby::Float) => {\n\n let x = x.try_into::<Integer>(interp)?;\n", "file_path": "artichoke-backend/src/extn/core/numeric/mod.rs", "rank": 29, "score": 413685.58376178995 }, { "content": "pub fn to_a(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n if let Some(ary) = data.to_a()? {\n\n interp.try_convert_mut(ary)\n\n } else {\n\n Ok(Value::nil())\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 30, "score": 409050.55085936794 }, { "content": "pub fn to_s(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let display = data.to_s()?;\n\n Ok(interp.convert_mut(display))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 31, "score": 409050.55085936794 }, { "content": "pub fn length(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let len = data.len()?;\n\n if let Ok(len) = Int::try_from(len) {\n\n Ok(interp.convert(len))\n\n } else {\n\n Err(ArgumentError::from(\"input string too long\").into())\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 32, "score": 404886.36536461493 }, { "content": "pub fn string(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? 
};\n\n let mut string = interp.convert_mut(data.string());\n\n string.freeze(interp)?;\n\n Ok(string)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 33, "score": 404886.36536461493 }, { "content": "pub fn is_empty(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let symbol = unsafe { Symbol::unbox_from_value(&mut value, interp)? };\n\n let is_empty = symbol.is_empty(interp);\n\n Ok(interp.convert(is_empty))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 34, "score": 404886.36536461493 }, { "content": "pub fn regexp(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let regexp = data.regexp();\n\n // TODO(GH-614): MatchData#regexp needs to return an identical Regexp to the\n\n // one used to create the match (same object ID).\n\n //\n\n // The `Regexp::alloc_value` here should be replaced with\n\n // `Regexp::box_into_value`.\n\n //\n\n // See: https://github.com/ruby/spec/pull/727\n\n let regexp = Regexp::alloc_value(regexp.clone(), interp)?;\n\n Ok(regexp)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 35, "score": 404886.36536461493 }, { "content": "pub fn captures(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n if let Some(captures) = data.captures()? {\n\n interp.try_convert_mut(captures)\n\n } else {\n\n Ok(Value::nil())\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 36, "score": 404886.36536461493 }, { "content": "pub fn names(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? 
};\n\n let names = data.names();\n\n interp.try_convert_mut(names)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 37, "score": 404886.36536461493 }, { "content": "pub fn bytes(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let symbol = unsafe { Symbol::unbox_from_value(&mut value, interp)? };\n\n // These bytes must be cloned because they are owned by the interpreter.\n\n let bytes = symbol.bytes(interp).to_vec();\n\n Ok(interp.convert_mut(bytes))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 38, "score": 404886.365364615 }, { "content": "pub fn length(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let symbol = unsafe { Symbol::unbox_from_value(&mut value, interp)? };\n\n let len = symbol.len(interp);\n\n interp.try_convert(len)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 39, "score": 404886.365364615 }, { "content": "#[cfg(not(feature = \"core-math-extra\"))]\n\npub fn ldexp(interp: &mut Artichoke, fraction: Value, exponent: Value) -> Result<Fp, Exception> {\n\n let _ = interp;\n\n let _ = fraction;\n\n let _ = exponent;\n\n Err(Exception::from(NotImplementedError::from(\n\n \"enable 'core-math-extra' feature when building Artichoke\",\n\n )))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/math/mod.rs", "rank": 40, "score": 403768.3471070957 }, { "content": "pub fn to_h(interp: &mut Artichoke, mut environ: Value) -> Result<Value, Exception> {\n\n let environ = unsafe { Environ::unbox_from_value(&mut environ, interp) }?;\n\n let result = environ.to_map()?;\n\n Ok(interp.convert_mut(result))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/env/trampoline.rs", "rank": 41, "score": 402794.0725699044 }, { "content": "pub fn to_s(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, 
interp)? };\n\n let s = regexp.string();\n\n Ok(interp.convert_mut(s))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 42, "score": 402794.0725699044 }, { "content": "pub fn offset(interp: &mut Artichoke, mut value: Value, mut at: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let capture = match interp.try_convert_mut(&mut at)? {\n\n CaptureExtract::GroupIndex(idx) => Capture::GroupIndex(idx),\n\n CaptureExtract::GroupName(name) => Capture::GroupName(name),\n\n CaptureExtract::Symbol(symbol) => Capture::GroupName(symbol.bytes(interp)),\n\n };\n\n if let Some([begin, end]) = data.offset(capture)? {\n\n if let (Ok(begin), Ok(end)) = (Int::try_from(begin), Int::try_from(end)) {\n\n let ary = Array::assoc(interp.convert(begin), interp.convert(end));\n\n Array::alloc_value(ary, interp)\n\n } else {\n\n Err(ArgumentError::from(\"input string too long\").into())\n\n }\n\n } else {\n\n let ary = Array::assoc(Value::nil(), Value::nil());\n\n Array::alloc_value(ary, interp)\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 43, "score": 402441.2775409149 }, { "content": "pub fn end(interp: &mut Artichoke, mut value: Value, mut at: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let capture = match interp.try_convert_mut(&mut at)? 
{\n\n CaptureExtract::GroupIndex(idx) => Capture::GroupIndex(idx),\n\n CaptureExtract::GroupName(name) => Capture::GroupName(name),\n\n CaptureExtract::Symbol(symbol) => Capture::GroupName(symbol.bytes(interp)),\n\n };\n\n let end = data.end(capture)?;\n\n match end.map(Int::try_from) {\n\n Some(Ok(end)) => Ok(interp.convert(end)),\n\n Some(Err(_)) => Err(ArgumentError::from(\"input string too long\").into()),\n\n None => Ok(Value::nil()),\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 44, "score": 402441.2775409149 }, { "content": "pub fn begin(interp: &mut Artichoke, mut value: Value, mut at: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let capture = match interp.try_convert_mut(&mut at)? {\n\n CaptureExtract::GroupIndex(idx) => Capture::GroupIndex(idx),\n\n CaptureExtract::GroupName(name) => Capture::GroupName(name),\n\n CaptureExtract::Symbol(symbol) => Capture::GroupName(symbol.bytes(interp)),\n\n };\n\n let begin = data.begin(capture)?;\n\n match begin.map(Int::try_from) {\n\n Some(Ok(begin)) => Ok(interp.convert(begin)),\n\n Some(Err(_)) => Err(ArgumentError::from(\"input string too long\").into()),\n\n None => Ok(Value::nil()),\n\n }\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 45, "score": 402441.27754091495 }, { "content": "pub fn log(interp: &mut Artichoke, value: Value, base: Option<Value>) -> Result<Fp, Exception> {\n\n let value = value_to_float(interp, value)?;\n\n if value.is_nan() {\n\n return Ok(value);\n\n }\n\n let result = if let Some(base) = base {\n\n let base = value_to_float(interp, base)?;\n\n if base.is_nan() {\n\n return Ok(base);\n\n }\n\n value.log(base)\n\n } else {\n\n value.ln()\n\n };\n\n if result.is_nan() {\n\n Err(DomainError::from(r#\"Numerical argument is out of domain - \"log\"\"#).into())\n\n } else {\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": 
"artichoke-backend/src/extn/core/math/mod.rs", "rank": 46, "score": 401772.04897643 }, { "content": "pub fn named_captures(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let named_captures = data.named_captures()?;\n\n interp.try_convert_mut(named_captures)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 47, "score": 400843.6963369335 }, { "content": "pub fn pre_match(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let pre = data.pre();\n\n Ok(interp.convert_mut(pre))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 48, "score": 400843.6963369335 }, { "content": "pub fn post_match(interp: &mut Artichoke, mut value: Value) -> Result<Value, Exception> {\n\n let data = unsafe { MatchData::unbox_from_value(&mut value, interp)? };\n\n let post = data.post();\n\n Ok(interp.convert_mut(post))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/matchdata/trampoline.rs", "rank": 49, "score": 400843.6963369335 }, { "content": "pub fn minute(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let minute = time.inner().minute();\n\n let result = interp.convert(minute);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 50, "score": 398533.6809075187 }, { "content": "pub fn names(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? 
};\n\n let names = regexp.names();\n\n interp.try_convert_mut(names)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 51, "score": 398533.6809075187 }, { "content": "pub fn year(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let year = time.inner().year();\n\n let result = interp.convert(year);\n\n Ok(result)\n\n}\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 52, "score": 398533.6809075187 }, { "content": "pub fn second(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let second = time.inner().second();\n\n let result = interp.convert(second);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 53, "score": 398533.6809075187 }, { "content": "pub fn inspect(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let inspect = regexp.inspect();\n\n Ok(interp.convert_mut(inspect))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 54, "score": 398533.6809075187 }, { "content": "pub fn nanosecond(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let nanosecond = time.inner().nanosecond();\n\n let result = interp.convert(nanosecond);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 55, "score": 398533.6809075187 }, { "content": "pub fn microsecond(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? 
};\n\n let microsecond = time.inner().microsecond();\n\n let result = interp.convert(microsecond);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 56, "score": 398533.6809075187 }, { "content": "pub fn month(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let month = time.inner().month();\n\n let result = interp.convert(month);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 57, "score": 398533.6809075187 }, { "content": "pub fn seed(interp: &mut Artichoke, mut rand: Value) -> Result<Value, Exception> {\n\n let rand = unsafe { Random::unbox_from_value(&mut rand, interp)? };\n\n let seed = rand.seed(interp)?;\n\n Ok(interp.convert(seed))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 58, "score": 398533.68090751866 }, { "content": "pub fn source(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let source = regexp.source();\n\n Ok(interp.convert_mut(source))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 59, "score": 398533.6809075187 }, { "content": "pub fn options(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let opts = regexp.options();\n\n Ok(interp.convert(opts))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 60, "score": 398533.6809075187 }, { "content": "pub fn is_casefold(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? 
};\n\n let is_casefold = regexp.is_casefold();\n\n Ok(interp.convert(is_casefold))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 61, "score": 398533.6809075187 }, { "content": "pub fn clear(interp: &mut Artichoke, mut ary: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n array.clear();\n\n Ok(ary)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 62, "score": 398533.6809075187 }, { "content": "pub fn day(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let day = time.inner().day();\n\n let result = interp.convert(day);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 63, "score": 398533.6809075187 }, { "content": "pub fn hash(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let hash = regexp.hash();\n\n #[allow(clippy::cast_possible_wrap)]\n\n Ok(interp.convert(hash as Int))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 64, "score": 398533.6809075187 }, { "content": "pub fn pop(interp: &mut Artichoke, mut ary: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? 
};\n\n let result = array.pop();\n\n Ok(interp.convert(result))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 65, "score": 398533.6809075187 }, { "content": "pub fn hour(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let hour = time.inner().hour();\n\n let result = interp.convert(hour);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 66, "score": 398533.6809075187 }, { "content": "pub fn weekday(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let weekday = time.inner().weekday();\n\n let result = interp.convert(weekday);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 67, "score": 398533.6809075187 }, { "content": "pub fn year_day(interp: &mut Artichoke, mut time: Value) -> Result<Value, Exception> {\n\n let time = unsafe { Time::unbox_from_value(&mut time, interp)? };\n\n let year_day = time.inner().year_day();\n\n let result = interp.convert(year_day);\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 68, "score": 394399.779051236 }, { "content": "pub fn is_fixed_encoding(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? };\n\n let is_fixed_encoding = regexp.is_fixed_encoding();\n\n Ok(interp.convert(is_fixed_encoding))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 69, "score": 394399.779051236 }, { "content": "pub fn named_captures(interp: &mut Artichoke, mut regexp: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? 
};\n\n let named_captures = regexp.named_captures()?;\n\n interp.try_convert_mut(named_captures)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 70, "score": 394399.779051236 }, { "content": "pub fn reverse_bang(interp: &mut Artichoke, mut ary: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n array.reverse();\n\n Ok(ary)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 71, "score": 394399.779051236 }, { "content": "pub fn push(interp: &mut Artichoke, mut ary: Value, value: Value) -> Result<Value, Exception> {\n\n if ary.is_frozen(interp) {\n\n return Err(FrozenError::from(\"can't modify frozen Array\").into());\n\n }\n\n let mut array = unsafe { Array::unbox_from_value(&mut ary, interp)? };\n\n array.push(value);\n\n Ok(ary)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/array/trampoline.rs", "rank": 72, "score": 392666.8592652012 }, { "content": "pub fn equal(interp: &mut Artichoke, mut rand: Value, other: Value) -> Result<Value, Exception> {\n\n let rand = unsafe { Random::unbox_from_value(&mut rand, interp)? };\n\n let eql = rand.eql(interp, other)?;\n\n Ok(interp.convert(eql))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 73, "score": 392650.7529463153 }, { "content": "pub fn eql(interp: &mut Artichoke, mut regexp: Value, other: Value) -> Result<Value, Exception> {\n\n let regexp = unsafe { Regexp::unbox_from_value(&mut regexp, interp)? 
};\n\n let cmp = regexp.eql(interp, other);\n\n Ok(interp.convert(cmp))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 74, "score": 392650.7529463152 }, { "content": "pub fn clear_capture_globals(interp: &mut Artichoke) -> Result<(), Exception> {\n\n let mut idx = interp.active_regexp_globals()?;\n\n while let Some(group) = NonZeroUsize::new(idx) {\n\n interp.unset_global_variable(nth_match_group(group))?;\n\n idx -= 1\n\n }\n\n interp.clear_regexp()?;\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Regexp(Box<dyn RegexpType>);\n\n\n\nimpl Hash for Regexp {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.0.hash(state);\n\n }\n\n}\n\n\n\nimpl PartialEq for Regexp {\n", "file_path": "artichoke-backend/src/extn/core/regexp/mod.rs", "rank": 75, "score": 391300.3196150424 }, { "content": "#[inline]\n\npub fn uuid(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let uuid = securerandom::uuid();\n\n Ok(interp.convert_mut(uuid))\n\n}\n", "file_path": "artichoke-backend/src/extn/stdlib/securerandom/trampoline.rs", "rank": 76, "score": 390189.9069499698 }, { "content": "pub fn now(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let now = Time::now();\n\n let result = Time::alloc_value(now, interp)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/time/trampoline.rs", "rank": 77, "score": 390184.42697431927 }, { "content": "pub fn all_symbols(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let all_symbols = Symbol::all_symbols(interp)?;\n\n Array::alloc_value(all_symbols, interp)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/symbol/trampoline.rs", "rank": 78, "score": 390184.42697431927 }, { "content": "pub fn ord(interp: &mut Artichoke, value: Value) -> Result<Value, Exception> {\n\n let string = value.try_into_mut::<&[u8]>(interp)?;\n\n\n\n let ord = if let Some((start, end, ch)) = string.char_indices().next() {\n\n if ch == '\\u{FFFD}' {\n\n 
let slice = &string[start..end];\n\n match slice {\n\n [] => 0,\n\n [a] => u32::from_le_bytes([*a, 0, 0, 0]),\n\n [a, b] => u32::from_le_bytes([*a, *b, 0, 0]),\n\n [a, b, c] => u32::from_le_bytes([*a, *b, *c, 0]),\n\n [a, b, c, d] => u32::from_le_bytes([*a, *b, *c, *d]),\n\n _ => return Err(ArgumentError::from(\"Unicode out of range\").into()),\n\n }\n\n } else {\n\n // All `char`s are valid `u32`s\n\n // https://github.com/rust-lang/rust/blob/1.41.0/src/libcore/char/convert.rs#L12-L20\n\n ch as u32\n\n }\n\n } else {\n\n return Err(ArgumentError::from(\"empty string\").into());\n\n };\n\n Ok(interp.convert(ord))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/string/trampoline.rs", "rank": 79, "score": 389315.01161238004 }, { "content": "pub fn bytes(interp: &mut Artichoke, mut rand: Value, size: Value) -> Result<Value, Exception> {\n\n let mut rand = unsafe { Random::unbox_from_value(&mut rand, interp)? };\n\n let size = size.implicitly_convert_to_int(interp)?;\n\n let buf = rand.bytes(interp, size)?;\n\n Ok(interp.convert_mut(buf))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 80, "score": 388724.10838711663 }, { "content": "/// Load the Artichoke `MSpec` entry point end execute the specs.\n\n///\n\n/// # Errors\n\n///\n\n/// If an exception is raised on the Artichoke interpreter, it is returned.\n\npub fn run<'a, T>(interp: &mut Artichoke, specs: T) -> Result<bool, Exception>\n\nwhere\n\n T: IntoIterator<Item = &'a str>,\n\n{\n\n interp.def_rb_source_file(\"/src/lib/spec_helper.rb\", &b\"\"[..])?;\n\n interp.def_rb_source_file(\n\n \"/src/lib/test/spec_runner\",\n\n &include_bytes!(\"spec_runner.rb\")[..],\n\n )?;\n\n interp.eval_file(Path::new(\"/src/lib/test/spec_runner\"))?;\n\n let specs = interp.try_convert_mut(specs.into_iter().collect::<Vec<_>>())?;\n\n let result = interp\n\n .top_self()\n\n .funcall(interp, \"run_specs\", &[specs], None)?;\n\n 
interp.try_convert(result)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n // TODO(GH-528): fix failing tests on Windows.\n\n #[cfg_attr(target_os = \"windows\", should_panic)]\n\n fn mspec_framework_loads() {\n\n let mut interp = artichoke::interpreter().unwrap();\n\n super::init(&mut interp).unwrap();\n\n // should not panic\n\n assert!(super::run(&mut interp, vec![]).unwrap());\n\n }\n\n}\n", "file_path": "spec-runner/src/mspec.rs", "rank": 81, "score": 388082.74139802705 }, { "content": "pub fn initialize(interp: &mut Artichoke, into: Value) -> Result<Value, Exception> {\n\n let environ = Environ::initialize();\n\n let result = Environ::box_into_value(environ, into, interp)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/env/trampoline.rs", "rank": 82, "score": 387545.06068331376 }, { "content": "pub fn new_seed(interp: &mut Artichoke) -> Result<Value, Exception> {\n\n let seed = Random::new_seed();\n\n Ok(interp.convert(seed))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 83, "score": 385443.42871236196 }, { "content": "pub fn urandom(size: Int) -> Result<Vec<u8>, Exception> {\n\n match usize::try_from(size) {\n\n Ok(0) => Ok(Vec::new()),\n\n Ok(len) => {\n\n let mut buf = vec![0; len];\n\n let mut rng = rand::thread_rng();\n\n rng.try_fill_bytes(&mut buf)\n\n .map_err(|err| RuntimeError::from(err.to_string()))?;\n\n Ok(buf)\n\n }\n\n Err(_) => Err(ArgumentError::from(\"negative string size (or size too big)\").into()),\n\n }\n\n}\n\n\n\npub struct Random(Box<dyn backend::RandType>);\n\n\n\nimpl fmt::Debug for Random {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Random\")\n\n .field(\"backend\", self.0.as_debug())\n", "file_path": "artichoke-backend/src/extn/core/random/mod.rs", "rank": 84, "score": 384487.11054396327 }, { "content": "pub fn urandom(interp: &mut Artichoke, size: Value) -> Result<Value, Exception> {\n\n let size = 
size.implicitly_convert_to_int(interp)?;\n\n let buf = random::urandom(size)?;\n\n Ok(interp.convert_mut(buf))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/random/trampoline.rs", "rank": 85, "score": 383058.5333229165 }, { "content": "pub fn load(interp: &mut Artichoke, path: Value) -> Result<Value, Exception> {\n\n let success = kernel::require::load(interp, path)?;\n\n Ok(interp.convert(success))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/kernel/trampoline.rs", "rank": 86, "score": 383058.5333229165 }, { "content": "pub fn require(interp: &mut Artichoke, path: Value) -> Result<Value, Exception> {\n\n let success = kernel::require::require(interp, path, None)?;\n\n Ok(interp.convert(success))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/kernel/trampoline.rs", "rank": 87, "score": 383058.5333229165 }, { "content": "pub fn escape(interp: &mut Artichoke, pattern: Value) -> Result<Value, Exception> {\n\n let pattern = pattern.implicitly_convert_to_string(interp)?;\n\n let pattern = Regexp::escape(pattern)?;\n\n Ok(interp.convert_mut(pattern))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/regexp/trampoline.rs", "rank": 88, "score": 383058.5333229165 }, { "content": "pub fn require_relative(interp: &mut Artichoke, path: Value) -> Result<Value, Exception> {\n\n let relative_base = RelativePath::try_from_interp(interp)?;\n\n let success = kernel::require::require(interp, path, Some(relative_base))?;\n\n Ok(interp.convert(success))\n\n}\n", "file_path": "artichoke-backend/src/extn/core/kernel/trampoline.rs", "rank": 89, "score": 378714.8541684979 }, { "content": "pub fn init(interp: &mut Artichoke) -> InitializeResult<()> {\n\n let exception_spec = class::Spec::new(\"Exception\", None, None)?;\n\n class::Builder::for_spec(interp, &exception_spec).define()?;\n\n interp.def_class::<Exception>(exception_spec)?;\n\n\n\n let nomemory_spec = class::Spec::new(\"NoMemoryError\", None, None)?;\n\n class::Builder::for_spec(interp, 
&nomemory_spec)\n\n .with_super_class::<Exception, _>(\"Exception\")?\n\n .define()?;\n\n interp.def_class::<NoMemoryError>(nomemory_spec)?;\n\n\n\n let script_spec = class::Spec::new(\"ScriptError\", None, None)?;\n\n class::Builder::for_spec(interp, &script_spec)\n\n .with_super_class::<Exception, _>(\"Exception\")?\n\n .define()?;\n\n interp.def_class::<ScriptError>(script_spec)?;\n\n\n\n let load_spec = class::Spec::new(\"LoadError\", None, None)?;\n\n class::Builder::for_spec(interp, &load_spec)\n\n .with_super_class::<ScriptError, _>(\"ScriptError\")?\n", "file_path": "artichoke-backend/src/extn/core/exception/mod.rs", "rank": 90, "score": 376148.4075497588 }, { "content": "pub fn div(interp: &mut Artichoke, value: Value, denominator: Value) -> Result<Value, Exception> {\n\n let value = value.try_into::<Integer>(interp)?;\n\n let quotient = value.div(interp, denominator)?;\n\n Ok(interp.convert_mut(quotient))\n\n}\n\n\n", "file_path": "artichoke-backend/src/extn/core/integer/trampoline.rs", "rank": 91, "score": 376119.5902848476 }, { "content": "fn preamble(interp: &mut Artichoke) -> Result<String, Exception> {\n\n let description = interp\n\n .eval(b\"RUBY_DESCRIPTION\")?\n\n .try_into_mut::<&str>(interp)?;\n\n let compiler = interp\n\n .eval(b\"ARTICHOKE_COMPILER_VERSION\")?\n\n .try_into_mut::<&str>(interp)?;\n\n let mut buf = String::with_capacity(description.len() + 2 + compiler.len() + 1);\n\n buf.push_str(description);\n\n buf.push_str(\"\\n[\");\n\n buf.push_str(compiler);\n\n buf.push(']');\n\n Ok(buf)\n\n}\n\n\n", "file_path": "src/repl.rs", "rank": 92, "score": 375898.33084693586 }, { "content": "#[inline]\n\npub fn alphanumeric(interp: &mut Artichoke, len: Option<Value>) -> Result<Value, Exception> {\n\n let alpha = if let Some(len) = len {\n\n let len = len.implicitly_convert_to_int(interp)?;\n\n securerandom::alphanumeric(Some(len))?\n\n } else {\n\n securerandom::alphanumeric(None)?\n\n };\n\n Ok(interp.convert_mut(alpha))\n\n}\n\n\n", 
"file_path": "artichoke-backend/src/extn/stdlib/securerandom/trampoline.rs", "rank": 93, "score": 374174.2316730312 }, { "content": " if let Ruby::Float = value.ruby_type() {\n\n let value = value.inner();\n\n Ok(unsafe { sys::mrb_sys_float_to_cdouble(value) })\n\n } else {\n\n Err(Exception::from(UnboxRubyError::new(&value, Rust::Float)))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use quickcheck_macros::quickcheck;\n\n\n\n use crate::test::prelude::*;\n\n\n\n #[test]\n\n fn fail_convert() {\n\n let mut interp = crate::interpreter().unwrap();\n\n // get a Ruby Value that can't be converted to a primitive type.\n\n let value = interp.eval(b\"Object.new\").unwrap();\n", "file_path": "artichoke-backend/src/convert/float.rs", "rank": 99, "score": 54.6585629564231 } ]
Rust
garnet/bin/odu/src/file_target.rs
opensource-assist/fuschia
66646c55b3d0b36aae90a4b6706b87f1a6261935
use { crate::common_operations::pwrite, crate::io_packet::{IoPacket, IoPacketType, TimeInterval}, crate::operations::{OperationType, PipelineStages}, crate::target::{Error, Target, TargetOps, TargetType}, log::debug, log::error, std::{ fs::{File, OpenOptions}, ops::Range, os::unix::io::AsRawFd, process, result::Result, sync::Arc, time::Instant, }, }; #[derive(Clone)] pub struct FileIoPacket { io_sequence_number: u64, seed: u64, stage_timestamps: [TimeInterval; PipelineStages::stage_count()], operation_type: OperationType, offset_range: Range<u64>, io_result: Option<Error>, target: TargetType, buffer: Vec<u8>, } impl FileIoPacket { pub fn new( operation_type: OperationType, io_sequence_number: u64, seed: u64, offset_range: Range<u64>, target: TargetType, ) -> FileIoPacket { let buffer = vec![0; offset_range.end as usize - offset_range.start as usize]; FileIoPacket { operation_type, io_sequence_number, seed, stage_timestamps: [TimeInterval::new(); PipelineStages::stage_count()], offset_range: offset_range.clone(), io_result: None, target, buffer, } } } impl IoPacket for FileIoPacket { fn operation_type(&self) -> OperationType { self.operation_type } fn timestamp_stage_start(&mut self, stage: PipelineStages) { self.stage_timestamps[stage.stage_number()].start(); } fn timestamp_stage_end(&mut self, stage: PipelineStages) { self.stage_timestamps[stage.stage_number()].end(); } fn sequence_number(&self) -> u64 { self.io_sequence_number } fn stage_timestamps(&self) -> &[TimeInterval; PipelineStages::stage_count()] { &self.stage_timestamps } fn interval_to_u64(&self, stage: PipelineStages) -> (u64, u64) { self.stage_timestamps[stage.stage_number()].interval_to_u64(&self.target.start_instant()) } fn io_offset_range(&self) -> Range<u64> { self.offset_range.clone() } fn do_io(&mut self) { self.target.clone().do_io(self) } fn is_complete(&self) -> bool { self.target.clone().is_complete(self) } fn verify_needs_io(&self) -> bool { self.target.clone().verify_needs_io(self) } fn 
generate_verify_io(&mut self) { self.target.clone().generate_verify_io(self) } fn verify(&mut self, verify_packet: &dyn IoPacket) -> bool { self.target.clone().verify(self, verify_packet) } fn get_error(&self) -> Result<(), Error> { match &self.io_result { Some(error) => Err(error.clone()), None => Ok(()), } } fn set_error(&mut self, io_error: Error) { self.io_result = Some(io_error); } fn buffer_mut(&mut self) -> &mut Vec<u8> { &mut self.buffer } fn buffer(&mut self) -> &Vec<u8> { &self.buffer } } pub struct FileBlockingTarget { #[allow(unused)] name: String, file: File, target_unique_id: u64, offset_range: Range<u64>, start_instant: Instant, } impl FileBlockingTarget { pub fn new( name: String, target_unique_id: u64, offset_range: Range<u64>, start_instant: Instant, ) -> TargetType { let file = OpenOptions::new().write(true).append(false).open(&name).unwrap(); Arc::new(Box::new(FileBlockingTarget { name, file, target_unique_id, offset_range, start_instant, })) } fn write(&self, io_packet: &mut dyn IoPacket) { let offset_range = io_packet.io_offset_range().clone(); if offset_range.start < self.offset_range.start || offset_range.end > self.offset_range.end { io_packet.set_error(Error::OffsetOutOfRange); return; } let raw_fd = self.file.as_raw_fd().clone(); let b = io_packet.buffer_mut(); let ret = pwrite(raw_fd, b, offset_range.start as i64); if let Err(err) = ret { return io_packet.set_error(err); } } fn open(&self, io_packet: &mut dyn IoPacket) { error!("open not yet supported {}", io_packet.sequence_number()); process::abort(); } fn exit(&self, io_packet: &mut dyn IoPacket) { debug!("Nothing to do for exit path {}", io_packet.sequence_number()); } } impl Target for FileBlockingTarget { fn setup(&mut self, _file_name: &String, _range: Range<u64>) -> Result<(), Error> { Ok(()) } fn create_io_packet( &self, operation_type: OperationType, seq: u64, seed: u64, io_offset_range: Range<u64>, target: TargetType, ) -> IoPacketType { 
Box::new(FileIoPacket::new(operation_type, seq, seed, io_offset_range, target)) } fn id(&self) -> u64 { self.target_unique_id } fn supported_ops() -> &'static TargetOps where Self: Sized, { &TargetOps { write: true, open: false } } fn allowed_ops() -> &'static TargetOps where Self: Sized, { &TargetOps { write: true, open: false } } fn do_io(&self, io_packet: &mut dyn IoPacket) { match io_packet.operation_type() { OperationType::Write => self.write(io_packet), OperationType::Open => self.open(io_packet), OperationType::Exit => self.exit(io_packet), _ => { error!("Unsupported operation"); process::abort(); } }; } fn is_complete(&self, io_packet: &dyn IoPacket) -> bool { match io_packet.operation_type() { OperationType::Write | OperationType::Open | OperationType::Exit => true, _ => { error!("Complete for unsupported operation"); process::abort(); } } } fn verify_needs_io(&self, io_packet: &dyn IoPacket) -> bool { match io_packet.operation_type() { OperationType::Write | OperationType::Open | OperationType::Exit => false, _ => { error!("verify_needs_io for unsupported operation"); process::abort(); } } } fn generate_verify_io(&self, io_packet: &mut dyn IoPacket) { match io_packet.operation_type() { _ => { error!("generate_verify_io for unsupported operation"); process::abort(); } }; } fn verify(&self, io_packet: &mut dyn IoPacket, _verify_packet: &dyn IoPacket) -> bool { match io_packet.operation_type() { OperationType::Write | OperationType::Exit => true, _ => { error!("verify for unsupported operation"); process::abort(); } } } fn start_instant(&self) -> Instant { self.start_instant } } #[cfg(test)] mod tests { use { crate::file_target::FileBlockingTarget, crate::operations::OperationType, crate::target::{Error, TargetType}, std::{fs, fs::File, time::Instant}, }; static FILE_LENGTH: u64 = 1 * 1024 * 1024; fn setup(file_name: &String) -> TargetType { let f = File::create(&file_name).unwrap(); f.set_len(FILE_LENGTH).unwrap(); let start_instant = Instant::now(); 
FileBlockingTarget::new(file_name.to_string(), 0, 0..FILE_LENGTH, start_instant) } fn teardown(file_name: &String) { fs::remove_file(file_name).unwrap(); } #[test] fn simple_write() { let file_name = "/tmp/odu-file_target-simple_write-file01".to_string(); let target = setup(&file_name); let mut io_packet = target.create_io_packet(OperationType::Write, 0, 0, 0..4096, target.clone()); let mut _buffer = io_packet.buffer_mut(); io_packet.do_io(); assert_eq!(io_packet.is_complete(), true); io_packet.get_error().unwrap(); teardown(&file_name); } #[test] fn write_failure() { let file_name = "/tmp/odu-file_target-write_failure-file01".to_string(); let target = setup(&file_name); let mut io_packet = target.create_io_packet( OperationType::Write, 0, 0, (2 * FILE_LENGTH)..(3 * FILE_LENGTH), target.clone(), ); let mut _buffer = io_packet.buffer_mut(); io_packet.do_io(); assert_eq!(io_packet.is_complete(), true); assert_eq!(io_packet.get_error().is_err(), true); assert_eq!(io_packet.get_error().err(), Some(Error::OffsetOutOfRange)); teardown(&file_name); } }
use { crate::common_operations::pwrite, crate::io_packet::{IoPacket, IoPacketType, TimeInterval}, crate::operations::{OperationType, PipelineStages}, crate::target::{Error, Target, TargetOps, TargetType}, log::debug, log::error, std::{ fs::{File, OpenOptions}, ops::Range, os::unix::io::AsRawFd, process, result::Result, sync::Arc, time::Instant, }, }; #[derive(Clone)] pub struct FileIoPacket { io_sequence_number: u64, seed: u64, stage_timestamps: [TimeInterval; PipelineStages::stage_count()], operation_type: OperationType, offset_range: Range<u64>, io_result: Option<Error>, target: TargetType, buffer: Vec<u8>, } impl FileIoPacket { pub fn new( operation_type: OperationType, io_sequence_number: u64, seed: u64, offset_range: Range<u64>, target: TargetType, ) -> FileIoPacket { let buffer = vec![0; offset_range.end as usize - offset_range.start as usize]; FileIoPacket { operation_type, io_sequence_number, seed, stage_timestamps: [TimeInterval::new(); PipelineStages::stage_count()], offset_range: offset_range.clone(), io_result: None, target, buffer, } } } impl IoPacket for FileIoPacket { fn operation_type(&self) -> OperationType { self.operation_type } fn timestamp_stage_start(&mut self, stage: PipelineStages) { self.stage_timestamps[stage.stage_number()].start(); } fn timestamp_stage_end(&mut self, stage: PipelineStages) { self.stage_timestamps[stage.stage_number()].end(); } fn sequence_number(&self) -> u64 { self.io_sequence_number } fn stage_timestamps(&self) -> &[TimeInterval; PipelineStages::stage_count()] { &self.stage_timestamps } fn interval_to_u64(&self, stage: PipelineStages) -> (u64, u64) { self.stage_timestamps[stage.stage_number()].interval_to_u64(&self.target.start_instant()) } fn io_offset_range(&self) -> Range<u64> { self.offset_range.clone() } fn do_io(&mut self) { self.target.clone().do_io(self) } fn is_complete(&self) -> bool { self.target.clone().is_complete(self) } fn verify_needs_io(&self) -> bool { self.target.clone().verify_needs_io(self) } fn 
generate_verify_io(&mut self) { self.target.clone().generate_verify_io(self) } fn verify(&mut self, verify_packet: &dyn IoPacket) -> bool { self.target.clone().verify(self, verify_packet) } fn get_error(&self) -> Result<(), Error> { match &self.io_result { Some(error) => Err(error.clone()), None => Ok(()), } } fn set_error(&mut self, io_error: Error) { self.io_result = Some(io_error); } fn buffer_mut(&mut self) -> &mut Vec<u8> { &mut self.buffer } fn buffer(&mut self) -> &Vec<u8> { &self.buffer } } pub struct FileBlockingTarget { #[allow(unused)] name: String, file: File, target_unique_id: u64, offset_range: Range<u64>, start_instant: Instant, } impl FileBlockingTarget { pub fn new( name: String, target_unique_id: u64, offset_range: Range<u64>, start_instant: Instant, ) -> TargetType { let file = OpenOptions::new().write(true).append(false).open(&name).unwrap(); Arc::new(Box::new(FileBlockingTarget { name, file, target_unique_id, offset_range, start_instant, })) } fn write(&self, io_packet: &mut dyn IoPacket) { let offset_range = io_packet.io_offset_range().clone(); if offset_range.start < self.offset_range.start || offset_range.end > self.offset_range.end { io_packet.set_error(Error::OffsetOutOfRange); return; } let raw_fd = self.file.as_raw_fd().clone(); let b = io_packet.buffer_mut(); let ret = pwrite(raw_fd, b, offset_range.start as i64); if let Err(err) = ret { return io_packet.set_error(err); } } fn open(&self, io_packet: &mut dyn IoPacket) { error!("open not yet supported {}", io_packet.sequence_number()); process::abort(); } fn exit(&self, io_packet: &mut dyn IoPacket) { debug!("Nothing to do for exit path {}", io_packet.sequence_number()); } } impl Target for FileBlockingTarget { fn setup(&mut self, _file_name: &String, _range: Range<u64>) -> Result<(), Error> { Ok(()) } fn create_io_packet( &self, operation_type: OperationType, seq: u64,
tionType::Write | OperationType::Exit => true, _ => { error!("verify for unsupported operation"); process::abort(); } } } fn start_instant(&self) -> Instant { self.start_instant } } #[cfg(test)] mod tests { use { crate::file_target::FileBlockingTarget, crate::operations::OperationType, crate::target::{Error, TargetType}, std::{fs, fs::File, time::Instant}, }; static FILE_LENGTH: u64 = 1 * 1024 * 1024; fn setup(file_name: &String) -> TargetType { let f = File::create(&file_name).unwrap(); f.set_len(FILE_LENGTH).unwrap(); let start_instant = Instant::now(); FileBlockingTarget::new(file_name.to_string(), 0, 0..FILE_LENGTH, start_instant) } fn teardown(file_name: &String) { fs::remove_file(file_name).unwrap(); } #[test] fn simple_write() { let file_name = "/tmp/odu-file_target-simple_write-file01".to_string(); let target = setup(&file_name); let mut io_packet = target.create_io_packet(OperationType::Write, 0, 0, 0..4096, target.clone()); let mut _buffer = io_packet.buffer_mut(); io_packet.do_io(); assert_eq!(io_packet.is_complete(), true); io_packet.get_error().unwrap(); teardown(&file_name); } #[test] fn write_failure() { let file_name = "/tmp/odu-file_target-write_failure-file01".to_string(); let target = setup(&file_name); let mut io_packet = target.create_io_packet( OperationType::Write, 0, 0, (2 * FILE_LENGTH)..(3 * FILE_LENGTH), target.clone(), ); let mut _buffer = io_packet.buffer_mut(); io_packet.do_io(); assert_eq!(io_packet.is_complete(), true); assert_eq!(io_packet.get_error().is_err(), true); assert_eq!(io_packet.get_error().err(), Some(Error::OffsetOutOfRange)); teardown(&file_name); } }
seed: u64, io_offset_range: Range<u64>, target: TargetType, ) -> IoPacketType { Box::new(FileIoPacket::new(operation_type, seq, seed, io_offset_range, target)) } fn id(&self) -> u64 { self.target_unique_id } fn supported_ops() -> &'static TargetOps where Self: Sized, { &TargetOps { write: true, open: false } } fn allowed_ops() -> &'static TargetOps where Self: Sized, { &TargetOps { write: true, open: false } } fn do_io(&self, io_packet: &mut dyn IoPacket) { match io_packet.operation_type() { OperationType::Write => self.write(io_packet), OperationType::Open => self.open(io_packet), OperationType::Exit => self.exit(io_packet), _ => { error!("Unsupported operation"); process::abort(); } }; } fn is_complete(&self, io_packet: &dyn IoPacket) -> bool { match io_packet.operation_type() { OperationType::Write | OperationType::Open | OperationType::Exit => true, _ => { error!("Complete for unsupported operation"); process::abort(); } } } fn verify_needs_io(&self, io_packet: &dyn IoPacket) -> bool { match io_packet.operation_type() { OperationType::Write | OperationType::Open | OperationType::Exit => false, _ => { error!("verify_needs_io for unsupported operation"); process::abort(); } } } fn generate_verify_io(&self, io_packet: &mut dyn IoPacket) { match io_packet.operation_type() { _ => { error!("generate_verify_io for unsupported operation"); process::abort(); } }; } fn verify(&self, io_packet: &mut dyn IoPacket, _verify_packet: &dyn IoPacket) -> bool { match io_packet.operation_type() { Opera
random
[]
Rust
src/raytracer/ray.rs
infinityb/rust-raytracer
4177c241c4630b822a308d982894134f0751d7d2
use std::f64::INFINITY; use raytracer::Intersection; use scene::Scene; use vec3::Vec3; #[cfg(test)] use geometry::prim::Prim; #[cfg(test)] use geometry::prims::Sphere; #[cfg(test)] use light::light::Light; #[cfg(test)] use material::materials::FlatMaterial; pub struct Ray { pub origin: Vec3, pub direction: Vec3, pub inverse_dir: Vec3, pub signs: [bool; 3], } impl Ray { pub fn new(origin: Vec3, direction: Vec3) -> Ray { let inv_x = 1.0 / direction.x; let inv_y = 1.0 / direction.y; let inv_z = 1.0 / direction.z; Ray { origin: origin, direction: direction, inverse_dir: Vec3 { x: inv_x, y: inv_y, z: inv_z }, signs: [ inv_x > 0.0, inv_y > 0.0, inv_z > 0.0 ] } } pub fn get_nearest_hit<'a>(&'a self, scene: &'a Scene) -> Option<Intersection<'a>> { let t_min = 0.000001; let mut nearest_hit = None; let mut nearest_t = INFINITY; for prim in scene.octree.intersect_iter(self) { let intersection = prim.intersects(self, t_min, nearest_t); nearest_hit = match intersection { Some(intersection) => { if intersection.t > t_min && intersection.t < nearest_t { nearest_t = intersection.t; Some(intersection) } else { nearest_hit } }, None => nearest_hit }; } nearest_hit } pub fn perturb(&self, magnitude: f64) -> Ray { let rand_vec = Vec3::random() * magnitude; let corrected_rand_vec = if rand_vec.dot(&self.direction) < 0.0 { rand_vec * -1.0 } else { rand_vec }; let direction = (corrected_rand_vec + self.direction).unit(); Ray::new(self.origin, direction) } } #[test] fn it_gets_the_nearest_hit() { let lights: Vec<Box<Light+Send+Sync>> = Vec::new(); let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new(); let mat = FlatMaterial { color: Vec3::one() }; let sphere_top = Sphere { center: Vec3::zero(), radius: 1.0, material: Box::new(mat.clone()), }; let sphere_mid = Sphere { center: Vec3 { x: -1.0, y: 0.0, z: 0.0 }, radius: 1.0, material: Box::new(mat.clone()), }; let sphere_bot = Sphere { center: Vec3 { x: -2.0, y: 0.0, z: 0.0 }, radius: 1.0, material: Box::new(mat.clone()), }; 
prims.push(Box::new(sphere_top)); prims.push(Box::new(sphere_mid)); prims.push(Box::new(sphere_bot)); println!("Generating octree..."); let octree = prims.into_iter().collect(); println!("Octree generated..."); let scene = Scene { lights: lights, background: Vec3::one(), octree: octree, skybox: None }; let intersecting_ray = Ray::new( Vec3 { x: 10.0, y: 0.0, z: 0.0 }, Vec3 { x: -1.0, y: 0.0, z: 0.0 } ); let intersection = intersecting_ray.get_nearest_hit(&scene); assert_eq!(1.0, intersection.unwrap().position.x); let non_intersecting_ray = Ray::new( Vec3 { x: 10.0, y: 0.0, z: 0.0 }, Vec3 { x: 1.0, y: 0.0, z: 0.0 }); let non_intersection = non_intersecting_ray.get_nearest_hit(&scene); assert!(non_intersection.is_none()); }
use std::f64::INFINITY; use raytracer::Intersection; use scene::Scene; use vec3::Vec3; #[cfg(test)] use geometry::prim::Prim; #[cfg(test)] use geometry::prims::Sphere; #[cfg(test)] use light::light::Light; #[cfg(test)] use material::materials::FlatMaterial; pub struct Ray { pub origin: Vec3, pub direction: Vec3, pub inverse_dir: Vec3, pub signs: [bool; 3], } impl Ray { pub fn new(origin: Vec3, direction: Vec3) -> Ray { let inv_x = 1.0 / direction.x; let inv_y = 1.0 / direction.y; let inv_z = 1.0 / direction.z; Ray { origin: origin, direction: direction, inverse_dir: Vec3 { x: inv_x, y: inv_y, z: inv_z }, signs: [ inv_x > 0.0, inv_y > 0.0, inv_z > 0.0 ] } } pub fn get_nearest_hit<'a>(&'a self, scene: &'a Scene) -> Option<Intersection<'a>> { let t_min = 0.000001; let mut nearest_hit = None; let mut nearest_t = INFINITY; for prim in scene.octree.intersect_iter(self) { let intersection = prim.intersects(self, t_min, nearest_t); nearest_hit = match intersection { Some(intersection) => { if intersection.t > t_min && intersection.t < nearest_t { nearest_t = intersection.t; Some(intersection) } else { nearest_hit } }, None => nearest_hit }; } nearest_hit } pub fn perturb(&self, magnitude: f64) -> Ray { let rand_vec = Vec3::random() * magnitude; let corrected_rand_vec = if rand_vec.dot(&self.direction) < 0.0 { rand_vec * -1.0 } else { rand_vec }; let direction = (corrected_rand_vec + self.direction).unit(); Ray::new(self.origin, direction) } } #[test]
fn it_gets_the_nearest_hit() { let lights: Vec<Box<Light+Send+Sync>> = Vec::new(); let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new(); let mat = FlatMaterial { color: Vec3::one() }; let sphere_top = Sphere { center: Vec3::zero(), radius: 1.0, material: Box::new(mat.clone()), }; let sphere_mid = Sphere { center: Vec3 { x: -1.0, y: 0.0, z: 0.0 }, radius: 1.0, material: Box::new(mat.clone()), }; let sphere_bot = Sphere { center: Vec3 { x: -2.0, y: 0.0, z: 0.0 }, radius: 1.0, material: Box::new(mat.clone()), }; prims.push(Box::new(sphere_top)); prims.push(Box::new(sphere_mid)); prims.push(Box::new(sphere_bot)); println!("Generating octree..."); let octree = prims.into_iter().collect(); println!("Octree generated..."); let scene = Scene { lights: lights, background: Vec3::one(), octree: octree, skybox: None }; let intersecting_ray = Ray::new( Vec3 { x: 10.0, y: 0.0, z: 0.0 }, Vec3 { x: -1.0, y: 0.0, z: 0.0 } ); let intersection = intersecting_ray.get_nearest_hit(&scene); assert_eq!(1.0, intersection.unwrap().position.x); let non_intersecting_ray = Ray::new( Vec3 { x: 10.0, y: 0.0, z: 0.0 }, Vec3 { x: 1.0, y: 0.0, z: 0.0 }); let non_intersection = non_intersecting_ray.get_nearest_hit(&scene); assert!(non_intersection.is_none()); }
function_block-full_function
[ { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 8.0, y: 8.0, z: 0.0 }, color: Vec3 { x: 1.0, y: 0.8, z: 0.4}, radius: 0.5 }));\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 8.0, y: -5.0, z: 0.0 }, color: Vec3 { x: 0.5, y: 0.4, z: 0.2}, radius: 1.0 }));\n\n lights.push(Box::new(PointLight { position: Vec3 { x: -16.0, y: -14.5, z: -2.0 }, color: Vec3 { x: 0.15, y: 0.07, z: 0.05 } }));\n\n\n\n\n\n let checker: Box<Texture+Send+Sync> = Box::new(CheckerTexture { color1: ColorRGBA::white(), color2: ColorRGBA::new_rgb(0.15, 0.11, 0.1), scale: 1.0 });\n\n\n\n let stone = CookTorranceMaterial { k_a: 0.1, k_d: 0.8, k_s: 0.2, k_sg: 0.0, k_tg: 0.0, gauss_constant: 25.0, roughness: 1.0, glossiness: 0.0, ior: 1.5, ambient: Vec3 { x: 0.88, y: 0.83, z: 0.77 }, diffuse: Vec3 { x: 0.88, y: 0.83, z: 0.77 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let ground = CookTorranceMaterial { k_a: 0.03, k_d: 0.9, k_s: 0.3, k_sg: 0.5, k_tg: 0.0, gauss_constant: 25.0, roughness: 0.1, glossiness: 0.0, ior: 0.5, ambient: Vec3::one(), diffuse: Vec3 { x: 0.38, y: 0.38, z: 0.5 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: Some(checker.clone()) };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n prims.push(Box::new(Plane { a: 0.0, b: -1.0, c: 0.0, d: -14.9, material: Box::new(ground.clone()) }));\n\n\n\n let sibenik = ::util::import::from_obj(stone, false, \"./docs/assets/models/sibenik.obj\").ok().expect(\"failed to load obj model\");;\n\n for triangle in sibenik.triangles.into_iter() { prims.push(triangle); }\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n\n Scene {\n\n lights: lights,\n\n octree: octree,\n\n background: Vec3 { x: 0.5, y: 0.5, z: 0.5 },\n\n skybox: None\n\n }\n\n}\n", "file_path": 
"src/my_scene/sibenik.rs", "rank": 0, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 3.0, y: 10.0, z: 6.0 }, color: Vec3::one(), radius: 5.0 }));\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n let shiny = CookTorranceMaterial { k_a: 0.0, k_d: 0.2, k_s: 1.0, k_sg: 1.0, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 0.05, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3::zero(), diffuse_texture: None };\n\n prims.push(Box::new(Sphere { center: Vec3::zero(), radius: 2.0, material: Box::new(shiny) }));\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n\n // For y as up\n\n Scene {\n\n lights: lights,\n\n background: Vec3 { x: 0.3, y: 0.5, z: 0.8 },\n\n octree: octree,\n\n skybox: Some(CubeMap::load(\n\n \"./docs/assets/textures/skyboxes/storm_y_up/left.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/right.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/down.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/up.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/front.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/back.png\"\n\n ))\n\n }\n\n}\n", "file_path": "src/my_scene/sphere.rs", "rank": 1, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 50.0, y: 80.0, z: 50.0 }, color: Vec3::one(), radius: 10.0 }));\n\n\n\n\n\n let checker: Box<Texture+Send+Sync> = Box::new(CheckerTexture { color1: ColorRGBA::white(), color2: ColorRGBA::new_rgb(0.1, 0.1, 0.1), scale: 32.0 });\n\n let checker_red = CookTorranceMaterial { k_a: 0.0, k_d: 1.0, k_s: 0.0, k_sg: 0.0, 
k_tg: 0.0, gauss_constant: 1.0, roughness: 0.15, glossiness: 0.0, ior: 1.5, ambient: Vec3::one(), diffuse: Vec3 { x: 0.6, y: 0.6, z: 0.6 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: Some(checker.clone()) };\n\n let shiny = CookTorranceMaterial { k_a: 0.0, k_d: 0.2, k_s: 1.0, k_sg: 1.0, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 0.15, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3::zero(), diffuse_texture: None };\n\n let global_specular_only = CookTorranceMaterial { k_a: 0.0, k_d: 0.0, k_s: 0.0, k_sg: 1.0, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 1.5, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3::zero(), diffuse_texture: None };\n\n let refract = CookTorranceMaterial { k_a: 0.0, k_d: 0.0, k_s: 1.0, k_sg: 1.0, k_tg: 1.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 3.0, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3::zero(), diffuse_texture: None };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n prims.push(Box::new(Plane { a: 0.0, b: 0.0, c: 1.0, d: 0.0, material: Box::new(checker_red.clone()) })); // Ahead\n\n prims.push(Box::new(Plane { a: 0.0, b: 1.0, c: 0.0, d: 0.0, material: Box::new(global_specular_only.clone()) })); // Bottom\n\n prims.push(Box::new(Sphere { center: Vec3 {x: 30.0, y: 15.0, z: 20.0 }, radius: 15.0, material: Box::new(shiny.clone()) }));\n\n prims.push(Box::new(Sphere { center: Vec3 {x: 70.0, y: 17.0, z: 60.0 }, radius: 17.0, material: Box::new(refract.clone()) }));\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n\n Scene {\n\n lights: lights,\n\n octree: octree,\n\n background: Vec3 { x: 1.0, y: 1.0, z: 1.0 
},\n\n skybox: None\n\n }\n\n}\n", "file_path": "src/my_scene/fresnel.rs", "rank": 2, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight {position: Vec3 { x: 50.0, y: 80.0, z: 50.0 }, color: Vec3::one(), radius: 10.0 }));\n\n\n\n // Example of a textured material\n\n let checker: Box<Texture+Send+Sync> = Box::new(CheckerTexture { color1: ColorRGBA::white(), color2: ColorRGBA::new_rgb(0.8, 0.1, 0.1), scale: 16.0 });\n\n let checker_grey = CookTorranceMaterial { k_a: 0.0, k_d: 1.0, k_s: 0.0, k_sg: 0.0, k_tg: 0.0, gauss_constant: 1.0, roughness: 0.15, glossiness: 0.0, ior: 0.7, ambient: Vec3::one(), diffuse: Vec3 { x: 0.6, y: 0.6, z: 0.6 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: Some(checker.clone()) };\n\n\n\n // Example of a short-form material definition using defaults\n\n // let grey = CookTorranceMaterial { k_a: 0.0, k_d: 1.0, k_s: 1.0, k_sg: 0.0, k_tg: 0.0, gauss_constant: 1.0, roughness: 0.15, glossiness: 0.0, ior: 1.5, ambient: Vec3::one(), diffuse: Vec3 { x: 0.6, y: 0.6, z: 0.6 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let grey = CookTorranceMaterial { diffuse: Vec3 { x: 0.6, y: 0.6, z: 0.6 }, ..Default::default() };\n\n\n\n let blue = CookTorranceMaterial { k_a: 0.0, k_d: 0.3, k_s: 0.7, k_sg: 0.0, k_tg: 0.0, gauss_constant: 50.0, roughness: 0.1, glossiness: 0.0, ior: 1.3, ambient: Vec3::one(), diffuse: Vec3 { x: 0.1, y: 0.1, z: 1.0 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let red = PhongMaterial { k_a: 0.0, k_d: 0.6, k_s: 0.4, k_sg: 0.8, k_tg: 0.0, shininess: 10.0, glossiness: 0.0, ior: 0.5, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 0.0, z: 0.0 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let green = PhongMaterial { k_a: 0.0, k_d: 0.9, k_s: 0.1, k_sg: 0.5, k_tg: 0.0, shininess: 10.0, 
glossiness: 0.0, ior: 0.7, ambient: Vec3::one(), diffuse: Vec3 { x: 0.0, y: 1.0, z: 0.0 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let shiny = CookTorranceMaterial { k_a: 0.0, k_d: 0.2, k_s: 1.0, k_sg: 0.8, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 0.25, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3::zero(), diffuse_texture: None };\n\n let shiny_glossy = CookTorranceMaterial { k_a: 0.0, k_d: 0.7, k_s: 1.0, k_sg: 0.4, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.2, ior: 0.25, ambient: Vec3::one(), diffuse: Vec3 { x: 0.3, y: 0.3, z: 1.0 }, specular: Vec3 { x: 0.3, y: 0.3, z: 1.0 }, transmission: Vec3::zero(), diffuse_texture: None };\n\n let refract = CookTorranceMaterial { k_a: 0.0, k_d: 0.0, k_s: 1.0, k_sg: 1.0, k_tg: 1.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 3.0, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3 { x: 0.8, y: 0.8, z: 0.8 }, diffuse_texture: None };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n", "file_path": "src/my_scene/cornell.rs", "rank": 3, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 0.0, y: 100.0, z: 0.0 }, color: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, radius: 25.0 }));\n\n\n\n let blue = CookTorranceMaterial { k_a: 0.0, k_d: 0.9, k_s: 1.0, k_sg: 0.4, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 0.25, ambient: Vec3::one(), diffuse: Vec3 { x: 0.16, y: 0.29, z: 0.44 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let floor = CookTorranceMaterial { k_a: 0.0, k_d: 0.9, k_s: 1.0, k_sg: 1.0, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.3, glossiness: 0.0, ior: 1.0, 
ambient: Vec3::one(), diffuse: Vec3 { x: 0.58, y: 0.63, z: 0.44 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3::zero(), diffuse_texture: None };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n prims.push(Box::new(Plane { a: 0.0, b: 1.0, c: 0.0, d: 0.0, material: Box::new(floor.clone()) })); // Bottom\n\n\n\n let tachikoma = ::util::import::from_obj(blue, false, \"./docs/assets/models/tachikoma.obj\").ok().expect(\"failed to load obj model\");;\n\n for triangle in tachikoma.triangles.into_iter() { prims.push(triangle); }\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n\n Scene {\n\n lights: lights,\n\n octree: octree,\n", "file_path": "src/my_scene/tachikoma.rs", "rank": 4, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 {x: 3.0, y: 10.0, z: 6.0}, color: Vec3::one(), radius: 5.0 }));\n\n\n\n let red = CookTorranceMaterial { k_a: 0.0, k_d: 0.6, k_s: 1.0, k_sg: 0.2, k_tg: 0.0, gauss_constant: 30.0, roughness: 0.1, glossiness: 0.0, ior: 0.8, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 0.25, z: 0.1 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let green = CookTorranceMaterial { k_a: 0.0, k_d: 0.5, k_s: 0.4, k_sg: 0.1, k_tg: 0.0, gauss_constant: 25.0, roughness: 0.4, glossiness: 0.0, ior: 0.95, ambient: Vec3::one(), diffuse: Vec3 { x: 0.2, y: 0.7, z: 0.2 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n prims.push(Box::new(Plane { a: 0.0, b: 1.0, c: 0.0, d: 3.6, material: Box::new(green) }));\n\n let cow = ::util::import::from_obj(red, true, \"./docs/assets/models/cow.obj\").ok().expect(\"failed to load obj model\");;\n\n for triangle in cow.triangles.into_iter() { 
prims.push(triangle); }\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n\n Scene {\n\n lights: lights,\n\n octree: octree,\n\n background: Vec3 { x: 0.3, y: 0.5, z: 0.8 },\n\n skybox: None\n\n }\n\n}\n", "file_path": "src/my_scene/cow.rs", "rank": 5, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 200.0, y: -200.0, z: 100.0 }, color: Vec3::one(), radius: 40.0 }));\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: -95.0, y: 20.0, z: 170.0 }, color: Vec3 { x: 0.5, y: 0.5, z: 0.3 }, radius: 15.0 }));\n\n\n\n let red = CookTorranceMaterial { k_a: 0.1, k_d: 0.4, k_s: 0.5, k_sg: 0.5, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.05, glossiness: 0.0, ior: 0.98, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 0.25, z: 0.1 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None};\n\n let green = CookTorranceMaterial { k_a: 0.0, k_d: 0.4, k_s: 0.6, k_sg: 0.7, k_tg: 0.0, gauss_constant: 50.0, roughness: 0.3, glossiness: 0.0, ior: 1.5, ambient: Vec3::one(), diffuse: Vec3 { x: 0.2, y: 0.7, z: 0.2 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None};\n\n let shiny = CookTorranceMaterial { k_a: 0.0, k_d: 0.2, k_s: 0.7, k_sg: 1.0, k_tg: 0.0, gauss_constant: 25.0, roughness: 0.01, glossiness: 0.0, ior: 0.2, ambient: Vec3::one(), diffuse: Vec3 { x: 0.9, y: 0.9, z: 0.1 }, specular: Vec3 {x: 0.9, y: 0.9, z: 0.1}, transmission: Vec3::zero(), diffuse_texture: None};\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n prims.push(Box::new(Plane { a: 0.0, b: 0.0, c: 1.0, d: -10.0, material: Box::new(green)}));\n\n prims.push(Box::new(Sphere { center: Vec3 { x: -75.0, y: 60.0, z: 50.0 }, radius: 40.0, material: Box::new(shiny.clone()) }));\n\n prims.push(Box::new(Sphere { center: Vec3 { x: -75.0, 
y: 60.0, z: 140.0 }, radius: 40.0, material: Box::new(shiny.clone()) }));\n\n let bunny = ::util::import::from_obj(red, false, \"./docs/assets/models/bunny.obj\").ok().expect(\"failed to load obj model\");\n\n for triangle in bunny.triangles.into_iter() { prims.push(triangle); }\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n", "file_path": "src/my_scene/bunny.rs", "rank": 6, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 0.0, y: 3000.0, z: 1000.0 }, color: Vec3 { x: 1.0, y: 0.8, z: 0.4 }, radius: 50.0 }));\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 300.0, y: 300.0, z: 60.0 }, color: Vec3 { x: 0.38, y: 0.32, z: 0.28 }, radius: 20.0 }));\n\n\n\n let checker: Box<Texture+Send+Sync> = Box::new(CheckerTexture { color1: ColorRGBA::white(), color2: ColorRGBA::new_rgb(0.15, 0.11, 0.1), scale: 32.0 });\n\n\n\n let stone = CookTorranceMaterial { k_a: 0.1, k_d: 0.8, k_s: 0.2, k_sg: 0.2, k_tg: 0.0, gauss_constant: 50.0, roughness: 1.0, glossiness: 0.0, ior: 1.5, ambient: Vec3 { x: 0.88, y: 0.83, z: 0.77 }, diffuse: Vec3 { x: 0.88, y: 0.83, z: 0.77 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n let ground = CookTorranceMaterial { k_a: 0.03, k_d: 0.9, k_s: 0.3, k_sg: 0.5, k_tg: 0.0, gauss_constant: 25.0, roughness: 0.1, glossiness: 0.0, ior: 0.5, ambient: Vec3::one(), diffuse: Vec3 { x: 0.38, y: 0.38, z: 0.5 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: Some(checker.clone()) };\n\n let cloth = CookTorranceMaterial { k_a: 0.03, k_d: 0.8, k_s: 0.1, k_sg: 0.05, k_tg: 0.0, gauss_constant: 40.0, roughness: 0.8, glossiness: 0.0, ior: 1.3, ambient: Vec3::one(), diffuse: Vec3 { x: 0.85, y: 0.05, z: 0.05 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None 
};\n\n let shrubbery = CookTorranceMaterial { k_a: 0.03, k_d: 0.8, k_s: 0.2, k_sg: 0.05, k_tg: 0.0, gauss_constant: 50.0, roughness: 0.2, glossiness: 0.0, ior: 1.2, ambient: Vec3::one(), diffuse: Vec3 { x: 0.16, y: 0.47, z: 0.11 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n prims.push(Box::new(Plane { a: 0.0, b: 1.0, c: 0.0, d: 0.0, material: Box::new(ground) }));\n\n\n\n let sponza_other = ::util::import::from_obj(stone, false, \"./docs/assets/models/sponza_other.obj\").ok().expect(\"failed to load obj model\");;\n\n for triangle in sponza_other.triangles.into_iter() { prims.push(triangle); }\n\n\n\n let sponza_column_shrubbery = ::util::import::from_obj(shrubbery, false, \"./docs/assets/models/sponza_column_shrubbery.obj\").ok().expect(\"failed to load obj model\");;\n\n for triangle in sponza_column_shrubbery.triangles.into_iter() { prims.push(triangle); }\n", "file_path": "src/my_scene/sponza.rs", "rank": 7, "score": 173810.91190649226 }, { "content": "pub fn get_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: -1400.0, y: 200.0, z: 100.0 }, color: Vec3 { x: 1.0, y: 0.80, z: 0.40 }, radius: 50.0 }));\n\n\n\n let grey = CookTorranceMaterial { k_a: 0.0, k_d: 0.5, k_s: 0.8, k_sg: 0.5, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.1, glossiness: 0.0, ior: 0.4, ambient: Vec3::one(), diffuse: Vec3 { x: 0.6, y: 0.6, z: 0.65 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n let lucy = ::util::import::from_obj(grey, true, \"./docs/assets/models/lucy.obj\").ok().expect(\"failed to load obj model\");;\n\n for triangle in lucy.triangles.into_iter() { prims.push(triangle); }\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree 
generated...\");\n\n\n\n Scene {\n\n lights: lights,\n\n octree: octree,\n\n background: Vec3 { x: 0.84, y: 0.34, z: 0.0 },\n\n skybox: Some(CubeMap::load(\n\n \"./docs/assets/textures/skyboxes/storm_y_up/left.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/right.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/down.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/up.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/front.png\",\n\n \"./docs/assets/textures/skyboxes/storm_y_up/back.png\"\n\n ))\n\n }\n\n}\n", "file_path": "src/my_scene/lucy.rs", "rank": 8, "score": 173810.91190649226 }, { "content": "fn get_auto_normals(v: [Vec3; 3]) -> [Vec3; 3] {\n\n let n = (v[1] - v[0]).cross(&(v[2] - v[0]));\n\n [n, n, n]\n\n}\n\n\n\nimpl TriangleOptions { \n\n pub fn new(v0: Vec3, v1: Vec3, v2: Vec3) -> TriangleOptions {\n\n TriangleOptions {\n\n vertices: [v0, v1, v2],\n\n normals: None,\n\n texinfo: None,\n\n material: None,\n\n }\n\n }\n\n\n\n /// In the default case, all three normals at vertices are perpendicular\n\n /// to the triangle plane.\n\n pub fn normals(&mut self, normals: [Vec3; 3]) -> &mut Self {\n\n self.normals = Some(normals);\n\n self\n", "file_path": "src/geometry/prims/triangle.rs", "rank": 9, "score": 172564.57596140302 }, { "content": "pub fn get_teapot_scene() -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 0.6, y: 2.0, z: 1.2 }, color: Vec3::one(), radius: 1.0 }));\n\n\n\n let porcelain = CookTorranceMaterial { k_a: 0.0, k_d: 0.9, k_s: 1.0, k_sg: 1.0, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.1, glossiness: 0.0, ior: 1.1, ambient: Vec3::one(), diffuse: Vec3 { x: 0.9, y: 0.85, z: 0.7 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n // prims.push(Box::new(Plane { a: 0.0, b: 1.0, c: 0.0, d: 0.0, material: Box::new(green) }));\n\n let mut teapot = 
::util::import::from_obj(porcelain, false, \"./docs/assets/models/teapot.obj\").ok().expect(\"failed to load obj model\");;\n\n let rotate = Transform::new(Mat4::rotate_x_deg_matrix(1.0));\n\n teapot.mut_transform(&rotate);\n\n for triangle in teapot.triangles.into_iter() { prims.push(triangle); }\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n\n Scene {\n\n lights: lights,\n\n octree: octree,\n", "file_path": "src/my_scene/teapot.rs", "rank": 10, "score": 171210.16449395573 }, { "content": "pub fn get_scene(material_option: &str) -> Scene {\n\n let mut lights: Vec<Box<Light+Send+Sync>> = Vec::new();\n\n lights.push(Box::new(SphereLight { position: Vec3 { x: 2.0, y: 3.0, z: -2.0 }, color: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, radius: 1.0 }));\n\n\n\n // Defaults to white\n\n let heptoroid_material = match material_option {\n\n \"shiny\" => CookTorranceMaterial { k_a: 0.0, k_d: 0.2, k_s: 1.0, k_sg: 0.55, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 0.25, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3::zero(), diffuse_texture: None },\n\n \"refractive\" => CookTorranceMaterial { k_a: 0.0, k_d: 0.0, k_s: 1.0, k_sg: 1.0, k_tg: 1.0, gauss_constant: 5.0, roughness: 0.01, glossiness: 0.0, ior: 1.50, ambient: Vec3::one(), diffuse: Vec3 { x: 1.0, y: 1.0, z: 1.0 }, specular: Vec3 { x: 0.9, y: 0.9, z: 0.9 }, transmission: Vec3 { x: 0.8, y: 0.8, z: 0.8 }, diffuse_texture: None },\n\n _ => CookTorranceMaterial { k_a: 0.0, k_d: 0.9, k_s: 1.0, k_sg: 0.15, k_tg: 0.0, gauss_constant: 5.0, roughness: 0.1, ior: 0.5, glossiness: 0.0, ambient: Vec3::one(), diffuse: Vec3 { x: 0.9, y: 0.85, z: 0.7 }, specular: Vec3::one(), transmission: Vec3::zero(), diffuse_texture: None }\n\n };\n\n\n\n let mut prims: Vec<Box<Prim+Send+Sync>> = Vec::new();\n\n let heptoroid = 
::util::import::from_obj(heptoroid_material, false, \"./docs/assets/models/heptoroid.obj\").ok().expect(\"failed to load obj model\");;\n\n for triangle in heptoroid.triangles.into_iter() { prims.push(triangle); }\n\n\n\n println!(\"Generating octree...\");\n\n let octree = prims.into_iter().collect();\n\n println!(\"Octree generated...\");\n\n\n\n Scene {\n", "file_path": "src/my_scene/heptoroid.rs", "rank": 11, "score": 158782.72330751966 }, { "content": "/// Given two points, compute and return a new BBox that encompasses both points\n\npub fn union_points(p1: &Vec3, p2: &Vec3) -> BBox {\n\n BBox {\n\n min: Vec3 {\n\n x: p1.x.min(p2.x),\n\n y: p1.y.min(p2.y),\n\n z: p1.z.min(p2.z)\n\n },\n\n max: Vec3 {\n\n x: p1.x.max(p2.x),\n\n y: p1.y.max(p2.y),\n\n z: p1.z.max(p2.z)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 12, "score": 152456.16033382498 }, { "content": "// Skybox test scene\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n let up = Vec3 { x: 0.0, y: 1.0, z: 0.0 }; // y-up\n\n Camera::new(\n\n Vec3 { x: 0.0, y: 0.0, z: 10.0 },\n\n Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n up,\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n\n\n // let up = Vec3 { x: 0.0, y: 0.0, z: 1.0 }; // z-up\n\n // Camera::new(\n\n // Vec3 { x: 0.0, y: 10.0, z: 0.0 },\n\n // Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n // up,\n\n // fov,\n\n // image_width,\n\n // image_height\n\n // )\n\n}\n\n\n", "file_path": "src/my_scene/sphere.rs", "rank": 13, "score": 146754.89986193046 }, { "content": "// Fresnel test scene\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n let height = 50.0;\n\n\n\n Camera::new(\n\n Vec3 { x: 50.0, y: height, z: 250.0 },\n\n Vec3 { x: 50.0, y: 50.0, z: 50.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/fresnel.rs", "rank": 14, "score": 146754.89986193046 }, { "content": "// ~28000 triangles, 
complex scene with 2 lights\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: 800.0, y: 30.0, z: 90.0 },\n\n Vec3 { x: -500.0, y: 1000.0, z: -100.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/sponza.rs", "rank": 15, "score": 146750.9906601704 }, { "content": "// 10 primitives, octree is super inefficient for this scene\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: 50.0, y: 25.0, z: 150.0 },\n\n Vec3 { x: 50.0, y: 50.0, z: 50.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/cornell.rs", "rank": 16, "score": 146750.90868920684 }, { "content": "// ~70K triangles, no textures yet\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: -16.0, y: -14.5, z: -2.0 },\n\n Vec3 { x: 8.0, y: -3.0, z: 2.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n", "file_path": "src/my_scene/sibenik.rs", "rank": 17, "score": 146747.20589549124 }, { "content": "// 5000 polys, cow. 
Octree helps.\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: -2.0, y: 4.0, z: 10.0 },\n\n Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/cow.rs", "rank": 18, "score": 146747.20589549124 }, { "content": "// 300 polys, octree is slightly slower than no octree\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: 0.0, y: -150.0, z: 30.0 },\n\n Vec3 { x: 0.0, y: 60.0, z: 50.0 },\n\n Vec3 { x: 0.0, y: 0.0, z: 1.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/bunny.rs", "rank": 19, "score": 146747.2058954912 }, { "content": "// 50000 polys, model not included!\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: -1500.0, y: 300.0, z: 600.0 },\n\n Vec3 { x: 0.0, y: 400.0, z: -200.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/lucy.rs", "rank": 20, "score": 146747.2058954912 }, { "content": "// 114688 tris, 57302 verts\n\npub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: 7.0, y: 2.0, z: -6.0 },\n\n Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/heptoroid.rs", "rank": 21, "score": 146747.2058954912 }, { "content": "pub fn get_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: 100.0, y: 60.0, z: -150.0 },\n\n Vec3 { x: 0.0, y: 50.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/tachikoma.rs", "rank": 22, "score": 146747.2058954912 }, { "content": "// 2500 polys, 
marginal improvement from an octree\n\npub fn get_teapot_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new(\n\n Vec3 { x: -0.2, y: 1.0, z: 2.0 },\n\n Vec3 { x: 0.0, y: 0.6, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/teapot.rs", "rank": 23, "score": 144326.92390989268 }, { "content": "pub fn get_animation_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n // State at time t=0\n\n // A keyframe at time t=0 is automatically created when insert_keyframes is called\n\n let camera = Camera::new_with_keyframes(\n\n Vec3 { x: 0.0, y: 1.0, z: 250.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 50.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height,\n\n vec![\n\n CameraKeyframe {\n\n time: 2.5,\n\n position: Vec3 { x: 50.0, y: 100.0, z: 250.0 },\n\n look_at: Vec3 { x: 0.0, y: 1.0, z: 50.0 },\n\n up: Vec3 { x: 0.0, y: 1.0, z: 0.0 }\n\n },\n\n CameraKeyframe {\n\n time: 5.0,\n\n position: Vec3 { x: 0.0, y: 200.0, z: 250.0 },\n", "file_path": "src/my_scene/fresnel.rs", "rank": 24, "score": 144326.92390989265 }, { "content": "// 7s target length\n\npub fn get_animation_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n Camera::new_with_keyframes(\n\n Vec3 { x: -16.0, y: -14.5, z: -2.0 },\n\n Vec3 { x: 8.0, y: -3.0, z: 2.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height,\n\n vec![\n\n CameraKeyframe {\n\n time: 7.0,\n\n position: Vec3 { x: 8.0, y: -13.5, z: 0.2 },\n\n look_at: Vec3 { x: 8.5, y: 8.0, z: 2.0 },\n\n up: Vec3 { x: -0.9, y: 0.0, z: -0.7 }\n\n }\n\n ]\n\n )\n\n}\n\n\n", "file_path": "src/my_scene/sibenik.rs", "rank": 25, "score": 144326.92390989265 }, { "content": "pub fn get_animation_camera(image_width: u32, image_height: u32, fov: f64) -> Camera {\n\n // State at time t=0\n\n // A keyframe at time t=0 is automatically created when insert_keyframes is 
called\n\n let camera = Camera::new_with_keyframes(\n\n Vec3 { x: 0.0, y: 0.0, z: 10.0 },\n\n Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n fov,\n\n image_width,\n\n image_height,\n\n vec![\n\n CameraKeyframe {\n\n time: 2.5,\n\n position: Vec3 { x: 10.0, y: 0.0, z: 0.0 },\n\n look_at: Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n up: Vec3 { x: 0.0, y: 1.0, z: 0.0 }\n\n },\n\n CameraKeyframe {\n\n time: 5.0,\n\n position: Vec3 { x: 0.0, y: 0.0, z: -10.0 },\n", "file_path": "src/my_scene/sphere.rs", "rank": 26, "score": 144326.92390989268 }, { "content": "#[test]\n\npub fn test_from_png24() {\n\n let surface = from_image(\"test/res/png24.png\")\n\n .ok().expect(\"failed to load test image `test/res/png24.png`\");\n\n\n\n let expected_image: [[(u8, u8, u8, u8); 10]; 2] = [[\n\n (0, 0, 0, 255), (1, 1, 1, 255), (2, 2, 2, 255),\n\n (3, 3, 3, 255), (4, 4, 4, 255), (5, 5, 5, 255),\n\n (6, 6, 6, 255), (7, 7, 7, 255), (8, 8, 8, 255),\n\n (9, 9, 9, 255)\n\n ], [\n\n (255, 0, 0, 255), (255, 0, 0, 127), (255, 0, 0, 0),\n\n (0, 255, 0, 255), (0, 255, 0, 127), (0, 255, 0, 0),\n\n (0, 0, 255, 255), (0, 0, 255, 127), (0, 0, 255, 0),\n\n (0, 0, 0, 0)\n\n ]];\n\n\n\n for y in (0..1) {\n\n for x in (0..9) {\n\n let pixel = surface[(x, y)];\n\n let expected = expected_image[y][x];\n\n assert_eq!(expected, (pixel.r, pixel.g, pixel.b, pixel.a));\n\n }\n\n }\n\n}\n", "file_path": "src/util/import.rs", "rank": 27, "score": 140311.65979779704 }, { "content": "/// Given a bounding box and a point, compute and return a new BBox that\n\n/// encompasses the point and the space the original box encompassed.\n\npub fn union_point(b: &BBox, p: &Vec3) -> BBox {\n\n BBox {\n\n min: Vec3 {\n\n x: b.min.x.min(p.x),\n\n y: b.min.y.min(p.y),\n\n z: b.min.z.min(p.z)\n\n },\n\n max: Vec3 {\n\n x: b.max.x.max(p.x),\n\n y: b.max.y.max(p.y),\n\n z: b.max.z.max(p.z)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 28, "score": 137184.77893607377 }, { "content": 
"#[test]\n\npub fn test_obj_loads_correct_number_of_triangles() {\n\n let material: CookTorranceMaterial = Default::default();\n\n let mesh = from_obj(material, false, \"test/res/cube.obj\")\n\n .ok().expect(\"failed to laod test obj `test/res/cube.obj`\");\n\n\n\n assert_eq!(mesh.triangles.len(), 12);\n\n}\n\n\n", "file_path": "src/util/import.rs", "rank": 29, "score": 129008.48174447332 }, { "content": "#[test]\n\nfn it_has_vec3vec3_equality() {\n\n assert!(Vec3::zero() != Vec3::one());\n\n assert!(Vec3::zero() == Vec3::zero());\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 30, "score": 122399.06574510306 }, { "content": "pub trait Prim: PartialBoundingBox {\n\n fn intersects<'a>(&'a self, ray: &Ray, t_min: f64, t_max: f64) -> Option<Intersection<'a>>;\n\n \n\n // fn transform(&self, transform: &Transform) -> Box<Prim+Send+Sync>;\n\n fn mut_transform(&mut self, transform: &Transform);\n\n}\n\n\n\nimpl<'a> PartialBoundingBox for Box<Prim+Send+Sync> {\n\n fn partial_bounding_box(&self) -> Option<BBox> {\n\n (**self).partial_bounding_box()\n\n }\n\n}", "file_path": "src/geometry/prim.rs", "rank": 31, "score": 120332.1229586653 }, { "content": "#[test]\n\nfn it_intersects_with_a_ray() {\n\n let bbox = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::one()\n\n };\n\n\n\n // Out of the box\n\n let mut intersecting_ray = Ray::new(Vec3 { x: 0.5, y: 1.5, z: 0.5 }, Vec3 { x: 0.0, y: -1.0, z: 0.0 });\n\n assert!(bbox.intersects(&intersecting_ray));\n\n\n\n // In the box\n\n intersecting_ray = Ray::new(Vec3 { x: 0.5, y: 0.5, z: 0.5 }, Vec3 { x: 0.0, y: -1.0, z: 0.0 });\n\n assert!(bbox.intersects(&intersecting_ray));\n\n\n\n // Away from box\n\n let mut non_intersecting_ray = Ray::new(Vec3 { x: 0.5, y: 1.5, z: 0.5 }, Vec3 { x: 0.0, y: 1.0, z: 0.0 });\n\n assert_eq!(false, bbox.intersects(&non_intersecting_ray));\n\n\n\n // To the side\n\n non_intersecting_ray = Ray::new(Vec3 { x: 0.5, y: 1.5, z: 0.5 }, Vec3 { x: 1000.0, y: -1.0, z: 1000.0 });\n\n assert_eq!(false, 
bbox.intersects(&non_intersecting_ray));\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 32, "score": 119037.50385854117 }, { "content": "#[test]\n\nfn it_intersects() {\n\n let plane = Plane { a: 0.0, b: 1.0, c: 0.0, d: 0.0, material: Box::new(FlatMaterial { color: Vec3::one() }) };\n\n\n\n // Tests actual intersection\n\n let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 1.0, z: 0.0 }, Vec3 { x: 0.0, y: -1.0, z: 0.0 });\n\n let intersection = plane.intersects(&intersecting_ray, 0.0, 10.0).unwrap();\n\n assert_eq!(intersection.position.x, 0.0);\n\n assert_eq!(intersection.position.y, 0.0);\n\n assert_eq!(intersection.position.z, 0.0);\n\n assert_eq!(intersection.n.x, 0.0);\n\n assert_eq!(intersection.n.y, 1.0);\n\n assert_eq!(intersection.n.z, 0.0);\n\n\n\n // Parallel ray\n\n let mut non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 1.0, z: 0.0 }, Vec3 { x: 1.0, y: 0.0, z: 1.0 });\n\n let mut non_intersection = plane.intersects(&non_intersecting_ray, 0.0, 10000.0);\n\n assert!(non_intersection.is_none());\n\n\n\n // Ray in opposite direction\n\n non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 1.0, z: 0.0 }, Vec3 { x: 0.0, y: 1.0, z: 0.0 });\n\n non_intersection = plane.intersects(&non_intersecting_ray, 0.0, 10.0);\n\n assert!(non_intersection.is_none());\n\n}\n\n\n", "file_path": "src/geometry/prims/plane.rs", "rank": 33, "score": 118657.7858569744 }, { "content": "#[test]\n\nfn it_intersects() {\n\n let sphere = Sphere {\n\n center: Vec3::zero(),\n\n radius: 1.0,\n\n material: Box::new(FlatMaterial { color: Vec3::one() })\n\n };\n\n\n\n // Tests actual intersection\n\n let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 });\n\n let intersection = sphere.intersects(&intersecting_ray, 0.0, 10.0).unwrap();\n\n assert_eq!(intersection.position.x, 0.0);\n\n assert_eq!(intersection.position.y, 0.0);\n\n assert_eq!(intersection.position.z, -1.0);\n\n assert_eq!(intersection.n.x, 0.0);\n\n 
assert_eq!(intersection.n.y, 0.0);\n\n assert_eq!(intersection.n.z, -1.0);\n\n\n\n // Ray off to the sides\n\n let mut non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 { x: 100.0, y: 100.0, z: 0.1 });\n\n let mut non_intersection = sphere.intersects(&non_intersecting_ray, 0.0, 10.0);\n", "file_path": "src/geometry/prims/sphere.rs", "rank": 34, "score": 118657.7858569744 }, { "content": "/// This is limited to only CookTorranceMaterials, as I couldn't get a Box<Material> to clone\n\n/// a new material for each triangle primitive in the object model.\n\npub fn from_obj(material: CookTorranceMaterial, flip_normals: bool, filename: &str) -> Result<Mesh, String> {\n\n let file_handle = match File::open(&filename) {\n\n Ok(f) => f,\n\n Err(err) => return Err(format!(\"{}\", err))\n\n };\n\n\n\n let total_bytes = match file_handle.metadata() {\n\n Ok(metadata) => metadata.len(),\n\n Err(err) => return Err(format!(\"{}\", err))\n\n };\n\n\n\n let file = BufReader::new(file_handle);\n\n\n\n let start_time = ::time::get_time();\n\n let print_every = 2048u32;\n\n let mut current_line = 0;\n\n let mut processed_bytes = 0;\n\n\n\n let mut vertices: Vec<Vec3> = Vec::new();\n\n let mut normals : Vec<Vec3> = Vec::new();\n", "file_path": "src/util/import.rs", "rank": 35, "score": 116327.01350143229 }, { "content": "#[test]\n\nfn it_intersects_and_interpolates() {\n\n let mut triopts = TriangleOptions::new(\n\n Vec3 { x: -1.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 1.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 });\n\n triopts.normals([\n\n Vec3 { x: -1.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 1.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 }]);\n\n triopts.texinfo([(0.0, 0.0), (1.0, 0.0), (0.0, 1.0)]);\n\n\n\n let triangle = triopts.build();\n\n\n\n // Tests actual intersection\n\n let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.5, z: -1.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 });\n\n let intersection = triangle.intersects(&intersecting_ray, 0.0, 
10.0).unwrap();\n\n assert_eq!(intersection.position.x, 0.0);\n\n assert_eq!(intersection.position.y, 0.5);\n\n assert_eq!(intersection.position.z, 0.0);\n\n assert_eq!(intersection.u, 0.25);\n", "file_path": "src/geometry/prims/triangle.rs", "rank": 36, "score": 115310.62244779509 }, { "content": "#[test]\n\nfn it_intersects_only_in_tmin_tmax() {\n\n let mut triopts = TriangleOptions::new(\n\n Vec3 { x: -1.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 1.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 });\n\n triopts.normals([Vec3::zero(), Vec3::zero(), Vec3::one()]);\n\n triopts.texinfo([(0.0, 0.0), (1.0, 0.0), (0.0, 1.0)]);\n\n\n\n let triangle = triopts.build();\n\n\n\n // Tests tmin\n\n let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.5, z: -1.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 });\n\n let mut non_intersection = triangle.intersects(&intersecting_ray, 1000.0, 10000.0);\n\n assert!(non_intersection.is_none());\n\n\n\n // Tests tmax\n\n non_intersection = triangle.intersects(&intersecting_ray, 0.0, 0.0001);\n\n assert!(non_intersection.is_none());\n\n}\n", "file_path": "src/geometry/prims/triangle.rs", "rank": 37, "score": 112188.45845231699 }, { "content": "#[test]\n\nfn it_intersects_only_in_tmin_tmax() {\n\n let plane = Plane { a: 0.0, b: 1.0, c: 0.0, d: 0.0, material: Box::new(FlatMaterial { color: Vec3::one() }) };\n\n\n\n // Tests tmin\n\n let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 1.0, z: 0.0 }, Vec3 { x: 0.0, y: -1.0, z: 0.0 });\n\n let mut non_intersection = plane.intersects(&intersecting_ray, 1000.0, 10000.0);\n\n assert!(non_intersection.is_none());\n\n\n\n // Tests tmax\n\n non_intersection = plane.intersects(&intersecting_ray, 0.0, 0.0001);\n\n assert!(non_intersection.is_none());\n\n}\n", "file_path": "src/geometry/prims/plane.rs", "rank": 38, "score": 112188.45845231699 }, { "content": "#[test]\n\nfn it_intersects_only_in_tmin_tmax() {\n\n let sphere = Sphere {\n\n center: Vec3::zero(),\n\n radius: 1.0,\n\n material: Box::new(FlatMaterial { 
color: Vec3::one() })\n\n };\n\n\n\n // Tests tmin\n\n let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 });\n\n let mut non_intersection = sphere.intersects(&intersecting_ray, 1000.0, 10000.0);\n\n assert!(non_intersection.is_none());\n\n\n\n // Tests tmax\n\n non_intersection = sphere.intersects(&intersecting_ray, 0.0, 0.0001);\n\n assert!(non_intersection.is_none());\n\n}\n", "file_path": "src/geometry/prims/sphere.rs", "rank": 39, "score": 112188.45845231699 }, { "content": "fn get_camera_and_scene(config: &SceneConfig) -> Option<(Camera, Scene)> {\n\n let scene_name = config.name.clone();\n\n let (image_width, image_height) = config.size;\n\n let fov = config.fov;\n\n\n\n // Cameras, scenes created in ./my_scene.rs\n\n // Scenes with an octree supplied (see my_scene.rs) will use it.\n\n // Lower the render quality (especially shadow_samples) for complex scenes\n\n return match scene_name.as_ref() {\n\n \"box\" => {\n\n // Box. Simplest scene with 9 primitives, no octree\n\n let camera = my_scene::cornell::get_camera(image_width, image_height, fov);\n\n let scene = my_scene::cornell::get_scene();\n\n Some((camera, scene))\n\n },\n\n \"bunny\" => {\n\n // Bunny. Around 300 primitives, 2 lights. Uses octree. 
Has skybox, textures are\n\n // in another repository.\n\n let camera = my_scene::bunny::get_camera(image_width, image_height, fov);\n\n let scene = my_scene::bunny::get_scene();\n", "file_path": "src/main.rs", "rank": 40, "score": 105560.21500776696 }, { "content": "#[derive(RustcDecodable, RustcEncodable)]\n\nstruct SceneConfig {\n\n name: String,\n\n size: (u32, u32),\n\n fov: f64,\n\n reflect_depth: u32,\n\n refract_depth: u32,\n\n shadow_samples: u32,\n\n gloss_samples: u32,\n\n pixel_samples: u32,\n\n output_file: String,\n\n animating: bool,\n\n fps: f64,\n\n time_slice: (f64, f64),\n\n starting_frame_number: u32\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 41, "score": 98090.90980418382 }, { "content": "#[test]\n\nfn it_adds_vec3s_and_scalars() {\n\n assert_eq!(Vec3 { x: 2.0, y: 2.0, z: 2.0 }, Vec3::one() + Vec3::one());\n\n assert_eq!(Vec3 { x: 2.0, y: 2.0, z: 2.0 }, Vec3::one() + 1.0);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 42, "score": 94621.58945050024 }, { "content": "#[test]\n\nfn it_subtracts_vec3s_and_scalars() {\n\n assert_eq!(Vec3::zero(), Vec3::one() - Vec3::one());\n\n assert_eq!(Vec3::zero(), Vec3::one() - 1.0);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 43, "score": 94621.58945050024 }, { "content": "#[test]\n\nfn it_computes_length_of_a_vec3() {\n\n assert_eq!(Vec3 { x: -1.0, y: -1.0, z: -1.0 }, -Vec3::one());\n\n assert_eq!(29.0_f64.sqrt(), Vec3 { x: 2.0, y: 3.0, z: 4.0 }.len());\n\n assert_eq!(1.0, Vec3 { x: 10.0, y: 0.0, z: 0.0 }.unit().len());\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 44, "score": 94621.58945050024 }, { "content": "struct UvValue {\n\n u: f64,\n\n v: f64\n\n}\n\n\n\nimpl UvValue {\n\n pub fn from_tuple(uv: (f64, f64)) -> UvValue {\n\n UvValue { u: uv.0, v: uv.1 }\n\n }\n\n\n\n fn default3() -> [UvValue; 3] {\n\n [\n\n UvValue { u: 0.5, v: 1.0 },\n\n UvValue { u: 0.0, v: 0.0 },\n\n UvValue { u: 1.0, v: 0.0 },\n\n ]\n\n }\n\n}\n\n\n\npub struct TriangleOptions {\n\n vertices: [Vec3; 3],\n\n 
normals: Option<[Vec3; 3]>,\n\n texinfo: Option<[UvValue; 3]>,\n\n material: Option<Box<Material+Send+Sync>>,\n\n}\n\n\n", "file_path": "src/geometry/prims/triangle.rs", "rank": 45, "score": 94041.09446663223 }, { "content": "#[allow(unused_must_use)]\n\npub fn to_ppm(surface: Surface, filename: &str) {\n\n let channel_max: u8 = Channel::max_value();\n\n let header = format!(\n\n \"P3 {} {} {}\\n\", surface.width, surface.height,\n\n channel_max);\n\n\n\n let mut f = match File::create(filename) {\n\n Ok(f) => f,\n\n Err(e) => panic!(\"File error: {}\", e),\n\n };\n\n\n\n f.write_all(header.as_bytes());\n\n for pixel in surface.buffer.iter() {\n\n f.write_all(format!(\"{} {} {} \", pixel.r, pixel.g, pixel.b).as_bytes());\n\n }\n\n}\n", "file_path": "src/util/export.rs", "rank": 46, "score": 93366.70830120653 }, { "content": "#[test]\n\nfn it_multiplies_vec3s_and_scalars_elementwise() {\n\n assert_eq!(Vec3 { x: 2.0, y: 2.0, z: 2.0 }, Vec3::one().scale(2.0));\n\n assert_eq!(Vec3 { x: 2.0, y: 2.0, z: 2.0 }, Vec3::one() * 2.0);\n\n assert_eq!(Vec3 { x: 4.0, y: 9.0, z: -4.0 }, Vec3 { x: 2.0, y: 3.0, z: 4.0 } * Vec3 { x: 2.0, y: 3.0, z: -1.0 });\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 47, "score": 92621.0271419191 }, { "content": "#[test]\n\nfn it_divides_vec3s_and_scalars_elementwise() {\n\n assert_eq!(Vec3 { x: 0.5, y: 0.5, z: 0.5 }, Vec3::one() / 2.0);\n\n assert_eq!(Vec3 { x: 0.5, y: 0.5, z: 0.5 }, Vec3::one() / Vec3 { x: 2.0, y: 2.0, z: 2.0 });\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 48, "score": 92621.0271419191 }, { "content": "fn make_progress_bar(ratio: f64, length: usize) -> String {\n\n let filled = (ratio * length as f64).round() as usize;\n\n let mut bar: String = repeat('|').take(filled).collect();\n\n\n\n for _ in 0..(length - filled) {\n\n bar.push('-');\n\n }\n\n\n\n bar\n\n}\n", "file_path": "src/util/mod.rs", "rank": 49, "score": 91982.4504778549 }, { "content": "#[test]\n\nfn test_sub() {\n\n let m = Mat4::new(\n\n 1.0, 2.0, 3.0, 
4.0,\n\n 5.0, 6.0, 7.0, 8.0,\n\n 9.0, 10.0, 11.0, 12.0,\n\n 13.0, 14.0, 15.0, 16.0\n\n );\n\n\n\n assert_eq!(m - m, Mat4::zero());\n\n}\n\n\n", "file_path": "src/mat4.rs", "rank": 50, "score": 88571.78000701123 }, { "content": "#[test]\n\nfn test_add() {\n\n let m = Mat4::new(\n\n 1.0, 2.0, 3.0, 4.0,\n\n 5.0, 6.0, 7.0, 8.0,\n\n 9.0, 10.0, 11.0, 12.0,\n\n 13.0, 14.0, 15.0, 16.0\n\n );\n\n\n\n let expected = Mat4::new(\n\n 2.0, 4.0, 6.0, 8.0,\n\n 10.0, 12.0, 14.0, 16.0,\n\n 18.0, 20.0, 22.0, 24.0,\n\n 26.0, 28.0, 30.0, 32.0\n\n );\n\n\n\n assert_eq!(m + m, expected);\n\n}\n\n\n", "file_path": "src/mat4.rs", "rank": 51, "score": 88571.78000701123 }, { "content": "#[test]\n\nfn test_mul() {\n\n let a = Mat4::new(\n\n 1.0, 3.0, 5.0, 7.0,\n\n 11.0, 13.0, 17.0, 23.0,\n\n 29.0, 31.0, 37.0, 41.0,\n\n 43.0, 47.0, 53.0, 59.0\n\n );\n\n\n\n let b = Mat4::new(\n\n 1.0, 2.0, 3.0, 4.0,\n\n 5.0, 6.0, 7.0, 8.0,\n\n 9.0, 10.0, 11.0, 12.0,\n\n 13.0, 14.0, 15.0, 16.0\n\n );\n\n\n\n let expected = Mat4::new(\n\n 152.0, 168.0, 184.0, 200.0,\n\n 528.0, 592.0, 656.0, 720.0,\n\n 1050.0, 1188.0, 1326.0, 1464.0,\n\n 1522.0, 1724.0, 1926.0, 2128.0\n\n );\n\n\n\n let out = Mat4::mult_m(&a, &b);\n\n assert_eq!(out, expected);\n\n}\n\n\n", "file_path": "src/mat4.rs", "rank": 52, "score": 88571.78000701123 }, { "content": "#[test]\n\nfn test_transpose() {\n\n let m = Mat4::new(\n\n 1.0, 2.0, 3.0, 4.0,\n\n 5.0, 6.0, 7.0, 8.0,\n\n 9.0, 10.0, 11.0, 12.0,\n\n 13.0, 14.0, 15.0, 16.0\n\n );\n\n\n\n let mt = Mat4::new(\n\n 1.0, 5.0, 9.0, 13.0,\n\n 2.0, 6.0, 10.0, 14.0,\n\n 3.0, 7.0, 11.0, 15.0,\n\n 4.0, 8.0, 12.0, 16.0\n\n );\n\n\n\n assert!(m.transpose() == mt);\n\n}\n\n\n", "file_path": "src/mat4.rs", "rank": 53, "score": 88571.78000701123 }, { "content": "#[test]\n\nfn test_equality() {\n\n let i1 = Mat4::identity();\n\n let i2 = Mat4::identity();\n\n let zero = Mat4::zero();\n\n\n\n assert!(i1 == i2);\n\n assert!(i1 != zero);\n\n}\n\n\n", "file_path": "src/mat4.rs", "rank": 54, "score": 
88571.78000701123 }, { "content": "#[test]\n\nfn test_inverse() {\n\n let i = Mat4::identity();\n\n assert_eq!(i, i.inverse());\n\n\n\n let m = Mat4::new(\n\n 1.0, 0.0, 1.0, 1.0,\n\n 2.0, 0.0, 1.0, 0.0,\n\n 2.0, 1.0, 1.0, 0.0,\n\n 0.0, 0.0, 1.0, 3.0\n\n );\n\n\n\n let m_inverse = Mat4::new(\n\n -3.0, 2.0, 0.0, 1.0,\n\n 0.0, -1.0, 1.0, 0.0,\n\n 6.0, -3.0, 0.0, -2.0,\n\n -2.0, 1.0, 0.0, 1.0\n\n );\n\n\n\n assert_eq!(m.inverse(), m_inverse);\n\n}\n\n\n", "file_path": "src/mat4.rs", "rank": 55, "score": 88571.78000701123 }, { "content": "#[test]\n\nfn it_does_dot_product() {\n\n assert_eq!(5.0, Vec3 { x: 0.0, y: 1.0, z: 2.0 }.dot(&Vec3 { x: 0.0, y: 1.0, z: 2.0 }));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 56, "score": 88258.87202396031 }, { "content": "#[test]\n\nfn it_does_cross_product() {\n\n assert_eq!(Vec3 { x: -1.0, y: 2.0, z: -1.0 }, Vec3 { x: 1.0, y: 2.0, z: 3.0 }.cross(&Vec3 { x: 2.0, y: 3.0, z: 4.0 }));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 57, "score": 88258.87202396031 }, { "content": "#[test]\n\nfn it_implements_debug() {\n\n let vec = Vec3 { x: 0.0, y: 1.0, z: 1.3 };\n\n let formatted_string = format!(\"{:?}\", vec);\n\n let expected_string = \"(0, 1, 1.3)\";\n\n assert_eq!(&formatted_string, expected_string);\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 58, "score": 88258.87202396031 }, { "content": "#[test]\n\nfn it_linearly_interpolates() {\n\n assert_eq!(Vec3::zero(), Vec3::lerp(&Vec3::zero(), &Vec3::one(), 0.0));\n\n assert_eq!(Vec3 { x: 0.5, y: 0.5, z: 0.5 }, Vec3::lerp(&Vec3::zero(), &Vec3::one(), 0.5));\n\n assert_eq!(Vec3::one(), Vec3::lerp(&Vec3::zero(), &Vec3::one(), 1.0));\n\n}\n\n\n", "file_path": "src/vec3.rs", "rank": 59, "score": 88258.87202396031 }, { "content": "#[test]\n\nfn test_mul_with_vec() {\n\n let m = Mat4::new(\n\n 1.0, 2.0, 3.0, 4.0,\n\n 5.0, 6.0, 7.0, 8.0,\n\n 9.0, 10.0, 11.0, 12.0,\n\n 13.0, 14.0, 15.0, 16.0\n\n );\n\n\n\n let v = Vec3 {\n\n x: 1.0,\n\n y: 2.0,\n\n z: 3.0\n\n };\n\n\n\n let expected_w0 
= Vec3 {\n\n x: 1.0 * 1.0 + 2.0 * 2.0 + 3.0 * 3.0,\n\n y: 5.0 * 1.0 + 6.0 * 2.0 + 7.0 * 3.0,\n\n z: 9.0 * 1.0 + 10.0 * 2.0 + 11.0 * 3.0\n\n };\n\n\n\n let multiplied_w0 = Mat4::mult_v(&m, &v);\n\n assert_eq!(multiplied_w0, expected_w0);\n\n}\n", "file_path": "src/mat4.rs", "rank": 60, "score": 86188.08678724858 }, { "content": "#[test]\n\nfn its_macro_definitions_work() {\n\n let four = 4.0;\n\n assert_eq!(Vec3 { x: 2.0, y: 4.0, z: -6.0 }, vec3!(2.0, 1.0 * four, -1.0 * 6.0));\n\n assert_eq!(Vec3::one(), vec3!(1.0, 1.0, 1.0));\n\n assert_eq!(Vec3::zero(), vec3!(0.0));\n\n}\n", "file_path": "src/vec3.rs", "rank": 61, "score": 85886.453694439 }, { "content": "/// Given two bounding boxes, compute and return a new BBox that encompasses\n\n/// both spaces the original two boxes encompassed.\n\npub fn union_bbox(b1: &BBox, b2: &BBox) -> BBox {\n\n BBox {\n\n min: Vec3 {\n\n x: b1.min.x.min(b2.min.x),\n\n y: b1.min.y.min(b2.min.y),\n\n z: b1.min.z.min(b2.min.z)\n\n },\n\n max: Vec3 {\n\n x: b1.max.x.max(b2.max.x),\n\n y: b1.max.y.max(b2.max.y),\n\n z: b1.max.z.max(b2.max.z)\n\n }\n\n }\n\n}\n\n\n\nimpl BBox {\n\n pub fn zero() -> Self {\n\n BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::zero(),\n", "file_path": "src/geometry/bbox.rs", "rank": 63, "score": 84022.74446996252 }, { "content": "#[test]\n\nfn test_measurement() {\n\n let width = 800;\n\n let height = 600;\n\n let width_tile = 128;\n\n let height_tile = 8;\n\n\n\n let background: ColorRGBA<u8> = ColorRGBA::new_rgb(0, 0, 0);\n\n let surf: Surface = Surface::new(width, height, background);\n\n\n\n let mut total_pixels = 0;\n\n\n\n for tile_factory in surf.divide(width_tile, height_tile) {\n\n total_pixels += tile_factory.create().pixel_count();\n\n }\n\n\n\n let (or_width, or_height) = surf.overrender_size(width_tile, height_tile);\n\n\n\n assert_eq!(or_width * or_height, total_pixels);\n\n}\n\n\n", "file_path": "src/raytracer/compositor/surface.rs", "rank": 64, "score": 83970.20047239051 }, { "content": 
"#[test]\n\nfn test_paint_it_red() {\n\n let width = 800;\n\n let height = 600;\n\n let width_tile = 128;\n\n let height_tile = 8;\n\n\n\n let background: ColorRGBA<u8> = ColorRGBA::new_rgb(0, 0, 0);\n\n let mut surf: Surface = Surface::new(width, height, background);\n\n\n\n for tile_factory in surf.divide(width_tile, height_tile) {\n\n let mut tile = tile_factory.create();\n\n for y in 0..tile.height {\n\n for x in 0..tile.width {\n\n tile[(x, y)] = ColorRGBA::new_rgb(255, 0, 0);\n\n }\n\n }\n\n for y in 0..tile.height {\n\n for x in 0..tile.width {\n\n assert_eq!(tile[(x, y)].r, 255);\n\n assert_eq!(tile[(x, y)].g, 0);\n", "file_path": "src/raytracer/compositor/surface.rs", "rank": 65, "score": 81901.40252266289 }, { "content": "#[test]\n\nfn it_renders_the_background_of_an_empty_scene() {\n\n let camera = Camera::new(\n\n Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 0.0, z: 1.0 },\n\n 45.0,\n\n 32,\n\n 32\n\n );\n\n\n\n let test_scene = Scene {\n\n lights: vec!(),\n\n octree: vec!().into_iter().collect(),\n\n background: Vec3 { x: 1.0, y: 0.0, z: 0.0 },\n\n skybox: None\n\n };\n\n\n\n let shared_scene = Arc::new(test_scene);\n\n\n\n let render_options = RenderOptions {\n", "file_path": "src/raytracer/renderer.rs", "rank": 66, "score": 81031.02523325842 }, { "content": "pub fn from_image<P: AsRef<Path>>(path: P) -> Result<Surface, String> {\n\n let image = match ::image::open(path) {\n\n Ok(image) => image.to_rgba(),\n\n Err(err) => return Err(format!(\"{}\", err))\n\n };\n\n\n\n let mut surface = Surface::new(image.width() as usize,\n\n image.height() as usize,\n\n ColorRGBA::transparent());\n\n\n\n for (src, dst_pixel) in image.pixels().zip(surface.iter_pixels_mut()) {\n\n *dst_pixel = ColorRGBA::new_rgba(src[0], src[1], src[2], src[3]);\n\n }\n\n\n\n Ok(surface)\n\n}\n\n\n", "file_path": "src/util/import.rs", "rank": 67, "score": 80787.27284864926 }, { "content": "#[test]\n\nfn test_lerp_camera_position() {\n\n 
// Camera rotates 180 degrees\n\n let camera = Camera::new_with_keyframes(\n\n Vec3 { x: -1.0, y: -1.0, z: -1.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n 45.0,\n\n 10,\n\n 10,\n\n vec![\n\n CameraKeyframe {\n\n time: 5.0,\n\n position: Vec3 { x: 0.0, y: 0.0, z: 0.0 },\n\n look_at: Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n\n up: Vec3 { x: 0.0, y: 1.0, z: 0.0 }\n\n },\n\n CameraKeyframe {\n\n time: 10.0,\n\n position: Vec3 { x: 10.0, y: 0.0, z: 0.0 },\n\n look_at: Vec3 { x: 0.0, y: 1.0, z: 0.0 },\n", "file_path": "src/raytracer/animator/animator.rs", "rank": 68, "score": 79967.14898091485 }, { "content": "pub fn print_progress(noun: &str, start_time: ::time::Timespec, done: usize, total: usize) {\n\n let remaining_jobs = total - done;\n\n let progress: f64 = 100f64 * done as f64 / total as f64;\n\n let current_time = ::time::get_time().sec;\n\n let time_per_job = (current_time - start_time.sec) as f64 / done as f64;\n\n let remaining_time = time_per_job * remaining_jobs as f64;\n\n\n\n print!(\"\\r{} {}/{} complete\\t{:.2}% [{}]\",\n\n noun, done, total, progress,\n\n ::util::make_progress_bar(progress / 100.0, 20)\n\n );\n\n\n\n if remaining_jobs == 0 {\n\n println!(\" (took {:.2} min) \", (current_time - start_time.sec) as f64 / 60.0);\n\n } else {\n\n print!(\" ETA {:.2} min \", remaining_time / 60.0);\n\n ::std::io::stdout().flush().ok().expect(\"failed to flush io\");\n\n }\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 69, "score": 75712.5783878976 }, { "content": "// Replace this with argparse eventually\n\nstruct ProgramArgs {\n\n config_file: String\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 70, "score": 65143.585801826266 }, { "content": "#[derive(Clone, Copy)]\n\nstruct OctreeData {\n\n pub bbox: BBox,\n\n pub index: usize\n\n}\n\n\n\nimpl OctreeNode {\n\n #[allow(dead_code)]\n\n pub fn new(bbox: BBox, depth: i32) -> OctreeNode {\n\n OctreeNode {\n\n bbox: bbox,\n\n depth: depth,\n\n children: Vec::new(),\n\n 
leaf_data: Vec::new(),\n\n }\n\n }\n\n\n\n fn subdivide(&mut self) {\n\n for x in 0u32..2 {\n\n for y in 0u32..2 {\n\n for z in 0u32..2 {\n", "file_path": "src/raytracer/octree.rs", "rank": 71, "score": 63917.159333820615 }, { "content": "struct SubsurfaceIterator {\n\n x_delta: usize,\n\n x_off: usize,\n\n y_delta: usize,\n\n y_off: usize,\n\n parent_width: usize,\n\n parent_height: usize,\n\n background: ColorRGBA<u8>,\n\n}\n\n\n\n\n\nimpl SubsurfaceIterator {\n\n fn incr_tile(&mut self) {\n\n if self.x_off + self.x_delta < self.parent_width {\n\n self.x_off += self.x_delta;\n\n } else {\n\n self.x_off = 0;\n\n self.y_off += self.y_delta;\n\n }\n\n }\n", "file_path": "src/raytracer/compositor/surface.rs", "rank": 72, "score": 62776.04165264504 }, { "content": "/// TODO: Move specular/transmissive properties into traits\n\npub trait Material {\n\n fn sample(&self, n: Vec3, i: Vec3, l: Vec3, u: f64, v: f64) -> Vec3;\n\n fn is_reflective(&self) -> bool;\n\n fn is_refractive(&self) -> bool;\n\n fn global_specular(&self, color: &Vec3) -> Vec3;\n\n fn global_transmissive(&self, color: &Vec3) -> Vec3;\n\n fn transmission(&self) -> Vec3;\n\n fn ior(&self) -> f64;\n\n fn is_glossy(&self) -> bool;\n\n fn glossiness(&self) -> f64;\n\n}\n", "file_path": "src/material/material.rs", "rank": 73, "score": 58873.028240750005 }, { "content": "pub trait Texture {\n\n fn color(&self, u: f64, v: f64) -> ColorRGBA<f64>;\n\n fn clone_self(&self) -> Box<Texture+Send+Sync>;\n\n}\n\n\n\nimpl Clone for Box<Texture+Send+Sync> {\n\n fn clone(&self) -> Box<Texture+Send+Sync> {\n\n self.clone_self()\n\n }\n\n}\n", "file_path": "src/material/texture.rs", "rank": 74, "score": 58873.028240750005 }, { "content": "pub trait Light {\n\n fn position(&self) -> Vec3;\n\n fn color(&self) -> Vec3;\n\n fn center(&self) -> Vec3;\n\n fn is_point(&self) -> bool;\n\n}\n", "file_path": "src/light/light.rs", "rank": 75, "score": 58873.028240750005 }, { "content": "struct OctreeIterator<'a, T:'a> {\n\n prims: 
&'a [T],\n\n stack: Vec<&'a OctreeNode>,\n\n leaf_iter: Option<Iter<'a, OctreeData>>,\n\n ray: &'a Ray,\n\n infinites: Iter<'a, T>,\n\n just_infinites: bool\n\n}\n\n\n\n\n\nimpl<'a, T> OctreeIterator<'a, T> where T: PartialBoundingBox {\n\n fn new<'b>(octree: &'b Octree<T>, ray: &'b Ray) -> OctreeIterator<'b, T> {\n\n OctreeIterator {\n\n prims: &octree.prims[..],\n\n stack: vec![&octree.root],\n\n leaf_iter: None,\n\n ray: ray,\n\n infinites: octree.infinites.iter(),\n\n just_infinites: false\n\n }\n", "file_path": "src/raytracer/octree.rs", "rank": 76, "score": 57546.57730707593 }, { "content": "pub trait PartialBoundingBox {\n\n fn partial_bounding_box(&self) -> Option<BBox>;\n\n}\n\n\n\nimpl PartialBoundingBox for BBox {\n\n fn partial_bounding_box(&self) -> Option<BBox> {\n\n Some(*self)\n\n }\n\n}\n\n\n\nimpl PartialBoundingBox for Option<BBox> {\n\n fn partial_bounding_box(&self) -> Option<BBox> {\n\n *self\n\n }\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 77, "score": 56714.09099617639 }, { "content": "fn main() {\n\n let start_time = ::time::get_time().sec;\n\n\n\n let program_args = match parse_args(env::args()) {\n\n Ok(program_args) => program_args,\n\n Err(error_str) => {\n\n write!(&mut io::stderr(), \"{}\\n\", error_str).unwrap();\n\n process::exit(1);\n\n }\n\n };\n\n let mut file_handle = match File::open(&program_args.config_file) {\n\n Ok(file) => file,\n\n Err(err) => {\n\n write!(&mut io::stderr(), \"{}\\n\", err).unwrap();\n\n process::exit(1);\n\n }\n\n };\n\n\n\n let mut json_data = String::new();\n\n if let Err(ref err) = file_handle.read_to_string(&mut json_data) {\n", "file_path": "src/main.rs", "rank": 78, "score": 55904.32244311874 }, { "content": "pub trait Channel: ToPrimitive {\n\n fn min_value() -> Self;\n\n fn max_value() -> Self;\n\n fn add(a: Self, b: Self) -> Self;\n\n fn sub(a: Self, b: Self) -> Self;\n\n}\n\n\n\nimpl Channel for u8 {\n\n #[inline]\n\n fn min_value() -> u8 { u8::min_value() }\n\n\n\n #[inline]\n\n 
fn max_value() -> u8 { u8::max_value() }\n\n\n\n #[inline]\n\n fn add(a: u8, b: u8) -> u8 { a.saturating_add(b) }\n\n\n\n #[inline]\n\n fn sub(a: u8, b: u8) -> u8 { a.saturating_sub(b) }\n\n}\n", "file_path": "src/raytracer/compositor/colorrgba.rs", "rank": 79, "score": 55125.74979726385 }, { "content": "#[test]\n\nfn it_expands_by_a_factor() {\n\n let bbox = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::one()\n\n };\n\n\n\n let expanded = bbox.expand(1.0);\n\n assert_eq!(-Vec3::one(), expanded.min);\n\n assert_eq!(Vec3::one().scale(2.0), expanded.max);\n\n\n\n let shrunken = bbox.expand(-0.25);\n\n assert_eq!(Vec3 { x: 0.25, y: 0.25, z: 0.25 }, shrunken.min);\n\n assert_eq!(Vec3 { x: 0.75, y: 0.75, z: 0.75 }, shrunken.max);\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 80, "score": 53494.83953347035 }, { "content": "#[test]\n\nfn it_unions_a_bbox_with_a_point() {\n\n let original_bbox = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::one()\n\n };\n\n\n\n let smaller_point = Vec3 { x: -1.0, y: -1.0, z: -1.0 };\n\n let unioned_bbox = union_point(&original_bbox, &smaller_point);\n\n assert_eq!(unioned_bbox.min, smaller_point);\n\n assert_eq!(unioned_bbox.max, Vec3::one());\n\n\n\n let larger_point = Vec3 { x: 2.0, y: 2.0, z: 2.0 };\n\n let unioned_bbox2 = union_point(&unioned_bbox, &larger_point);\n\n assert_eq!(unioned_bbox2.min, smaller_point);\n\n assert_eq!(unioned_bbox2.max, larger_point);\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 81, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_returns_max_extent() {\n\n let x = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3 { x: 2.0, y: 1.0, z: 1.0 }\n\n };\n\n\n\n let y = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3 { x: 1.0, y: 2.0, z: 1.0 }\n\n };\n\n\n\n let z = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3 { x: 1.0, y: 1.0, z: 2.0 }\n\n };\n\n\n\n assert_eq!(0u8, x.max_extent());\n\n assert_eq!(1u8, y.max_extent());\n\n assert_eq!(2u8, z.max_extent());\n\n}\n\n\n", "file_path": 
"src/geometry/bbox.rs", "rank": 82, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_checks_for_bbox_overlap() {\n\n let bbox = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::one()\n\n };\n\n\n\n let overlapping = BBox {\n\n min: Vec3 { x: 0.5, y: 0.5, z: 0.5 },\n\n max: Vec3 { x: 1.5, y: 1.5, z: 1.5 }\n\n };\n\n\n\n let not_overlapping = BBox {\n\n min: Vec3 { x: 1.5, y: 1.5, z: 1.5 },\n\n max: Vec3 { x: 2.5, y: 2.5, z: 2.5 }\n\n };\n\n\n\n assert!(bbox.overlaps(&overlapping));\n\n assert_eq!(false, bbox.overlaps(&not_overlapping));\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 83, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_returns_side_lengths() {\n\n let bbox = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3 { x: 1.0, y: 2.0, z: 3.0 }\n\n };\n\n\n\n assert_eq!(1.0, bbox.x_len());\n\n assert_eq!(2.0, bbox.y_len());\n\n assert_eq!(3.0, bbox.z_len());\n\n}\n", "file_path": "src/geometry/bbox.rs", "rank": 84, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_checks_for_point_inside() {\n\n let bbox = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::one()\n\n };\n\n\n\n let inside = Vec3 { x: 0.5, y: 0.5, z: 0.5 };\n\n assert!(bbox.inside(&inside));\n\n\n\n let outside_1 = Vec3 { x: 1.5, y: 1.5, z: 1.5 };\n\n let outside_2 = Vec3 { x: 0.5, y: 1.5, z: 0.5 };\n\n let outside_3 = Vec3 { x: -0.5, y: 0.5, z: 0.5 };\n\n\n\n assert_eq!(false, bbox.inside(&outside_1));\n\n assert_eq!(false, bbox.inside(&outside_2));\n\n assert_eq!(false, bbox.inside(&outside_3));\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 85, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_unions_two_bboxes() {\n\n let bbox_one = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::one()\n\n };\n\n\n\n let bbox_two = BBox {\n\n min: -Vec3::one(),\n\n max: Vec3::zero()\n\n };\n\n\n\n let unioned_bbox = union_bbox(&bbox_one, &bbox_two);\n\n assert_eq!(unioned_bbox.min, -Vec3::one());\n\n assert_eq!(unioned_bbox.max, Vec3::one());\n\n}\n\n\n", 
"file_path": "src/geometry/bbox.rs", "rank": 86, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn color_mul() {\n\n let foo_color = ColorRGBA::<f64>::new_rgb(0.5, 0.0, 0.0) * 2.0;\n\n\n\n assert_eq!(foo_color.r, 1.0);\n\n assert_eq!(foo_color.g, 0.0);\n\n assert_eq!(foo_color.b, 0.0);\n\n assert_eq!(foo_color.a, 1.0);\n\n}\n\n\n", "file_path": "src/raytracer/compositor/colorrgba.rs", "rank": 87, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn color_add() {\n\n let foo_color: ColorRGBA<u8> = ColorRGBA::new_rgba(1, 1, 1, 1) +\n\n ColorRGBA::new_rgba(2, 2, 2, 2);\n\n assert_eq!(foo_color.r, 3);\n\n assert_eq!(foo_color.g, 3);\n\n assert_eq!(foo_color.b, 3);\n\n assert_eq!(foo_color.a, 3);\n\n\n\n let foo_color: ColorRGBA<u8> = ColorRGBA::new_rgba(200, 1, 1, 1) +\n\n ColorRGBA::new_rgba(200, 2, 2, 2);\n\n assert_eq!(foo_color.r, 255);\n\n assert_eq!(foo_color.g, 3);\n\n assert_eq!(foo_color.b, 3);\n\n assert_eq!(foo_color.a, 3);\n\n}\n\n\n", "file_path": "src/raytracer/compositor/colorrgba.rs", "rank": 88, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_unions_two_points() {\n\n // Larger to smaller\n\n let unioned_bbox = union_points(&Vec3::one(), &Vec3::zero());\n\n assert_eq!(unioned_bbox.min, Vec3::zero());\n\n assert_eq!(unioned_bbox.max, Vec3::one());\n\n\n\n // Smaller to larger\n\n let unioned_bbox2 = union_points(&-Vec3::one(), &Vec3::zero());\n\n assert_eq!(unioned_bbox2.min, -Vec3::one());\n\n assert_eq!(unioned_bbox2.max, Vec3::zero());\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 89, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_bilinearly_filters() {\n\n let background = ColorRGBA::new_rgb(0, 0, 0);\n\n let mut surface = Surface::new(2, 2, background);\n\n\n\n surface[(0, 0)] = ColorRGBA::new_rgb(255, 0, 0);\n\n surface[(0, 1)] = ColorRGBA::new_rgb(0, 255, 0);\n\n surface[(1, 0)] = ColorRGBA::new_rgb(0, 0, 255);\n\n surface[(1, 1)] = ColorRGBA::new_rgb(0, 0, 0);\n\n\n\n let texture = 
ImageTexture { image: surface };\n\n\n\n let left = texture.color(0.0, 0.5);\n\n assert_eq!(left.r, 0.5);\n\n assert_eq!(left.g, 0.5);\n\n assert_eq!(left.b, 0.0);\n\n\n\n let center = texture.color(0.5, 0.5);\n\n assert_eq!(center.r, 0.25);\n\n assert_eq!(center.g, 0.25);\n\n assert_eq!(center.b, 0.25);\n\n}\n", "file_path": "src/material/textures/imagetexture.rs", "rank": 90, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn color_sub() {\n\n let foo_color: ColorRGBA<u8> = ColorRGBA::new_rgba(7, 7, 7, 7) -\n\n ColorRGBA::new_rgba(2, 2, 2, 2);\n\n assert_eq!(foo_color.r, 5);\n\n assert_eq!(foo_color.g, 5);\n\n assert_eq!(foo_color.b, 5);\n\n assert_eq!(foo_color.a, 5);\n\n}\n\n\n", "file_path": "src/raytracer/compositor/colorrgba.rs", "rank": 91, "score": 52414.005187692426 }, { "content": "#[test]\n\nfn it_checks_for_contains_another_bbox() {\n\n let bbox = BBox {\n\n min: Vec3::zero(),\n\n max: Vec3::one()\n\n };\n\n\n\n let overlapping = BBox {\n\n min: Vec3 { x: 0.5, y: 0.5, z: 0.5 },\n\n max: Vec3 { x: 1.5, y: 1.5, z: 1.5 }\n\n };\n\n\n\n let not_overlapping = BBox {\n\n min: Vec3 { x: 1.5, y: 1.5, z: 1.5 },\n\n max: Vec3 { x: 2.5, y: 2.5, z: 2.5 }\n\n };\n\n\n\n let inside = BBox {\n\n min: Vec3 { x: 0.25, y: 0.25, z: 0.25 },\n\n max: Vec3 { x: 0.75, y: 0.75, z: 0.75 }\n\n };\n\n\n\n assert_eq!(false, bbox.contains(&overlapping));\n\n assert_eq!(false, bbox.contains(&not_overlapping));\n\n assert!(bbox.contains(&inside));\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 92, "score": 51405.825524216925 }, { "content": "#[test]\n\nfn it_returns_offset_length_from_min_corner() {\n\n let bbox = BBox {\n\n min: -Vec3::one(),\n\n max: Vec3::one()\n\n };\n\n\n\n let offset_point = bbox.offset(&Vec3::one());\n\n assert_eq!(Vec3::one(), offset_point);\n\n}\n\n\n", "file_path": "src/geometry/bbox.rs", "rank": 93, "score": 50463.21289012543 }, { "content": "use light::Light;\n\nuse material::textures::CubeMap;\n\nuse geometry::Prim;\n\nuse 
raytracer::Octree;\n\nuse vec3::Vec3;\n\n\n\npub struct Scene {\n\n pub lights: Vec<Box<Light+Send+Sync>>,\n\n pub octree: Octree<Box<Prim+Send+Sync>>,\n\n pub background: Vec3,\n\n pub skybox: Option<CubeMap>\n\n}\n", "file_path": "src/scene/scene.rs", "rank": 94, "score": 42274.7700438506 }, { "content": "fn parse_args(args: env::Args) -> Result<ProgramArgs, String> {\n\n let args = args.collect::<Vec<String>>();\n\n if args.len() == 0 {\n\n panic!(\"Args do not even include a program name\");\n\n }\n\n\n\n let program_name = &args[0];\n\n match args.len() {\n\n // I wouldn't expect this in the wild\n\n 0 => unreachable!(),\n\n 1 => Err(format!(\"Usage: {} scene_config.json\", program_name)),\n\n 2 => Ok(ProgramArgs { config_file: args[1].clone() }),\n\n _ => Err(format!(\"Usage: {} scene_config.json\", program_name)),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 95, "score": 41514.95180472804 }, { "content": "use std::cmp;\n\nuse std::fmt;\n\nuse std::ops::{Add, Mul, Div, Neg, Sub};\n\nuse rand;\n\nuse rand::{thread_rng, Rng};\n\n\n\n#[derive(Clone, Copy, Default)]\n\npub struct Vec3 {\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64\n\n}\n\n\n\nimpl Vec3 {\n\n pub fn zero() -> Vec3 {\n\n Vec3 {\n\n x: 0.0,\n\n y: 0.0,\n\n z: 0.0\n\n }\n", "file_path": "src/vec3.rs", "rank": 96, "score": 36334.86537128503 }, { "content": " /// inside: Is the ray inside an object (ie. 
going out of an object)?\n\n pub fn refract(v: &Vec3, n: &Vec3, ior: f64, inside: bool) -> Option<Vec3> {\n\n let (n1, n2, n_dot_v, nn): (f64, f64, _, _) = if !inside {\n\n (1.0, ior, n.dot(v), *n)\n\n } else {\n\n (ior, 1.0, -n.dot(v), -*n)\n\n };\n\n\n\n let ratio = n1 / n2;\n\n let disc = 1.0 - ((ratio * ratio) * (1.0 - n_dot_v * n_dot_v));\n\n\n\n if disc < 0.0 {\n\n None // Total internal reflection\n\n } else {\n\n Some(v.scale(-ratio) + nn.scale(ratio * n_dot_v - disc.sqrt()))\n\n }\n\n }\n\n\n\n pub fn lerp(v1: &Vec3, v2: &Vec3, alpha: f64) -> Vec3 {\n\n Vec3 {\n", "file_path": "src/vec3.rs", "rank": 97, "score": 36332.92154390084 }, { "content": "impl cmp::PartialEq for Vec3 {\n\n fn eq(&self, other: &Vec3) -> bool {\n\n self.x == other.x && self.y == other.y && self.z == other.z\n\n }\n\n\n\n fn ne(&self, other: &Vec3) -> bool {\n\n !(self.eq(other))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Vec3 {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"({}, {}, {})\", self.x, self.y, self.z)\n\n }\n\n}\n\n\n\nmacro_rules! vec3 {\n\n ($x:expr, $y:expr, $z:expr) => {\n\n Vec3 { x: $x, y: $y, z: $z }\n\n };\n\n\n\n ($s:expr) => {\n\n Vec3 { x: $s, y: $s, z: $s }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/vec3.rs", "rank": 98, "score": 36332.83515841165 }, { "content": " }\n\n\n\n pub fn one() -> Vec3 {\n\n Vec3 {\n\n x: 1.0,\n\n y: 1.0,\n\n z: 1.0\n\n }\n\n }\n\n\n\n pub fn len(&self) -> f64 {\n\n (self.x * self.x +\n\n self.y * self.y +\n\n self.z * self.z).sqrt()\n\n }\n\n\n\n pub fn dot(&self, other: &Vec3) -> f64 {\n\n self.x * other.x +\n\n self.y * other.y +\n\n self.z * other.z\n", "file_path": "src/vec3.rs", "rank": 99, "score": 36329.95616189156 } ]
Rust
relm4-examples/examples/grid_factory.rs
Hofer-Julian/relm4
bc8ea3f027a801126f767e93a3b04e20df4ca714
use gtk::prelude::{BoxExt, ButtonExt, GtkWindowExt}; use relm4::factory::{Factory, FactoryPrototype, FactoryVec, GridPosition}; use relm4::Sender; use relm4::*; struct AppWidgets { main: gtk::ApplicationWindow, gen_grid: gtk::Grid, } #[derive(Debug)] enum AppMsg { Add, Remove, Clicked(usize), } struct Data { counter: u8, } struct AppModel { data: FactoryVec<Data>, counter: u8, } impl Model for AppModel { type Msg = AppMsg; type Widgets = AppWidgets; type Components = (); } impl Widgets<AppModel, ()> for AppWidgets { type Root = gtk::ApplicationWindow; fn init_view(_model: &AppModel, _components: &(), sender: Sender<AppMsg>) -> Self { let main = gtk::ApplicationWindowBuilder::new() .default_width(300) .default_height(200) .build(); let main_box = gtk::Box::builder() .orientation(gtk::Orientation::Vertical) .margin_end(5) .margin_top(5) .margin_start(5) .margin_bottom(5) .spacing(5) .build(); let gen_grid = gtk::Grid::builder() .orientation(gtk::Orientation::Vertical) .margin_end(5) .margin_top(5) .margin_start(5) .margin_bottom(5) .row_spacing(5) .column_spacing(5) .column_homogeneous(true) .build(); let add = gtk::Button::with_label("Add"); let remove = gtk::Button::with_label("Remove"); main_box.append(&add); main_box.append(&remove); main_box.append(&gen_grid); main.set_child(Some(&main_box)); let cloned_sender = sender.clone(); add.connect_clicked(move |_| { cloned_sender.send(AppMsg::Add).unwrap(); }); remove.connect_clicked(move |_| { sender.send(AppMsg::Remove).unwrap(); }); AppWidgets { main, gen_grid } } fn view(&mut self, model: &AppModel, sender: Sender<AppMsg>) { model.data.generate(&self.gen_grid, sender); } fn root_widget(&self) -> gtk::ApplicationWindow { self.main.clone() } } impl AppUpdate for AppModel { fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool { match msg { AppMsg::Add => { self.data.push(Data { counter: self.counter, }); self.counter += 1; } AppMsg::Remove => { self.data.pop(); } AppMsg::Clicked(index) 
=> { if let Some(data) = self.data.get_mut(index) { data.counter = data.counter.wrapping_sub(1); } } } true } } struct FactoryWidgets { button: gtk::Button, } impl FactoryPrototype for Data { type Factory = FactoryVec<Self>; type Widgets = FactoryWidgets; type Root = gtk::Button; type View = gtk::Grid; type Msg = AppMsg; fn generate(&self, index: &usize, sender: Sender<AppMsg>) -> FactoryWidgets { let button = gtk::Button::with_label(&self.counter.to_string()); let index = *index; button.connect_clicked(move |_| { sender.send(AppMsg::Clicked(index)).unwrap(); }); FactoryWidgets { button } } fn position(&self, index: &usize) -> GridPosition { let index = *index as i32; let row = index / 5; let column = (index % 5) * 2 + row % 2; GridPosition { column, row, width: 1, height: 1, } } fn update(&self, _index: &usize, widgets: &FactoryWidgets) { widgets.button.set_label(&self.counter.to_string()); } fn get_root(widgets: &FactoryWidgets) -> &gtk::Button { &widgets.button } } fn main() { let model = AppModel { data: FactoryVec::new(), counter: 0, }; let relm = RelmApp::new(model); relm.run(); }
use gtk::prelude::{BoxExt, ButtonExt, GtkWindowExt}; use relm4::factory::{Factory, FactoryPrototype, FactoryVec, GridPosition}; use relm4::Sender; use relm4::*; struct AppWidgets { main: gtk::ApplicationWindow, gen_grid: gtk::Grid, } #[derive(Debug)] enum AppMsg { Add, Remove, Clicked(usize), } struct Data { counter: u8, } struct AppModel { data: FactoryVec<Data>, counter: u8, } impl Model for AppModel { type Msg = AppMsg; type Widgets = AppWidgets; type Components = (); } impl Widgets<AppModel, ()> for AppWidgets { type Root = gtk::ApplicationWindow; fn init_view(_model: &AppModel, _components: &(), sender: Sender<AppMsg>) -> Self { let main = gtk::ApplicationWindowBuilder::new() .default_width(300) .default_height(200) .build(); let main_box = gtk::Box::builder() .orientation(gtk::Orientation::Vertical) .margin_end(5) .margin_top(5) .margin_start(5) .margin_bottom(5) .spacing(5) .build(); let gen_grid = gtk::Grid::builder() .orientation(gtk::Orientation::Vertical) .margin_end(5) .
fn view(&mut self, model: &AppModel, sender: Sender<AppMsg>) { model.data.generate(&self.gen_grid, sender); } fn root_widget(&self) -> gtk::ApplicationWindow { self.main.clone() } } impl AppUpdate for AppModel { fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool { match msg { AppMsg::Add => { self.data.push(Data { counter: self.counter, }); self.counter += 1; } AppMsg::Remove => { self.data.pop(); } AppMsg::Clicked(index) => { if let Some(data) = self.data.get_mut(index) { data.counter = data.counter.wrapping_sub(1); } } } true } } struct FactoryWidgets { button: gtk::Button, } impl FactoryPrototype for Data { type Factory = FactoryVec<Self>; type Widgets = FactoryWidgets; type Root = gtk::Button; type View = gtk::Grid; type Msg = AppMsg; fn generate(&self, index: &usize, sender: Sender<AppMsg>) -> FactoryWidgets { let button = gtk::Button::with_label(&self.counter.to_string()); let index = *index; button.connect_clicked(move |_| { sender.send(AppMsg::Clicked(index)).unwrap(); }); FactoryWidgets { button } } fn position(&self, index: &usize) -> GridPosition { let index = *index as i32; let row = index / 5; let column = (index % 5) * 2 + row % 2; GridPosition { column, row, width: 1, height: 1, } } fn update(&self, _index: &usize, widgets: &FactoryWidgets) { widgets.button.set_label(&self.counter.to_string()); } fn get_root(widgets: &FactoryWidgets) -> &gtk::Button { &widgets.button } } fn main() { let model = AppModel { data: FactoryVec::new(), counter: 0, }; let relm = RelmApp::new(model); relm.run(); }
margin_top(5) .margin_start(5) .margin_bottom(5) .row_spacing(5) .column_spacing(5) .column_homogeneous(true) .build(); let add = gtk::Button::with_label("Add"); let remove = gtk::Button::with_label("Remove"); main_box.append(&add); main_box.append(&remove); main_box.append(&gen_grid); main.set_child(Some(&main_box)); let cloned_sender = sender.clone(); add.connect_clicked(move |_| { cloned_sender.send(AppMsg::Add).unwrap(); }); remove.connect_clicked(move |_| { sender.send(AppMsg::Remove).unwrap(); }); AppWidgets { main, gen_grid } }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let model = AppModel {\n\n mode: AppMode::View,\n\n };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/components.rs", "rank": 0, "score": 212890.8310617418 }, { "content": "fn main() {\n\n let model = AppModel { counter: 0 };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 1, "score": 209210.51053550054 }, { "content": "enum AppMsg {\n\n SetMode(AppMode),\n\n CloseRequest,\n\n Close,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components.rs", "rank": 2, "score": 208656.9830704928 }, { "content": "struct AppModel {\n\n mode: AppMode,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\n#[relm4_macros::widget]\n\nimpl Widgets<AppModel, ()> for AppWidgets {\n\n view! {\n\n main_window = gtk::ApplicationWindow {\n\n set_default_width: 500,\n\n set_default_height: 250,\n\n set_titlebar: component!(Some(components.header.root_widget())),\n\n set_child = Some(&gtk::Label) {\n\n set_label: watch!(&format!(\"Placeholder for {:?}\", model.mode)),\n\n },\n", "file_path": "relm4-examples/examples/components.rs", "rank": 3, "score": 207078.2197748529 }, { "content": "fn main() {\n\n let model = AppModel { counter: 0 };\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/stateful_msg_handler.rs", "rank": 4, "score": 206359.43691134948 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n ShowComp2,\n\n ShowComp1,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 5, "score": 205166.32822129602 }, { "content": "struct AppModel {\n\n counter: u8,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl Widgets<AppModel, ()> for AppWidgets {\n\n type Root = 
gtk::ApplicationWindow;\n\n\n\n fn init_view(model: &AppModel, _parent_widgets: &(), sender: Sender<AppMsg>) -> Self {\n\n let main = gtk::ApplicationWindowBuilder::new().build();\n\n let vbox = gtk::BoxBuilder::new()\n\n .orientation(gtk::Orientation::Vertical)\n\n .spacing(10)\n\n .build();\n\n vbox.set_margin_all(5);\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 6, "score": 203597.14114598575 }, { "content": "struct AppModel {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/stateful_msg_handler.rs", "rank": 7, "score": 200915.8230775951 }, { "content": "fn main() {\n\n let model = AppModel {\n\n tasks: FactoryVec::new(),\n\n };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/to_do.rs", "rank": 8, "score": 177710.18747181157 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(\n\n &mut self,\n\n msg: AppMsg,\n\n _components: &AppComponents,\n\n _sender: Sender<AppMsg>,\n\n ) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n\n self.counter = self.counter.wrapping_sub(1);\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "relm4-examples/examples/stateful_msg_handler.rs", "rank": 9, "score": 175962.99784602463 }, { "content": "enum DialogMsg {\n\n Show,\n\n Accept,\n\n Cancel,\n\n}\n\n\n\nimpl Model for DialogModel {\n\n type Msg = DialogMsg;\n\n type Widgets = DialogWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl ComponentUpdate<AppModel> for DialogModel {\n\n fn init_model(_parent_model: &AppModel) -> Self {\n\n DialogModel { hidden: true }\n\n }\n\n\n\n fn update(\n\n &mut self,\n\n msg: DialogMsg,\n", "file_path": "relm4-examples/examples/components.rs", "rank": 10, 
"score": 175001.48580127858 }, { "content": "enum HeaderMsg {\n\n View,\n\n Edit,\n\n Export,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components.rs", "rank": 11, "score": 175001.48580127858 }, { "content": "fn main() {\n\n let model = AppModel {\n\n text: String::new(),\n\n waiting: false,\n\n tracker: 0,\n\n };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/future.rs", "rank": 12, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel { page: Page::Hello };\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/stack.rs", "rank": 13, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel {\n\n width: 1000,\n\n counter: 0,\n\n tracker: 0,\n\n };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/macro.rs", "rank": 14, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel {\n\n text: String::new(),\n\n text_waiting: false,\n\n image_data: None,\n\n image_waiting: false,\n\n tracker: 0,\n\n };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/tokio.rs", "rank": 15, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let store = gio::ListStore::new(IntegerObject::static_type());\n\n for number in 0..=10 {\n\n //0_000 {\n\n let integer_object = IntegerObject::new(number);\n\n store.append(&integer_object);\n\n }\n\n\n\n let model = AppModel { store };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/list.rs", "rank": 16, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel::default();\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/simple.rs", "rank": 17, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel::default();\n\n let app = 
RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/popover.rs", "rank": 18, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel::default();\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/alert.rs", "rank": 19, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel {\n\n width: 100.0,\n\n height: 100.0,\n\n points: Vec::new(),\n\n reset: false,\n\n };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/drawing.rs", "rank": 20, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel {\n\n first_icon: random_icon_name(),\n\n second_icon: random_icon_name(),\n\n identical: false,\n\n tracker: 0,\n\n };\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/tracker.rs", "rank": 21, "score": 174845.1503495439 }, { "content": "fn main() {\n\n let model = AppModel {\n\n counters: FactoryVec::new(),\n\n created_counters: 0,\n\n };\n\n\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/factory.rs", "rank": 22, "score": 174845.1503495439 }, { "content": "enum AppMsg {\n\n SetCompleted((usize, bool)),\n\n AddEntry(String),\n\n}\n\n\n", "file_path": "relm4-examples/examples/to_do.rs", "rank": 23, "score": 174282.6457014984 }, { "content": "struct Counter {\n\n value: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/factory.rs", "rank": 24, "score": 173941.47932863503 }, { "content": "struct HeaderModel {}\n\n\n\nimpl Model for HeaderModel {\n\n type Msg = HeaderMsg;\n\n type Widgets = HeaderWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl ComponentUpdate<AppModel> for HeaderModel {\n\n fn init_model(_parent_model: &AppModel) -> Self {\n\n HeaderModel {}\n\n }\n\n\n\n fn update(\n\n &mut self,\n\n msg: HeaderMsg,\n\n _components: &(),\n\n _sender: Sender<HeaderMsg>,\n\n 
parent_sender: Sender<AppMsg>,\n\n ) {\n", "file_path": "relm4-examples/examples/components.rs", "rank": 25, "score": 173422.72250563867 }, { "content": "struct DialogModel {\n\n hidden: bool,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components.rs", "rank": 26, "score": 173422.72250563867 }, { "content": "struct AppModel {\n\n tasks: FactoryVec<Task>,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::SetCompleted((index, completed)) => {\n\n if let Some(task) = self.tasks.get_mut(index) {\n\n task.completed = completed;\n\n }\n\n }\n\n AppMsg::AddEntry(name) => {\n\n self.tasks.push(Task {\n", "file_path": "relm4-examples/examples/to_do.rs", "rank": 27, "score": 172693.81048689797 }, { "content": "fn main() {\n\n let model = AppModel { counter: 0 };\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/simple_manual.rs", "rank": 28, "score": 172124.67602357658 }, { "content": "fn main() {\n\n let model = AppModel {};\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/stack_switcher.rs", "rank": 29, "score": 172124.67602357658 }, { "content": "fn main() {\n\n let model = AppModel {\n\n counter: 0,\n\n classes: vec![\"first\", \"second\"],\n\n decrement: false,\n\n };\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/macro_test.rs", "rank": 31, "score": 172124.67602357658 }, { "content": "fn main() {\n\n let model = AppModel::default();\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/save_dialog.rs", "rank": 32, "score": 172124.67602357658 }, { "content": "fn main() {\n\n let model = AppModel {\n\n counters: FactoryVecDeque::new(),\n\n 
received_messages: 0,\n\n };\n\n\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/factory_advanced.rs", "rank": 33, "score": 172124.67602357658 }, { "content": "fn main() {\n\n let model = AppModel {};\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/open_button.rs", "rank": 34, "score": 172124.67602357658 }, { "content": "fn main() {\n\n let model = AppModel::default();\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/libadwaita/examples/simple.rs", "rank": 35, "score": 172124.67602357658 }, { "content": "fn main() {\n\n let model = AppModel {\n\n data: FactoryVec::new(),\n\n counter: 0,\n\n };\n\n\n\n let relm = RelmApp::new(model);\n\n relm.run();\n\n}\n", "file_path": "relm4-examples/examples/factory_manual.rs", "rank": 36, "score": 172124.67602357658 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Add,\n\n Remove,\n\n Clicked(usize),\n\n}\n\n\n", "file_path": "relm4-examples/examples/factory.rs", "rank": 37, "score": 171571.1485585689 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n CloseRequest,\n\n Save,\n\n Close,\n\n Ignore,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, components: &AppComponents, _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n", "file_path": "relm4-examples/examples/alert.rs", "rank": 38, "score": 171571.1485585689 }, { "content": "enum AppMsg {\n\n AddPoint((f64, f64)),\n\n UpdatePoints,\n\n Reset,\n\n Resize((i32, i32)),\n\n}\n\n\n", "file_path": "relm4-examples/examples/drawing.rs", "rank": 39, "score": 171571.1485585689 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Request(String),\n\n Response(String),\n\n}\n\n\n", "file_path": 
"relm4-examples/examples/future.rs", "rank": 40, "score": 171571.1485585689 }, { "content": "enum AppMsg {\n\n UpdateFirst,\n\n UpdateSecond,\n\n}\n\n\n\n// The track proc macro allows to easily track changes to different\n\n// fields of the model\n", "file_path": "relm4-examples/examples/tracker.rs", "rank": 41, "score": 171571.1485585689 }, { "content": "enum AppMsg {\n\n NextPage,\n\n PreviousPage,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::NextPage => {\n\n self.page = self.page.next();\n\n }\n\n AppMsg::PreviousPage => {\n\n self.page = self.page.previous();\n\n }\n", "file_path": "relm4-examples/examples/stack.rs", "rank": 42, "score": 171571.1485585689 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n\n self.counter = self.counter.wrapping_sub(1);\n\n }\n", "file_path": "relm4-examples/examples/popover.rs", "rank": 43, "score": 171571.1485585689 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Add(String),\n\n RemoveLast,\n\n}\n\n\n", "file_path": "relm4-examples/examples/list.rs", "rank": 44, "score": 171571.1485585689 }, { "content": "enum AppMsg {\n\n Increment,\n\n}\n\n\n", "file_path": "relm4-examples/examples/macro.rs", "rank": 45, "score": 171571.1485585689 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Request(String),\n\n SetText(Option<String>),\n\n SetImage(Option<bytes::Bytes>),\n\n}\n\n\n\nimpl Model for AppModel {\n\n 
type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, components: &AppComponents, _sender: Sender<AppMsg>) -> bool {\n\n self.reset();\n\n\n\n match msg {\n\n AppMsg::Request(url) => {\n\n components.http.send(HttpMsg::Request(url)).unwrap();\n\n self.set_text_waiting(true);\n", "file_path": "relm4-examples/examples/tokio.rs", "rank": 46, "score": 171571.1485585689 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n\n self.counter = self.counter.wrapping_sub(1);\n\n }\n", "file_path": "relm4-examples/examples/simple.rs", "rank": 47, "score": 171571.1485585689 }, { "content": "#[derive(PartialEq)]\n\nenum CompMsg {\n\n Hide,\n\n Show,\n\n}\n\n\n\nimpl Widgets<Comp1Model, AppModel> for Comp1Widgets {\n\n type Root = gtk::Button;\n\n\n\n fn init_view(model: &Comp1Model, _parent_widget: &AppWidgets, sender: Sender<CompMsg>) -> Self {\n\n // Initialize gtk widgets\n\n let button = gtk::Button::with_label(\"First Component\");\n\n button.set_visible(!model.hidden);\n\n\n\n button.connect_clicked(move |_button| {\n\n sender.send(CompMsg::Hide).unwrap();\n\n });\n\n\n\n Comp1Widgets { button }\n\n }\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 48, "score": 171510.83095208177 }, { "content": "struct Counter {\n\n value: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/factory_advanced.rs", "rank": 50, "score": 171226.41225377 }, { "content": "struct Counter {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/factory_manual.rs", 
"rank": 51, "score": 171226.41225377 }, { "content": "struct AppModel {\n\n counters: FactoryVec<Counter>,\n\n created_counters: u8,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Add => {\n\n self.counters.push(Counter {\n\n value: self.created_counters,\n\n });\n\n self.created_counters += 1;\n\n }\n", "file_path": "relm4-examples/examples/factory.rs", "rank": 52, "score": 169992.38526292896 }, { "content": "struct AppModel {\n\n page: Page,\n\n}\n\n\n", "file_path": "relm4-examples/examples/stack.rs", "rank": 53, "score": 169992.38526292896 }, { "content": "#[tracker::track]\n\nstruct AppModel {\n\n text: String,\n\n waiting: bool,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl Widgets<AppModel, ()> for AppWidgets {\n\n type Root = gtk::ApplicationWindow;\n\n\n\n fn init_view(_model: &AppModel, _components: &(), sender: Sender<AppMsg>) -> Self {\n\n let main = gtk::ApplicationWindowBuilder::new()\n\n .default_width(300)\n\n .default_height(200)\n\n .build();\n\n let main_box = gtk::Box::builder()\n", "file_path": "relm4-examples/examples/future.rs", "rank": 54, "score": 169992.38526292896 }, { "content": "struct AppModel {\n\n store: gio::ListStore,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl Widgets<AppModel, ()> for AppWidgets {\n\n type Root = gtk::ApplicationWindow;\n\n\n\n fn init_view(model: &AppModel, _components: &(), sender: Sender<AppMsg>) -> Self {\n\n let main = gtk::ApplicationWindowBuilder::new()\n\n .default_width(300)\n\n .default_height(200)\n\n .build();\n\n let main_box = gtk::Box::builder()\n\n .orientation(gtk::Orientation::Vertical)\n", 
"file_path": "relm4-examples/examples/list.rs", "rank": 55, "score": 169992.38526292896 }, { "content": "#[tracker::track]\n\nstruct AppModel {\n\n width: u32,\n\n counter: u32,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n self.reset();\n\n match msg {\n\n AppMsg::Increment => {\n\n self.update_counter(|cnt| *cnt += 1);\n\n }\n\n }\n\n println!(\"counter: {}\", self.counter);\n", "file_path": "relm4-examples/examples/macro.rs", "rank": 56, "score": 169992.38526292896 }, { "content": "#[tracker::track]\n\nstruct AppModel {\n\n first_icon: &'static str,\n\n second_icon: &'static str,\n\n identical: bool,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n // reset tracker value of the model\n\n self.reset();\n\n\n\n match msg {\n\n AppMsg::UpdateFirst => {\n\n self.set_first_icon(random_icon_name());\n", "file_path": "relm4-examples/examples/tracker.rs", "rank": 57, "score": 169992.38526292896 }, { "content": "#[tracker::track]\n\nstruct AppModel {\n\n text: String,\n\n text_waiting: bool,\n\n image_data: Option<Pixbuf>,\n\n image_waiting: bool,\n\n}\n\n\n", "file_path": "relm4-examples/examples/tokio.rs", "rank": 58, "score": 169992.38526292896 }, { "content": "#[derive(Default)]\n\nstruct AppModel {\n\n counter: u8,\n\n alert_toggle: bool,\n\n}\n\n\n", "file_path": "relm4-examples/examples/alert.rs", "rank": 59, "score": 169992.38526292896 }, { "content": "#[derive(Default)]\n\nstruct AppModel {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/simple.rs", "rank": 60, "score": 169992.38526292896 }, { "content": "#[derive(Default)]\n\nstruct 
AppModel {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/popover.rs", "rank": 61, "score": 169992.38526292896 }, { "content": "struct AppModel {\n\n width: f64,\n\n height: f64,\n\n points: Vec<Point>,\n\n reset: bool,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n self.reset = false;\n\n match msg {\n\n AppMsg::AddPoint((x, y)) => {\n\n self.points.push(Point::new(x, y));\n\n }\n", "file_path": "relm4-examples/examples/drawing.rs", "rank": 62, "score": 169992.38526292896 }, { "content": "struct Comp1Model {\n\n hidden: bool,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 63, "score": 169941.6438767715 }, { "content": "struct Comp2Model {\n\n hidden: bool,\n\n}\n\n\n\nimpl Model for Comp1Model {\n\n type Msg = CompMsg;\n\n type Widgets = Comp1Widgets;\n\n type Components = ();\n\n}\n\n\n\nimpl Model for Comp2Model {\n\n type Msg = CompMsg;\n\n type Widgets = Comp2Widgets;\n\n type Components = ();\n\n}\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 64, "score": 169941.6438767715 }, { "content": "fn main() {\n\n let model = AppModel { counter: 0 };\n\n let app = RelmApp::new(model);\n\n app.run();\n\n}\n", "file_path": "relm4-examples/examples/non_blocking_async.rs", "rank": 65, "score": 169538.09231011564 }, { "content": "// Implement components that will be part of the main app\n\nstruct Comp1Widgets {\n\n button: gtk::Button,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 66, "score": 169445.02296349072 }, { "content": "struct Comp2Widgets {\n\n button: gtk::Button,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 67, "score": 169436.6985741977 }, { "content": "struct AppWidgets {\n\n main: gtk::ApplicationWindow,\n\n 
text: gtk::Label,\n\n vbox: gtk::Box,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 68, "score": 169436.6985741977 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n SaveRequest,\n\n SaveResponse(PathBuf),\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, components: &AppComponents, _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n", "file_path": "relm4-examples/examples/save_dialog.rs", "rank": 69, "score": 168993.10020793666 }, { "content": "enum AppMsg {}\n\n\n", "file_path": "relm4-examples/examples/stack_switcher.rs", "rank": 71, "score": 168993.10020793666 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n\n self.counter = self.counter.wrapping_sub(1);\n\n }\n", "file_path": "relm4-examples/libadwaita/examples/simple.rs", "rank": 72, "score": 168993.10020793666 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n AddFirst,\n\n RemoveLast,\n\n CountAt(MsgIndex),\n\n RemoveAt(MsgIndex),\n\n InsertBefore(MsgIndex),\n\n InsertAfter(MsgIndex),\n\n}\n\n\n", "file_path": "relm4-examples/examples/factory_advanced.rs", "rank": 73, "score": 168993.10020793666 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel 
{\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n\n self.counter = self.counter.wrapping_sub(1);\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "relm4-examples/examples/simple_manual.rs", "rank": 74, "score": 168993.10020793666 }, { "content": "enum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(\n\n &mut self,\n\n msg: AppMsg,\n\n _components: &AppComponents,\n\n _sender: Sender<AppMsg>,\n\n ) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n", "file_path": "relm4-examples/examples/macro_test.rs", "rank": 75, "score": 168993.10020793666 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Add,\n\n Remove,\n\n Clicked(usize),\n\n}\n\n\n", "file_path": "relm4-examples/examples/factory_manual.rs", "rank": 76, "score": 168993.10020793666 }, { "content": "enum AppMsg {\n\n Open(PathBuf),\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(\n\n &mut self,\n\n msg: AppMsg,\n\n _components: &AppComponents,\n\n _sender: Sender<AppMsg>,\n\n ) -> bool {\n\n match msg {\n\n AppMsg::Open(path) => {\n\n println!(\"* Open file at {:?} *\", path);\n\n }\n\n }\n\n\n\n true\n\n }\n\n}\n\n\n", "file_path": "relm4-examples/examples/open_button.rs", "rank": 77, "score": 168993.10020793666 }, { "content": "#[derive(Default)]\n\nstruct AppModel {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/libadwaita/examples/simple.rs", "rank": 78, "score": 167423.9131326264 }, { "content": "struct AppModel {\n\n data: FactoryVec<Counter>,\n\n counter: u8,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type 
Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Add => {\n\n self.data.push(Counter {\n\n counter: self.counter,\n\n });\n\n self.counter += 1;\n\n }\n", "file_path": "relm4-examples/examples/factory_manual.rs", "rank": 79, "score": 167423.9131326264 }, { "content": "#[derive(Default)]\n\nstruct AppModel {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/save_dialog.rs", "rank": 80, "score": 167423.9131326264 }, { "content": "struct AppModel {\n\n counters: FactoryVecDeque<Counter>,\n\n received_messages: u8,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::AddFirst => {\n\n self.counters.push_front(Counter {\n\n value: self.received_messages,\n\n });\n\n }\n\n AppMsg::RemoveLast => {\n", "file_path": "relm4-examples/examples/factory_advanced.rs", "rank": 81, "score": 167423.9131326264 }, { "content": "struct AppModel {\n\n counter: u8,\n\n classes: Vec<&'static str>,\n\n decrement: bool,\n\n}\n\n\n", "file_path": "relm4-examples/examples/macro_test.rs", "rank": 82, "score": 167423.9131326264 }, { "content": "struct AppModel {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/simple_manual.rs", "rank": 83, "score": 167423.9131326264 }, { "content": "struct AppModel {}\n\n\n", "file_path": "relm4-examples/examples/open_button.rs", "rank": 85, "score": 167423.9131326264 }, { "content": "struct AppModel {}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\n#[relm4_macros::widget]\n\nimpl Widgets<AppModel, ()> for AppWidgets {\n\n view! 
{\n\n main_window = gtk::ApplicationWindow {\n\n set_default_width: 500,\n\n set_default_height: 250,\n\n set_titlebar: component!(Some(components.header.root_widget())),\n\n set_child: component!(Some(components.stack.root_widget())),\n\n }\n\n }\n\n\n\n fn post_connect_components() {\n", "file_path": "relm4-examples/examples/stack_switcher.rs", "rank": 86, "score": 167423.9131326264 }, { "content": "fn main() {\n\n let (rx, mut tx) = channel::<(AsyncHandlerMsg, Sender<AppMsg>)>(10);\n\n\n\n let rt = Builder::new_multi_thread()\n\n .worker_threads(8)\n\n .enable_time()\n\n .build()\n\n .unwrap();\n\n\n\n rt.spawn(async move {\n\n while let Some((msg, sender)) = tx.recv().await {\n\n tokio::spawn(async move {\n\n tokio::time::sleep(std::time::Duration::from_secs(1)).await;\n\n match msg {\n\n AsyncHandlerMsg::IncrementRequest => {\n\n send!(sender, AppMsg::Increment);\n\n }\n\n AsyncHandlerMsg::DecrementRequest => {\n\n send!(sender, AppMsg::Decrement);\n\n }\n", "file_path": "relm4-examples/examples/non_blocking_async_manual.rs", "rank": 87, "score": 167075.7522784065 }, { "content": "struct AppComponents {\n\n async_handler: RelmMsgHandler<AsyncHandler, AppModel>,\n\n}\n\n\n\nimpl Components<AppModel> for AppComponents {\n\n fn init_components(\n\n parent_model: &AppModel,\n\n _parent_widget: &AppWidgets,\n\n parent_sender: Sender<AppMsg>,\n\n ) -> Self {\n\n AppComponents {\n\n async_handler: RelmMsgHandler::new(parent_model, parent_sender),\n\n }\n\n }\n\n}\n\n\n\n#[relm4_macros::widget]\n\nimpl Widgets<AppModel, ()> for AppWidgets {\n\n view! 
{\n\n gtk::ApplicationWindow {\n", "file_path": "relm4-examples/examples/stateful_msg_handler.rs", "rank": 88, "score": 166602.84503438094 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = AppComponents;\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(\n\n &mut self,\n\n msg: AppMsg,\n\n _components: &AppComponents,\n\n _sender: Sender<AppMsg>,\n\n ) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n\n self.counter = self.counter.wrapping_sub(1);\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n", "file_path": "relm4-examples/examples/non_blocking_async.rs", "rank": 89, "score": 166538.88555236318 }, { "content": "struct AppModel {\n\n counter: u8,\n\n}\n\n\n", "file_path": "relm4-examples/examples/non_blocking_async.rs", "rank": 90, "score": 164978.81471425312 }, { "content": "#[derive(Debug)]\n\nenum AppMsg {\n\n Increment,\n\n Decrement,\n\n}\n\n\n\nimpl Model for AppModel {\n\n type Msg = AppMsg;\n\n type Widgets = AppWidgets;\n\n type Components = ();\n\n}\n\n\n\nimpl AppUpdate for AppModel {\n\n fn update(&mut self, msg: AppMsg, _components: &(), _sender: Sender<AppMsg>) -> bool {\n\n match msg {\n\n AppMsg::Increment => {\n\n self.counter = self.counter.wrapping_add(1);\n\n }\n\n AppMsg::Decrement => {\n\n self.counter = self.counter.wrapping_sub(1);\n\n }\n", "file_path": "relm4-examples/examples/non_blocking_async_manual.rs", "rank": 91, "score": 164199.79153151577 }, { "content": "struct AppModel {\n\n counter: u8,\n\n async_handler: TokioSender<(AsyncHandlerMsg, Sender<AppMsg>)>,\n\n}\n\n\n", "file_path": "relm4-examples/examples/non_blocking_async_manual.rs", "rank": 92, "score": 162648.40931234095 }, { "content": "/// Sets a custom global stylesheet.\n\n///\n\n/// # Panics\n\n///\n\n/// This function panics if [`RelmApp::new`] 
wasn't called before\n\n/// or this function is not called on the thread that also called [`RelmApp::new`].\n\npub fn set_global_css(style_data: &[u8]) {\n\n let display = gtk::gdk::Display::default().unwrap();\n\n let provider = gtk::CssProvider::new();\n\n provider.load_from_data(style_data);\n\n gtk::StyleContext::add_provider_for_display(\n\n &display,\n\n &provider,\n\n gtk::STYLE_PROVIDER_PRIORITY_APPLICATION,\n\n );\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 93, "score": 160357.91633156512 }, { "content": "struct AppComponents {\n\n header: RelmComponent<HeaderModel, AppModel>,\n\n dialog: RelmComponent<DialogModel, AppModel>,\n\n}\n\n\n\nimpl Components<AppModel> for AppComponents {\n\n fn init_components(\n\n parent_model: &AppModel,\n\n parent_widgets: &AppWidgets,\n\n parent_sender: Sender<AppMsg>,\n\n ) -> Self {\n\n AppComponents {\n\n header: RelmComponent::new(parent_model, parent_widgets, parent_sender.clone()),\n\n dialog: RelmComponent::new(parent_model, parent_widgets, parent_sender),\n\n }\n\n }\n\n}\n\n\n", "file_path": "relm4-examples/examples/components.rs", "rank": 94, "score": 144516.2458517779 }, { "content": "struct AppComponents {\n\n comp1: RelmComponent<Comp1Model, AppModel>,\n\n comp2: RelmComponent<Comp2Model, AppModel>,\n\n}\n\n\n\nimpl Components<AppModel> for AppComponents {\n\n fn init_components(\n\n parent_model: &AppModel,\n\n parent_widgets: &AppWidgets,\n\n parent_sender: Sender<AppMsg>,\n\n ) -> Self {\n\n AppComponents {\n\n comp1: RelmComponent::with_new_thread(\n\n parent_model,\n\n parent_widgets,\n\n parent_sender.clone(),\n\n ),\n\n comp2: RelmComponent::new(parent_model, parent_widgets, parent_sender),\n\n }\n\n }\n\n}\n\n\n", "file_path": "relm4-examples/examples/components_old.rs", "rank": 95, "score": 142170.07582056237 }, { "content": "#[derive(Debug)]\n\nenum AsyncHandlerMsg {\n\n DelayedIncrement,\n\n DelayedDecrement,\n\n}\n\n\n\nimpl MessageHandler<AppModel> for AsyncHandler {\n\n type Msg = 
AsyncHandlerMsg;\n\n type Sender = TokioSender<AsyncHandlerMsg>;\n\n\n\n fn init(_parent_model: &AppModel, parent_sender: Sender<AppMsg>) -> Self {\n\n let (sender, mut rx) = channel::<AsyncHandlerMsg>(10);\n\n\n\n let rt = Builder::new_multi_thread()\n\n .worker_threads(8)\n\n .enable_time()\n\n .build()\n\n .unwrap();\n\n\n\n let mut state = Mutex::new(0u8);\n\n\n", "file_path": "relm4-examples/examples/stateful_msg_handler.rs", "rank": 96, "score": 140133.96269893413 }, { "content": "struct TaskWidgets {\n\n label: gtk::Label,\n\n hbox: gtk::Box,\n\n}\n\n\n\nimpl FactoryPrototype for Task {\n\n type View = gtk::ListBox;\n\n type Msg = AppMsg;\n\n type Factory = FactoryVec<Task>;\n\n type Widgets = TaskWidgets;\n\n type Root = gtk::Box;\n\n\n\n fn generate(&self, key: &usize, sender: Sender<Self::Msg>) -> Self::Widgets {\n\n let hbox = gtk::Box::builder()\n\n .orientation(gtk::Orientation::Horizontal)\n\n .build();\n\n let checkbox = gtk::CheckButton::builder().active(false).build();\n\n let label = gtk::Label::new(Some(&self.name));\n\n\n\n assert!(!self.completed);\n", "file_path": "relm4-examples/examples/to_do.rs", "rank": 97, "score": 138507.23018158445 }, { "content": "enum HttpMsg {\n\n Request(String),\n\n}\n\n\n\n#[relm4::async_trait]\n\nimpl AsyncComponentUpdate<AppModel> for HttpModel {\n\n fn init_model(_parent_model: &AppModel) -> Self {\n\n HttpModel {}\n\n }\n\n\n\n async fn update(\n\n &mut self,\n\n msg: HttpMsg,\n\n _components: &(),\n\n _sender: Sender<HttpMsg>,\n\n parent_sender: Sender<AppMsg>,\n\n ) {\n\n match msg {\n\n HttpMsg::Request(url) => {\n\n let t2_sender = parent_sender.clone();\n", "file_path": "relm4-examples/examples/tokio.rs", "rank": 98, "score": 137915.65128935466 }, { "content": "#[derive(Debug)]\n\nenum AppMode {\n\n View,\n\n Edit,\n\n Export,\n\n}\n\n\n", "file_path": "relm4-examples/examples/components.rs", "rank": 99, "score": 137251.18350910928 } ]
Rust
src/elem/wrap/ymerge_wrap.rs
dbeck/minions_rs
b731c8c5c0e6f52013cb56f20a76ecccfe94dc7f
use lossyq::spsc::{Sender}; use super::super::super::{Task, Message, ChannelWrapper, ChannelId, SenderName, ReceiverChannelId, SenderChannelId, ChannelPosition }; use super::super::connectable::{ConnectableY}; use super::super::identified_input::{IdentifiedInput}; use super::super::counter::{OutputCounter, InputCounter}; use super::super::ymerge::{YMerge}; pub struct YMergeWrap<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> { name : String, state : Box<YMerge<InputValueA=InputValueA, InputErrorA=InputErrorA, InputValueB=InputValueB, InputErrorB=InputErrorB, OutputValue=OutputValue, OutputError=OutputError>+Send>, input_a_rx : ChannelWrapper<InputValueA, InputErrorA>, input_b_rx : ChannelWrapper<InputValueB, InputErrorB>, output_tx : Sender<Message<OutputValue, OutputError>>, } pub fn new<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send>( name : String, state : Box<YMerge<InputValueA=InputValueA, InputErrorA=InputErrorA, InputValueB=InputValueB, InputErrorB=InputErrorB, OutputValue=OutputValue, OutputError=OutputError>+Send>, input_a_rx : ChannelWrapper<InputValueA, InputErrorA>, input_b_rx : ChannelWrapper<InputValueB, InputErrorB>, output_tx : Sender<Message<OutputValue, OutputError>>) -> YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { YMergeWrap{ name: name, state: state, input_a_rx: input_a_rx, input_b_rx: input_b_rx, output_tx: output_tx } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> IdentifiedInput for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> { if ch_id.0 > 1 { None } else if ch_id.0 == 0 { match &self.input_a_rx { &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref 
sender_name) => { Some((*channel_id, sender_name.clone())) }, _ => None } } else { match &self.input_b_rx { &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref sender_name) => { Some((*channel_id, sender_name.clone())) }, _ => None } } } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> InputCounter for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { fn get_rx_count(&self, ch_id: ReceiverChannelId) -> usize { if ch_id.0 > 1 { 0 } else if ch_id.0 == 0 { match &self.input_a_rx { &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) => { receiver.seqno() }, _ => 0 } } else { match &self.input_b_rx { &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) => { receiver.seqno() }, _ => 0 } } } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> OutputCounter for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { fn get_tx_count(&self, ch_id: SenderChannelId) -> usize { if ch_id.0 == 0 { self.output_tx.seqno() } else { 0 } } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> ConnectableY for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { type InputValueA = InputValueA; type InputErrorA = InputErrorA; type InputValueB = InputValueB; type InputErrorB = InputErrorB; fn input_a(&mut self) -> &mut ChannelWrapper<InputValueA, InputErrorA> { &mut self.input_a_rx } fn input_b(&mut self) -> &mut ChannelWrapper<InputValueB, InputErrorB> { &mut self.input_b_rx } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> Task for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> 
{ fn execute(&mut self, stop: &mut bool) { self.state.process(&mut self.input_a_rx, &mut self.input_b_rx, &mut self.output_tx, stop); } fn name(&self) -> &String { &self.name } fn input_count(&self) -> usize { 2 } fn output_count(&self) -> usize { 1 } fn input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> { self.get_input_id(ch_id) } fn input_channel_pos(&self, ch_id: ReceiverChannelId) -> ChannelPosition { ChannelPosition( self.get_rx_count(ch_id) ) } fn output_channel_pos(&self, ch_id: SenderChannelId) -> ChannelPosition { ChannelPosition( self.get_tx_count(ch_id) ) } }
use lossyq::spsc::{Sender}; use super::super::super::{Task, Message, ChannelWrapper, ChannelId, SenderName, ReceiverChannelId, SenderChannelId, ChannelPosition }; use super::super::connectable::{ConnectableY}; use super::super::identified_input::{IdentifiedInput}; use super::super::counter::{OutputCounter, InputCounter}; use super::super::ymerge::{YMerge}; pub struct YMergeWrap<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> { name : String, state : Box<YMerge<InputValueA=InputValueA, InputErrorA=InputErrorA, InputValueB=InputValueB, InputErrorB=InputErrorB, OutputValue=OutputValue, OutputError=OutputError>+Send>, input_a_rx : ChannelWrapper<InputValueA, InputErrorA>, input_b_rx : ChannelWrapper<InputValueB, InputErrorB>, output_tx : Sender<Message<OutputValue, OutputError>>, } pub fn new<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send>( name : String, state : Box<YMerge<InputValueA=InputValueA, InputErrorA=InputErrorA, InputValueB=InputValueB, InputErrorB=InputErrorB, OutputValue=OutputValue, OutputError=OutputError>+Send>, input_a_rx : ChannelWrapper<InputValueA, InputErrorA>, input_b_rx : ChannelWrapper<InputValueB, InputErrorB>, output_tx : Sender<Message<OutputValue, OutputError>>) -> YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { YMergeWrap{ name: name, state: state, input_a_rx: input_a_rx, input_b_rx: input_b_rx, output_tx: output_tx } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> IdentifiedInput for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> { if ch_id.0 > 1 { None } else if ch_id.0 == 0 { match &self.input_a_rx { &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref 
sender_name) => { Some((*channel_id, sender_name.clone())) }, _ => None } } else { match &self.input_b_rx { &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref sender_name) => { Some((*channel_id, sender_name.clone())) }, _ => None } } } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> InputCounter for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { fn get_rx_count(&self, ch_id: ReceiverChannelId) -> usize { if ch_id.0 > 1 { 0 } else if ch_id.0 == 0 {
} else { match &self.input_b_rx { &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) => { receiver.seqno() }, _ => 0 } } } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> OutputCounter for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { fn get_tx_count(&self, ch_id: SenderChannelId) -> usize { if ch_id.0 == 0 { self.output_tx.seqno() } else { 0 } } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> ConnectableY for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { type InputValueA = InputValueA; type InputErrorA = InputErrorA; type InputValueB = InputValueB; type InputErrorB = InputErrorB; fn input_a(&mut self) -> &mut ChannelWrapper<InputValueA, InputErrorA> { &mut self.input_a_rx } fn input_b(&mut self) -> &mut ChannelWrapper<InputValueB, InputErrorB> { &mut self.input_b_rx } } impl<InputValueA: Send, InputErrorA: Send, InputValueB: Send, InputErrorB: Send, OutputValue: Send, OutputError: Send> Task for YMergeWrap<InputValueA, InputErrorA, InputValueB, InputErrorB, OutputValue, OutputError> { fn execute(&mut self, stop: &mut bool) { self.state.process(&mut self.input_a_rx, &mut self.input_b_rx, &mut self.output_tx, stop); } fn name(&self) -> &String { &self.name } fn input_count(&self) -> usize { 2 } fn output_count(&self) -> usize { 1 } fn input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> { self.get_input_id(ch_id) } fn input_channel_pos(&self, ch_id: ReceiverChannelId) -> ChannelPosition { ChannelPosition( self.get_rx_count(ch_id) ) } fn output_channel_pos(&self, ch_id: SenderChannelId) -> ChannelPosition { ChannelPosition( self.get_tx_count(ch_id) ) } }
match &self.input_a_rx { &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) => { receiver.seqno() }, _ => 0 }
if_condition
[ { "content": "pub fn new<InputValueA: Send, InputErrorA: Send,\n\n InputValueB: Send, InputErrorB: Send,\n\n OutputValue: Send, OutputError: Send>(\n\n name : &str,\n\n output_q_size : usize,\n\n ymerge : Box<YMerge<InputValueA=InputValueA, InputErrorA=InputErrorA,\n\n InputValueB=InputValueB, InputErrorB=InputErrorB,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>)\n\n -> (Box<ymerge_wrap::YMergeWrap<InputValueA, InputErrorA,\n\n InputValueB, InputErrorB,\n\n OutputValue, OutputError>>,\n\n Box<ChannelWrapper<OutputValue, OutputError>>)\n\n{\n\n let (output_tx, output_rx) = channel(output_q_size);\n\n let name = String::from(name);\n\n\n\n (\n\n Box::new(\n\n ymerge_wrap::new(\n\n name.clone(),\n", "file_path": "src/elem/ymerge.rs", "rank": 0, "score": 175683.2027354726 }, { "content": "pub fn new<OutputValue: Send, OutputError: Send>(\n\n name : &str,\n\n output_q_size : usize,\n\n source : Box<Source<OutputValue=OutputValue, OutputError=OutputError>+Send>)\n\n -> (Box<source_wrap::SourceWrap<OutputValue, OutputError>>,\n\n Box<ChannelWrapper<OutputValue, OutputError>>)\n\n{\n\n let (output_tx, output_rx) = channel(output_q_size);\n\n let name = String::from(name);\n\n\n\n (\n\n Box::new(source_wrap::new(name.clone(), source, output_tx)),\n\n Box::new(\n\n ChannelWrapper::SenderNotConnected(\n\n SenderChannelId(0),\n\n output_rx,\n\n SenderName(name)\n\n )\n\n )\n\n )\n\n}\n", "file_path": "src/elem/source.rs", "rank": 1, "score": 175316.983419466 }, { "content": "pub fn new<OutputValue: Send, OutputError: Send>(\n\n name : String,\n\n state : Box<Source<OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n output_tx : Sender<Message<OutputValue, OutputError>>)\n\n -> SourceWrap<OutputValue, OutputError>\n\n{\n\n SourceWrap{ name: name, state: state, output_tx: output_tx }\n\n}\n\n\n\nimpl<OutputValue: Send, OutputError: Send> OutputCounter\n\n for SourceWrap<OutputValue, OutputError>\n\n{\n\n fn get_tx_count(&self, ch_id: SenderChannelId) -> 
usize {\n\n if ch_id.0 == 0 {\n\n self.output_tx.seqno()\n\n } else {\n\n 0\n\n }\n\n }\n\n}\n", "file_path": "src/elem/wrap/source_wrap.rs", "rank": 3, "score": 171587.23032897938 }, { "content": "pub fn new<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send>(\n\n name : &str,\n\n output_q_size : usize,\n\n filter : Box<Filter<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>)\n\n -> (Box<filter_wrap::FilterWrap<InputValue, InputError, OutputValue, OutputError>>,\n\n Box<ChannelWrapper<OutputValue, OutputError>>)\n\n{\n\n let (output_tx, output_rx) = channel(output_q_size);\n\n let name = String::from(name);\n\n\n\n (\n\n Box::new(\n\n filter_wrap::new(\n\n name.clone(),\n\n filter,\n\n ChannelWrapper::ReceiverNotConnected(\n\n ReceiverChannelId(0),\n\n ReceiverName (name.clone())\n\n ),\n", "file_path": "src/elem/filter.rs", "rank": 4, "score": 166266.45428904146 }, { "content": "pub fn new<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send>(\n\n name : &str,\n\n output_q_size : usize,\n\n scatter : Box<Scatter<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n n_channels : usize)\n\n -> (Box<scatter_wrap::ScatterWrap<InputValue, InputError, OutputValue, OutputError>>,\n\n Vec<Box<ChannelWrapper<OutputValue, OutputError>>>)\n\n{\n\n let mut tx_vec = Vec::with_capacity(n_channels);\n\n let mut rx_vec = Vec::with_capacity(n_channels);\n\n let name = String::from(name);\n\n\n\n for i in 0..n_channels {\n\n let (output_tx, output_rx) = channel(output_q_size);\n\n tx_vec.push(output_tx);\n\n rx_vec.push(\n\n Box::new(\n\n ChannelWrapper::SenderNotConnected(\n\n SenderChannelId(i),\n", "file_path": "src/elem/scatter.rs", "rank": 5, "score": 166266.45428904146 }, { "content": "pub fn new<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send>(\n\n name : &str,\n\n output_q_size : usize,\n\n 
gather : Box<Gather<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n n_channels : usize)\n\n -> (Box<gather_wrap::GatherWrap<InputValue, InputError, OutputValue, OutputError>>,\n\n Box<ChannelWrapper<OutputValue, OutputError>>)\n\n{\n\n let (output_tx, output_rx) = channel(output_q_size);\n\n let name = String::from(name);\n\n let mut inputs = vec![];\n\n for i in 0..n_channels {\n\n inputs.push(ChannelWrapper::ReceiverNotConnected(\n\n ReceiverChannelId(i),\n\n ReceiverName (name.clone())\n\n ));\n\n }\n\n\n\n (\n", "file_path": "src/elem/gather.rs", "rank": 6, "score": 166266.45428904146 }, { "content": "pub fn new<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send>(\n\n name : String,\n\n state : Box<Gather<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n input_rx_vec : Vec<ChannelWrapper<InputValue, InputError>>,\n\n output_tx : Sender<Message<OutputValue, OutputError>>)\n\n -> GatherWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n GatherWrap{ name: name, state: state, input_rx_vec: input_rx_vec, output_tx: output_tx }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> IdentifiedInput\n\n for GatherWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> {\n\n if ch_id.0 < self.input_rx_vec.len() {\n\n let slice = self.input_rx_vec.as_slice();\n\n match &slice[ch_id.0] {\n\n &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref sender_name) => {\n\n Some((*channel_id, sender_name.clone()))\n", "file_path": "src/elem/wrap/gather_wrap.rs", "rank": 7, "score": 163584.60897041752 }, { "content": "pub fn new<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send>(\n\n name : String,\n\n state : Box<Filter<InputValue=InputValue, 
InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>,\n\n output_tx : Sender<Message<OutputValue, OutputError>>)\n\n -> FilterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n FilterWrap{ name: name, state: state, input_rx: input_rx, output_tx: output_tx }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> IdentifiedInput\n\n for FilterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> {\n\n if ch_id.0 == 0 {\n\n match &self.input_rx {\n\n &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref sender_name) => {\n\n Some((*channel_id, sender_name.clone()))\n\n },\n", "file_path": "src/elem/wrap/filter_wrap.rs", "rank": 8, "score": 163584.60897041752 }, { "content": "pub fn new<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send>(\n\n name : String,\n\n state : Box<Scatter<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>,\n\n output_tx_vec : Vec<Sender<Message<OutputValue, OutputError>>>)\n\n -> ScatterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n ScatterWrap{ name: name, state: state, input_rx: input_rx, output_tx_vec: output_tx_vec }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> IdentifiedInput\n\n for ScatterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> {\n\n if ch_id.0 != 0 {\n\n None\n\n } else {\n\n match &self.input_rx {\n\n &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref sender_name) => {\n", "file_path": "src/elem/wrap/scatter_wrap.rs", "rank": 9, "score": 163584.60897041752 }, { "content": "pub fn 
new<InputValue: Send, InputError: Send>(\n\n name : &str,\n\n sink : Box<Sink<InputValue=InputValue, InputError=InputError>+Send>)\n\n -> Box<sink_wrap::SinkWrap<InputValue, InputError>>\n\n{\n\n let name = String::from(name);\n\n Box::new(\n\n sink_wrap::new(\n\n name.clone(),\n\n sink,\n\n ChannelWrapper::ReceiverNotConnected(\n\n ReceiverChannelId(0),\n\n ReceiverName (name)\n\n )\n\n )\n\n )\n\n}\n", "file_path": "src/elem/sink.rs", "rank": 10, "score": 152952.086370807 }, { "content": "pub fn new<InputValue: Send, InputError: Send,\n\n OutputValueA: Send, OutputErrorA: Send,\n\n OutputValueB: Send, OutputErrorB: Send>(\n\n name : &str,\n\n output_a_q_size : usize,\n\n output_b_q_size : usize,\n\n ysplit : Box<YSplit<InputValue=InputValue, InputError=InputError,\n\n OutputValueA=OutputValueA, OutputErrorA=OutputErrorA,\n\n OutputValueB=OutputValueB, OutputErrorB=OutputErrorB>+Send>)\n\n -> (Box<ysplit_wrap::YSplitWrap<InputValue, InputError,\n\n OutputValueA, OutputErrorA,\n\n OutputValueB, OutputErrorB>>,\n\n Box<ChannelWrapper<OutputValueA, OutputErrorA>>,\n\n Box<ChannelWrapper<OutputValueB, OutputErrorB>>)\n\n{\n\n let (output_a_tx, output_a_rx) = channel(output_a_q_size);\n\n let (output_b_tx, output_b_rx) = channel(output_b_q_size);\n\n let name = String::from(name);\n\n\n\n (\n", "file_path": "src/elem/ysplit.rs", "rank": 11, "score": 152952.086370807 }, { "content": "pub fn new<InputValue: Send, InputError: Send,\n\n OutputValueA: Send, OutputErrorA: Send,\n\n OutputValueB: Send, OutputErrorB: Send>(\n\n name : String,\n\n state : Box<YSplit<InputValue=InputValue, InputError=InputError,\n\n OutputValueA=OutputValueA, OutputErrorA=OutputErrorA,\n\n OutputValueB=OutputValueB, OutputErrorB=OutputErrorB>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>,\n\n output_a_tx : Sender<Message<OutputValueA, OutputErrorA>>,\n\n output_b_tx : Sender<Message<OutputValueB, OutputErrorB>>)\n\n -> YSplitWrap<InputValue, InputError, OutputValueA, OutputErrorA, 
OutputValueB, OutputErrorB>\n\n{\n\n YSplitWrap{\n\n name: name,\n\n state: state,\n\n input_rx: input_rx,\n\n output_a_tx: output_a_tx,\n\n output_b_tx: output_b_tx\n\n }\n\n}\n", "file_path": "src/elem/wrap/ysplit_wrap.rs", "rank": 12, "score": 149222.33328032034 }, { "content": "pub fn new<InputValue: Send, InputError: Send>(\n\n name : String,\n\n state : Box<Sink<InputValue=InputValue, InputError=InputError>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>)\n\n -> SinkWrap<InputValue, InputError>\n\n{\n\n SinkWrap{ name: name, state: state, input_rx: input_rx }\n\n}\n\n\n\nimpl<InputValue: 'static+Send, InputError: 'static+Send> IdentifiedInput\n\n for SinkWrap<InputValue, InputError>\n\n{\n\n fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)>\n\n {\n\n if ch_id.0 != 0 {\n\n None\n\n } else {\n\n match &self.input_rx {\n\n &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref sender_name) => {\n\n Some((*channel_id, sender_name.clone()))\n", "file_path": "src/elem/wrap/sink_wrap.rs", "rank": 13, "score": 149222.33328032037 }, { "content": "pub fn disconnect_from<Value: Send, Error: Send>(me : &mut ChannelWrapper<Value, Error>,\n\n from : &mut ChannelWrapper<Value, Error>)\n\n -> Result<(), ActorError>\n\n{\n\n match me {\n\n &mut ChannelWrapper::ConnectedReceiver(..) => {\n\n match from {\n\n &mut ChannelWrapper::ConnectedSender(..) => {\n\n disconnect_receiver_from_sender(me, from)\n\n },\n\n &mut ChannelWrapper::ConnectedReceiver(..) => {\n\n Err(ActorError::InvalidChannelState(\n\n ExpectedChannelState(ChannelState::ConnectedSender),\n\n ActualChannelState(ChannelState::ConnectedReceiver)))\n\n },\n\n &mut ChannelWrapper::ReceiverNotConnected(..) 
=> {\n\n Err(ActorError::InvalidChannelState(\n\n ExpectedChannelState(ChannelState::ConnectedSender),\n\n ActualChannelState(ChannelState::ReceiverNotConnected)))\n\n },\n", "file_path": "src/elem/connectable.rs", "rank": 14, "score": 136313.19109587756 }, { "content": "pub fn connect_to<Value: Send, Error: Send>(me : &mut ChannelWrapper<Value, Error>,\n\n to : &mut ChannelWrapper<Value, Error>)\n\n -> Result<(), ActorError>\n\n{\n\n match me {\n\n &mut ChannelWrapper::ReceiverNotConnected(..) => {\n\n match to {\n\n &mut ChannelWrapper::SenderNotConnected(..) => {\n\n connect_receiver_to_sender(me, to)\n\n },\n\n &mut ChannelWrapper::ConnectedSender(..) => {\n\n Err(ActorError::InvalidChannelState(\n\n ExpectedChannelState(ChannelState::SenderNotConnected),\n\n ActualChannelState(ChannelState::ConnectedSender)))\n\n },\n\n &mut ChannelWrapper::ReceiverNotConnected(..) => {\n\n Err(ActorError::InvalidChannelState(\n\n ExpectedChannelState(ChannelState::SenderNotConnected),\n\n ActualChannelState(ChannelState::ReceiverNotConnected)))\n\n },\n", "file_path": "src/elem/connectable.rs", "rank": 15, "score": 136313.19109587756 }, { "content": "pub fn new(task: Box<Task+Send>) -> TaskWrap {\n\n let n_outputs = task.output_count();\n\n TaskWrap{\n\n task: task,\n\n output_positions: vec![(ChannelPosition(0), TaskId(0)); n_outputs],\n\n }\n\n}\n", "file_path": "src/scheduler/wrap.rs", "rank": 16, "score": 134509.9059632253 }, { "content": "pub fn position(idx: usize) -> (usize, usize) {\n\n // note: this depends on max_idx !!!\n\n (idx>>12, idx&0xfff)\n\n}\n\n\n\nimpl TaskPage {\n\n pub fn store(&mut self,\n\n idx: usize,\n\n task: Box<Task+Send>)\n\n {\n\n let wrap = Box::new(wrap::new(task));\n\n let slice = self.data.as_mut_slice();\n\n let data_ref = &mut slice[idx];\n\n let old = data_ref.0.swap(Box::into_raw(wrap), Ordering::AcqRel);\n\n if old.is_null() == false {\n\n // make sure we drop old pointers when swapped, although\n\n // this shouldn't happen since the 
SchedulerData must take care\n\n // of atomically increasing indices\n\n let _b = unsafe { Box::from_raw(old) };\n\n }\n", "file_path": "src/scheduler/page.rs", "rank": 17, "score": 133125.9031818463 }, { "content": "pub fn disconnect_receiver_from_sender<Value: Send, Error: Send>(rcv : &mut ChannelWrapper<Value, Error>,\n\n snd : &mut ChannelWrapper<Value, Error>)\n\n -> Result<(), ActorError>\n\n{\n\n use std::mem;\n\n\n\n let mut tmp_receiver = match snd {\n\n &mut ChannelWrapper::ConnectedSender(ref mut channel_id_rcv, ref mut receiver_name) => {\n\n match rcv {\n\n &mut ChannelWrapper::ConnectedReceiver(ref mut _channel_id_snd, ref mut _receiver, ref mut _sender_name) => {\n\n ChannelWrapper::ReceiverNotConnected::<Value, Error>(channel_id_rcv.receiver_id,receiver_name.clone())\n\n },\n\n &mut ChannelWrapper::ReceiverNotConnected(..) => {\n\n return Err(ActorError::InvalidChannelState(\n\n ExpectedChannelState(ChannelState::ConnectedReceiver),\n\n ActualChannelState(ChannelState::ReceiverNotConnected)));\n\n },\n\n &mut ChannelWrapper::ConnectedSender(..) 
=> {\n\n return Err(ActorError::InvalidChannelState(\n\n ExpectedChannelState(ChannelState::ConnectedReceiver),\n", "file_path": "src/elem/connectable.rs", "rank": 18, "score": 131502.31693966966 }, { "content": "pub fn connect_receiver_to_sender<Value: Send, Error: Send>(rcv : &mut ChannelWrapper<Value, Error>,\n\n snd : &mut ChannelWrapper<Value, Error>)\n\n -> Result<(), ActorError>\n\n{\n\n use std::mem;\n\n\n\n let (channel_id, mut tmp_sender) = match rcv {\n\n &mut ChannelWrapper::ReceiverNotConnected(ref mut receiver_channel_id, ref mut receiver_name) => {\n\n match snd {\n\n &mut ChannelWrapper::SenderNotConnected(ref mut sender_channel_id, ref mut _receiver, ref mut _sender_name) => {\n\n let channel_id = ChannelId{sender_id: *sender_channel_id, receiver_id: *receiver_channel_id};\n\n (channel_id, ChannelWrapper::ConnectedSender::<Value, Error>(channel_id.clone(), receiver_name.clone()))\n\n },\n\n &mut ChannelWrapper::ReceiverNotConnected(..) => {\n\n return Err(ActorError::InvalidChannelState(\n\n ExpectedChannelState(ChannelState::SenderNotConnected),\n\n ActualChannelState(ChannelState::ReceiverNotConnected)));\n\n },\n\n &mut ChannelWrapper::ConnectedReceiver(..) 
=> {\n\n return Err(ActorError::InvalidChannelState(\n", "file_path": "src/elem/connectable.rs", "rank": 19, "score": 131502.31693966966 }, { "content": "pub fn initial_capacity() -> usize {\n\n 1024*1024\n\n}\n\n\n\nimpl Drop for SchedulerData {\n\n fn drop(&mut self) {\n\n let len = self.l1.len();\n\n let l1_slice = self.l1.as_mut_slice();\n\n for i in 0..len {\n\n let l1_atomic_ptr = &mut l1_slice[i];\n\n let ptr = l1_atomic_ptr.swap(ptr::null_mut::<page::TaskPage>(), Ordering::AcqRel);\n\n if ptr.is_null() == false {\n\n // make sure we drop the pointers\n\n let _b = unsafe { Box::from_raw(ptr) };\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/scheduler/data.rs", "rank": 20, "score": 131456.51272438953 }, { "content": "pub fn max_idx() -> usize {\n\n // note: this must be aligned with position(idx)\n\n 4095\n\n}\n\n\n", "file_path": "src/scheduler/page.rs", "rank": 21, "score": 131456.51272438953 }, { "content": "pub fn new(_id: usize) -> TaskPage {\n\n let sz = max_idx()+1;\n\n let mut data = Vec::with_capacity(sz);\n\n\n\n for _i in 0..sz {\n\n // default flag is stopped: 16\n\n let f = ExecFlags(AtomicUsize::new(16));\n\n data.push( (AtomicPtr::default(), f, PeriodLengthInUsec(0)) );\n\n }\n\n\n\n TaskPage{\n\n data: data,\n\n }\n\n}\n\n\n\nimpl Drop for TaskPage {\n\n fn drop(&mut self) {\n\n self.print_stats();\n\n let slice = self.data.as_mut_slice();\n\n for i in 0..(1+max_idx()) {\n", "file_path": "src/scheduler/page.rs", "rank": 22, "score": 122245.23230327715 }, { "content": "pub fn new(spinned: Arc<AtomicUsize>) -> MeasuredPipelineSource {\n\n MeasuredPipelineSource::new(spinned)\n\n}\n\n\n\nimpl Drop for MeasuredPipelineSource {\n\n fn drop(&mut self) {\n\n self.print_stats();\n\n }\n\n}\n", "file_path": "src/sample/measured_pipeline_source.rs", "rank": 23, "score": 110344.1370764374 }, { "content": "pub fn new(spinned: Arc<AtomicUsize>) -> MeasuredPipelineSink {\n\n MeasuredPipelineSink::new(spinned)\n\n}\n\n\n\nimpl Drop for 
MeasuredPipelineSink {\n\n fn drop(&mut self) {\n\n self.print_stats();\n\n }\n\n}\n", "file_path": "src/sample/measured_pipeline_sink.rs", "rank": 24, "score": 110344.1370764374 }, { "content": "pub fn new(spinned: Arc<AtomicUsize>) -> MeasuredPipelineFilter {\n\n MeasuredPipelineFilter::new(spinned)\n\n}\n\n\n\nimpl Drop for MeasuredPipelineFilter {\n\n fn drop(&mut self) {\n\n self.print_stats();\n\n }\n\n}\n", "file_path": "src/sample/measured_pipeline_filter.rs", "rank": 25, "score": 110344.1370764374 }, { "content": "pub fn run() {\n\n let mut bs = BitSet::new(4096);\n\n bench_200ms(\"set-bitset\", |i| {\n\n bs.set((i%4096) as usize);\n\n });\n\n\n\n bench_200ms(\"clear-bitset\", |_i| {\n\n bs.clear_all();\n\n });\n\n\n\n bench_200ms(\"foreach-bitset\", |_i| {\n\n bs.for_each(|_pos, _val| {} );\n\n });\n\n\n\n bench_200ms(\"new-bitset\", |_i| {\n\n bs = BitSet::new(4096);\n\n });\n\n\n\n}\n", "file_path": "src/bench/bitset.rs", "rank": 26, "score": 107210.20901892945 }, { "content": "pub fn run() {\n\n time_baseline();\n\n spinner();\n\n atomic_ops();\n\n hash_map_10();\n\n bin_heap_4096();\n\n lossyq_send();\n\n lossyq_recv();\n\n lossyq_send_recv();\n\n lossyq_send_recv_3();\n\n mpsc_send();\n\n mpsc_recv();\n\n mpsc_send_recv();\n\n mpsc_send_recv_3();\n\n indirect_send_data();\n\n locked_data();\n\n locked_send_data();\n\n source_execute();\n\n source_execute_with_swap();\n\n}\n", "file_path": "src/bench/basics.rs", "rank": 27, "score": 107210.20901892945 }, { "content": "pub fn run() {\n\n bitset::run();\n\n basics::run();\n\n ext_pipeline_latency::run();\n\n}\n", "file_path": "src/bench/mod.rs", "rank": 28, "score": 107210.20901892945 }, { "content": "pub fn run() {\n\n for i in 1..10 {\n\n latency(1,20*i);\n\n latency(100000,20*i);\n\n latency(5000000,20*i);\n\n }\n\n}\n", "file_path": "src/bench/ext_pipeline_latency.rs", "rank": 29, "score": 103660.23240480901 }, { "content": "pub fn new() -> Scheduler {\n\n 
Scheduler::new()\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests;\n", "file_path": "src/scheduler/mod.rs", "rank": 30, "score": 102788.41533004455 }, { "content": "pub fn new() -> SchedulerData {\n\n SchedulerData::new()\n\n}\n\n\n", "file_path": "src/scheduler/data.rs", "rank": 31, "score": 101066.05896092448 }, { "content": "pub fn new() -> SchedulerDataHandle {\n\n SchedulerDataHandle::new()\n\n}\n", "file_path": "src/scheduler/handle.rs", "rank": 32, "score": 99440.12684207561 }, { "content": "fn bench_200ms<F>(name: &str, fun: F) where F : FnMut(u64) {\n\n let start = Instant::now();\n\n let mut diff;\n\n let mut diff_ns;\n\n let mut iteration = 0u64;\n\n let mut fun = fun;\n\n let mut repeat = 1;\n\n loop {\n\n // unrolled loop to reduce the number of timer calls and also amortize\n\n // the cost of the initial branch int the for's condition\n\n for _i in 0..repeat {\n\n fun(iteration); fun(iteration+1); fun(iteration+2); fun(iteration+3);\n\n fun(iteration+4); fun(iteration+5); fun(iteration+6); fun(iteration+7);\n\n fun(iteration+8); fun(iteration+9); fun(iteration+10); fun(iteration+11);\n\n fun(iteration+12); fun(iteration+13); fun(iteration+14); fun(iteration+15);\n\n fun(iteration+16); fun(iteration+17); fun(iteration+18); fun(iteration+19);\n\n fun(iteration+20); fun(iteration+21); fun(iteration+22); fun(iteration+23);\n\n fun(iteration+24); fun(iteration+25); fun(iteration+26); fun(iteration+27);\n\n fun(iteration+28); fun(iteration+29); fun(iteration+30); fun(iteration+31);\n\n fun(iteration+32); fun(iteration+33); fun(iteration+34); fun(iteration+35);\n", "file_path": "src/bench/mod.rs", "rank": 33, "score": 92030.11332014535 }, { "content": "struct ExecFlags (AtomicUsize);\n\n\n\npub struct TaskPage {\n\n data: Vec<(AtomicPtr<wrap::TaskWrap>, ExecFlags, PeriodLengthInUsec)>,\n\n}\n\n\n", "file_path": "src/scheduler/page.rs", "rank": 34, "score": 89089.2076060097 }, { "content": "fn mpsc_send() {\n\n use std::sync::mpsc;\n\n let (tx, _rx) = 
mpsc::channel();\n\n bench_200ms(\"mpsc-send\", |i| {\n\n tx.send(i).unwrap();\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 35, "score": 85790.76289319817 }, { "content": "fn lossyq_send() {\n\n let (mut tx, _rx) = channel(100);\n\n bench_200ms(\"lossyq-send\", |i| {\n\n tx.put(|v| *v = Some(i));\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 36, "score": 85790.76289319817 }, { "content": "fn lossyq_send_recv() {\n\n let (mut tx, mut rx) = channel(100);\n\n bench_200ms(\"lossyq-send-recv\", |i| {\n\n tx.put(|v| *v = Some(i));\n\n for _i in rx.iter() {\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 37, "score": 83942.01769437332 }, { "content": "fn mpsc_send_recv_3() {\n\n use std::sync::mpsc;\n\n let (tx, rx) = mpsc::channel();\n\n bench_200ms(\"mpsc-send-recv3\", |i| {\n\n tx.send(i).unwrap();\n\n tx.send(i).unwrap();\n\n tx.send(i).unwrap();\n\n for _i in 0..3 {\n\n let _x = rx.try_recv();\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 38, "score": 83942.01769437332 }, { "content": "fn locked_send_data() {\n\n use std::sync::{Arc, Mutex};\n\n let (tx, _rx) = channel(100);\n\n let locked = Arc::new(Mutex::new(tx));\n\n bench_200ms(\"std::mutex+send\", |i| {\n\n let mut x = locked.lock().unwrap();\n\n x.put(|v| *v = Some(i));\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 39, "score": 83942.01769437332 }, { "content": "fn indirect_send_data() {\n\n let (mut tx, _rx) = channel(100);\n\n let sender = |val: u64, chan: &mut Sender<u64>| {\n\n chan.put(|v: &mut Option<u64>| *v = Some(val));\n\n };\n\n bench_200ms(\"indirect-send\", |i| { sender(i, &mut tx); });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 40, "score": 83942.01769437332 }, { "content": "fn mpsc_send_recv() {\n\n use std::sync::mpsc;\n\n let (tx, rx) = mpsc::channel();\n\n bench_200ms(\"mpsc-send-recv\", |i| {\n\n tx.send(i).unwrap();\n\n rx.recv().unwrap();\n\n });\n\n}\n\n\n", "file_path": 
"src/bench/basics.rs", "rank": 41, "score": 83942.01769437332 }, { "content": "fn lossyq_send_recv_3() {\n\n let (mut tx, mut rx) = channel(100);\n\n bench_200ms(\"lossyq-send-recv3\", |i| {\n\n tx.put(|v| *v = Some(i));\n\n tx.put(|v| *v = Some(i));\n\n tx.put(|v| *v = Some(i));\n\n for _i in rx.iter() {\n\n }\n\n });\n\n}\n\n\n\n\n", "file_path": "src/bench/basics.rs", "rank": 42, "score": 83942.01769437332 }, { "content": "pub trait IdentifiedInput {\n\n fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)>;\n\n}\n", "file_path": "src/elem/identified_input.rs", "rank": 43, "score": 81046.0436656143 }, { "content": "fn latency(stop_delay: u32, dummies: usize) {\n\n let spinner = Spinner::new();\n\n let mut pipe = MeasuredPipeline::new(spinner.get(), dummies);\n\n pipe.start();\n\n let printout = format!(\"pipe-latency-{}-{}\",stop_delay,dummies);\n\n bench_200ms(printout.as_str(), |_v| {\n\n pipe.notify();\n\n pipe.wait();\n\n });\n\n unsafe { libc::usleep(stop_delay); }\n\n pipe.stop();\n\n spinner.stop();\n\n}\n\n\n", "file_path": "src/bench/ext_pipeline_latency.rs", "rank": 44, "score": 71804.15757256688 }, { "content": "struct BitSet {\n\n bits: Vec<u64>,\n\n}\n\n\n\nimpl BitSet {\n\n fn new(count: usize) -> BitSet {\n\n let count64 = 1 + (count/64);\n\n\n\n BitSet {\n\n bits: vec![0; count64],\n\n }\n\n }\n\n\n\n fn set(&mut self, at: usize) {\n\n let segment = at/64;\n\n let offset = at%64;\n\n let mut slice = self.bits.as_mut_slice();\n\n slice[segment] |= 1<<offset;\n\n }\n\n\n", "file_path": "src/bench/bitset.rs", "rank": 45, "score": 62373.94773032954 }, { "content": "#[allow(dead_code)]\n\nstruct DummyFilter {}\n\n\n\nimpl filter::Filter for DummyFilter {\n\n type InputValue = usize;\n\n type InputError = &'static str;\n\n type OutputValue = usize;\n\n type OutputError = &'static str;\n\n\n\n fn process(\n\n &mut self,\n\n _input: &mut ChannelWrapper<Self::InputValue, Self::InputError>,\n\n _output: &mut 
Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n _stop: &mut bool)\n\n {\n\n }\n\n}\n", "file_path": "src/sample/dummy_filter.rs", "rank": 46, "score": 61412.53827113786 }, { "content": "#[cfg(not(feature = \"bench\"))]\n\nfn main() {\n\n println!(\"not running benchmarks. if you need them add --features \\\"bench\\\" flag\");\n\n}\n", "file_path": "src/main.rs", "rank": 47, "score": 56089.24512370193 }, { "content": "pub trait Task {\n\n fn execute(&mut self, stop: &mut bool);\n\n fn name(&self) -> &String;\n\n fn input_count(&self) -> usize;\n\n fn output_count(&self) -> usize;\n\n fn input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)>;\n\n fn input_channel_pos(&self, ch_id: ReceiverChannelId) -> ChannelPosition;\n\n fn output_channel_pos(&self, ch_id: SenderChannelId) -> ChannelPosition;\n\n}\n\n\n\npub enum ChannelWrapper<Value: Send, Error: Send> {\n\n ReceiverNotConnected(ReceiverChannelId, ReceiverName),\n\n ConnectedReceiver(ChannelId, Receiver<Message<Value, Error>>, SenderName),\n\n SenderNotConnected(SenderChannelId, Receiver<Message<Value, Error>>, SenderName),\n\n ConnectedSender(ChannelId, ReceiverName),\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests;\n\n\n\n#[cfg(any(test, feature = \"bench\"))]\n\npub mod sample;\n\n\n\n#[cfg(any(test, feature = \"bench\"))]\n\npub mod bench;\n", "file_path": "src/lib.rs", "rank": 48, "score": 55507.79884617111 }, { "content": "fn spinner() {\n\n let sp = spinner::Spinner::new();\n\n let counter = sp.get();\n\n bench_200ms(\"spinner\", |_v| {\n\n let _x = counter.load(Ordering::Acquire);\n\n });\n\n sp.stop();\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 49, "score": 55060.181501831845 }, { "content": "pub trait Gather {\n\n type InputValue : Send;\n\n type InputError : Send;\n\n type OutputValue : Send;\n\n type OutputError : Send;\n\n\n\n fn process(\n\n &mut self,\n\n input: &mut Vec<ChannelWrapper<Self::InputValue, Self::InputError>>,\n\n output: &mut 
Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n stop: &mut bool);\n\n}\n\n\n", "file_path": "src/elem/gather.rs", "rank": 50, "score": 54531.13171660085 }, { "content": "pub trait Connectable {\n\n type InputValue: Send;\n\n type InputError: Send;\n\n\n\n fn input(&mut self) -> &mut ChannelWrapper<Self::InputValue, Self::InputError>;\n\n\n\n fn connect(&mut self,\n\n other: &mut ChannelWrapper<Self::InputValue, Self::InputError>)\n\n -> Result<(), ActorError>\n\n {\n\n connect_to(self.input(), other)\n\n }\n\n\n\n fn disconnect(&mut self,\n\n other: &mut ChannelWrapper<Self::InputValue, Self::InputError>)\n\n -> Result<(), ActorError> {\n\n disconnect_from(self.input(), other)\n\n }\n\n}\n\n\n", "file_path": "src/elem/connectable.rs", "rank": 51, "score": 54531.13171660085 }, { "content": "pub trait Source {\n\n type OutputValue : Send;\n\n type OutputError : Send;\n\n\n\n fn process(\n\n &mut self,\n\n output: &mut Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n stop: &mut bool);\n\n}\n\n\n", "file_path": "src/elem/source.rs", "rank": 52, "score": 54531.13171660085 }, { "content": "pub trait Filter {\n\n type InputValue : Send;\n\n type InputError : Send;\n\n type OutputValue : Send;\n\n type OutputError : Send;\n\n\n\n fn process(\n\n &mut self,\n\n input: &mut ChannelWrapper<Self::InputValue, Self::InputError>,\n\n output: &mut Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n stop: &mut bool);\n\n}\n\n\n", "file_path": "src/elem/filter.rs", "rank": 53, "score": 54531.13171660085 }, { "content": "pub trait Scatter {\n\n type InputValue : Send;\n\n type InputError : Send;\n\n type OutputValue : Send;\n\n type OutputError : Send;\n\n\n\n fn process(\n\n &mut self,\n\n input: &mut ChannelWrapper<Self::InputValue, Self::InputError>,\n\n output: &mut Vec<Sender<Message<Self::OutputValue, Self::OutputError>>>,\n\n stop: &mut bool);\n\n}\n\n\n", "file_path": "src/elem/scatter.rs", "rank": 54, "score": 54531.13171660085 }, { "content": 
"pub trait Sink {\n\n type InputValue : Send;\n\n type InputError : Send;\n\n\n\n fn process(\n\n &mut self,\n\n input: &mut ChannelWrapper<Self::InputValue, Self::InputError>,\n\n stop: &mut bool);\n\n}\n\n\n", "file_path": "src/elem/sink.rs", "rank": 55, "score": 54531.13171660085 }, { "content": "fn mpsc_recv() {\n\n use std::sync::mpsc;\n\n let (tx, rx) = mpsc::channel();\n\n tx.send(0u64).unwrap();\n\n bench_200ms(\"mpsc-recv\", |_i| {\n\n let _tr = rx.try_recv();\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 56, "score": 54094.01033782217 }, { "content": "fn atomic_ops() {\n\n let val = AtomicUsize::new(0);\n\n bench_200ms(\"fetch-add-relaxed\", |_v| {\n\n val.fetch_add(1, Ordering::Relaxed);\n\n });\n\n bench_200ms(\"fetch-add-seqcst\", |_v| {\n\n val.fetch_add(1, Ordering::SeqCst);\n\n });\n\n bench_200ms(\"fetch-add-acqrel\", |_v| {\n\n val.fetch_add(1, Ordering::AcqRel);\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 57, "score": 54094.01033782217 }, { "content": "fn source_execute() {\n\n let (mut source_task, mut _source_out) =\n\n source::new( \"Source\", 2, Box::new(DummySource{}));\n\n let mut stop = false;\n\n bench_200ms(\"source-execute\", |_i| {\n\n source_task.execute(&mut stop);\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 58, "score": 54094.01033782217 }, { "content": "fn time_baseline() {\n\n bench_200ms(\"time-baseline\", |_v| {} );\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 59, "score": 54094.01033782217 }, { "content": "fn hash_map_10() {\n\n let mut m = HashMap::new();\n\n for i in 0..10u64 {\n\n m.insert(i, i);\n\n }\n\n bench_200ms(\"hash-map-nonex\", |v| {\n\n let _x = m.get(&v);\n\n });\n\n bench_200ms(\"hash-map-seq\", |v| {\n\n let k = v%10;\n\n let _x = m.get(&k);\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 60, "score": 54094.01033782217 }, { "content": "#[test]\n\nfn connect_disconnect() {\n\n let (_source_task, mut source_out) = 
source::new( \"Source\", 20, Box::new(dummy_source::DummySource{}));\n\n let mut sink_task = sink::new( \"Sink\", Box::new(dummy_sink::DummySink{}));\n\n\n\n assert!(sink_task.connect(&mut source_out).is_ok());\n\n assert!(sink_task.disconnect(&mut source_out).is_ok());\n\n assert!(sink_task.connect(&mut source_out).is_ok());\n\n}\n", "file_path": "src/elem/tests.rs", "rank": 61, "score": 54094.01033782217 }, { "content": "fn bin_heap_4096() {\n\n let mut h = BinaryHeap::with_capacity(4096);\n\n for i in 0..4096u64 {\n\n h.push(i);\n\n }\n\n bench_200ms(\"binheap_4k_add_remove\", |v| {\n\n let _popped = h.pop();\n\n h.push(v%4096);\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 62, "score": 54094.01033782217 }, { "content": "fn locked_data() {\n\n use std::sync::{Arc, Mutex};\n\n let locked = Arc::new(Mutex::new(0u64));\n\n bench_200ms(\"std::mutex\", |_i| {\n\n let mut _x = locked.lock().unwrap();\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 63, "score": 54094.01033782217 }, { "content": "fn lossyq_recv() {\n\n let (mut _tx, mut rx) = channel::<u64>(100);\n\n bench_200ms(\"lossyq-recv\", |_i| {\n\n for _ii in rx.iter() {\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 64, "score": 54094.01033782217 }, { "content": "pub trait ConnectableYN {\n\n type InputValueA: Send;\n\n type InputErrorA: Send;\n\n type InputValueB: Send;\n\n type InputErrorB: Send;\n\n\n\n fn input_a(&mut self,\n\n n: ReceiverChannelId) -> &mut ChannelWrapper<Self::InputValueA, Self::InputErrorA>;\n\n\n\n fn input_b(&mut self,\n\n n: ReceiverChannelId) -> &mut ChannelWrapper<Self::InputValueB, Self::InputErrorB>;\n\n\n\n fn connect_a(&mut self,\n\n n: ReceiverChannelId,\n\n other: &mut ChannelWrapper<Self::InputValueA, Self::InputErrorA>)\n\n -> Result<(), ActorError>\n\n {\n\n connect_to(self.input_a(n), other)\n\n }\n\n\n", "file_path": "src/elem/connectable.rs", "rank": 65, "score": 53612.38483760047 }, { "content": "pub trait 
ConnectableY {\n\n type InputValueA: Send;\n\n type InputErrorA: Send;\n\n type InputValueB: Send;\n\n type InputErrorB: Send;\n\n\n\n fn input_a(&mut self) -> &mut ChannelWrapper<Self::InputValueA, Self::InputErrorA>;\n\n fn input_b(&mut self) -> &mut ChannelWrapper<Self::InputValueB, Self::InputErrorB>;\n\n\n\n fn connect_a(&mut self,\n\n other: &mut ChannelWrapper<Self::InputValueA, Self::InputErrorA>)\n\n -> Result<(), ActorError>\n\n {\n\n connect_to(self.input_a(), other)\n\n }\n\n\n\n fn connect_b(&mut self,\n\n other: &mut ChannelWrapper<Self::InputValueB, Self::InputErrorB>)\n\n -> Result<(), ActorError>\n\n {\n", "file_path": "src/elem/connectable.rs", "rank": 66, "score": 53612.38483760047 }, { "content": "pub trait ConnectableN {\n\n type InputValue: Send;\n\n type InputError: Send;\n\n\n\n fn input(&mut self,\n\n n: ReceiverChannelId) -> &mut ChannelWrapper<Self::InputValue, Self::InputError>;\n\n\n\n fn connect(&mut self,\n\n n: ReceiverChannelId,\n\n other: &mut ChannelWrapper<Self::InputValue, Self::InputError>)\n\n -> Result<(), ActorError>\n\n {\n\n connect_to(self.input(n), other)\n\n }\n\n\n\n fn disconnect(&mut self,\n\n n: ReceiverChannelId,\n\n other: &mut ChannelWrapper<Self::InputValue, Self::InputError>)\n\n -> Result<(), ActorError>\n\n {\n\n disconnect_from(self.input(n), other)\n\n }\n\n}\n\n\n", "file_path": "src/elem/connectable.rs", "rank": 67, "score": 53612.38483760047 }, { "content": "pub trait OutputCounter {\n\n fn get_tx_count(&self, ch_id: SenderChannelId) -> usize;\n\n}\n", "file_path": "src/elem/counter.rs", "rank": 68, "score": 53612.38483760047 }, { "content": "pub trait YSplit {\n\n type InputValue : Send;\n\n type InputError : Send;\n\n type OutputValueA : Send;\n\n type OutputErrorA : Send;\n\n type OutputValueB : Send;\n\n type OutputErrorB : Send;\n\n\n\n fn process(\n\n &mut self,\n\n input: &mut ChannelWrapper<Self::InputValue, Self::InputError>,\n\n output_a: &mut Sender<Message<Self::OutputValueA, 
Self::OutputErrorA>>,\n\n output_b: &mut Sender<Message<Self::OutputValueB, Self::OutputErrorB>>,\n\n stop: &mut bool);\n\n}\n\n\n", "file_path": "src/elem/ysplit.rs", "rank": 69, "score": 53612.38483760047 }, { "content": "pub trait YMerge {\n\n type InputValueA : Send;\n\n type InputErrorA : Send;\n\n type InputValueB : Send;\n\n type InputErrorB : Send;\n\n type OutputValue : Send;\n\n type OutputError : Send;\n\n\n\n fn process(\n\n &mut self,\n\n input_a: &mut ChannelWrapper<Self::InputValueA, Self::InputErrorA>,\n\n input_b: &mut ChannelWrapper<Self::InputValueB, Self::InputErrorB>,\n\n output: &mut Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n stop: &mut bool);\n\n}\n\n\n", "file_path": "src/elem/ymerge.rs", "rank": 70, "score": 53612.38483760047 }, { "content": "pub trait InputCounter {\n\n fn get_rx_count(&self, ch_id: ReceiverChannelId) -> usize;\n\n}\n\n\n", "file_path": "src/elem/counter.rs", "rank": 71, "score": 53612.38483760047 }, { "content": "#[allow(dead_code)]\n\nfn setup_pipeline() {\n\n\n\n let mut sched = Scheduler::new();\n\n sched.start_with_threads(4);\n\n\n\n let dummy_queue_size = 2_000;\n\n let (source_task, mut source_out) =\n\n source::new( \"Source\", dummy_queue_size, Box::new(DummySource{}));\n\n\n\n let mut sink_task =\n\n sink::new( \"Sink\", Box::new(DummySink{}));\n\n\n\n sink_task.connect(&mut source_out).unwrap();\n\n\n\n let source_id = sched.add_task(source_task, SchedulingRule::OnExternalEvent).unwrap();\n\n sched.add_task(sink_task, SchedulingRule::OnMessage).unwrap();\n\n\n\n sched.notify(&source_id).unwrap();\n\n\n\n sched.stop();\n\n}\n", "file_path": "src/sample/dummy_pipeline.rs", "rank": 72, "score": 53185.13697182218 }, { "content": "fn source_execute_with_swap() {\n\n use std::sync::atomic::{AtomicPtr, Ordering};\n\n use std::ptr;\n\n\n\n let (source_task, mut _source_out) =\n\n source::new( \"Source\", 2, Box::new(DummySource{}));\n\n let source_ptr = 
AtomicPtr::new(Box::into_raw(source_task));\n\n\n\n bench_200ms(\"source-execute-w-swap\", |_i| {\n\n let old_ptr = source_ptr.swap(ptr::null_mut(), Ordering::AcqRel);\n\n let mut stop = false;\n\n unsafe { (*old_ptr).execute(&mut stop); }\n\n source_ptr.swap(old_ptr, Ordering::AcqRel);\n\n });\n\n\n\n let _bx = unsafe { Box::from_raw(source_ptr.swap(ptr::null_mut(), Ordering::AcqRel)) };\n\n}\n\n\n", "file_path": "src/bench/basics.rs", "rank": 73, "score": 53185.13697182218 }, { "content": "use lossyq::spsc::{Sender};\n\nuse super::super::super::{Task, Message, ChannelId, SenderName,\n\n SenderChannelId, ReceiverChannelId, ChannelPosition\n\n};\n\nuse super::super::counter::{OutputCounter};\n\nuse super::super::source::{Source};\n\n\n\npub struct SourceWrap<OutputValue: Send, OutputError: Send>\n\n{\n\n name : String,\n\n state : Box<Source<OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n output_tx : Sender<Message<OutputValue, OutputError>>,\n\n}\n\n\n", "file_path": "src/elem/wrap/source_wrap.rs", "rank": 74, "score": 16.764065496535945 }, { "content": "use lossyq::spsc::{Sender};\n\nuse super::super::super::{Task, Message, ChannelWrapper, ChannelId,\n\n SenderChannelId, ReceiverChannelId, SenderName, ChannelPosition\n\n};\n\nuse super::super::connectable::{Connectable};\n\nuse super::super::identified_input::{IdentifiedInput};\n\nuse super::super::counter::{OutputCounter, InputCounter};\n\nuse super::super::filter::{Filter};\n\n\n\npub struct FilterWrap<InputValue: Send, InputError: Send,\n\n OutputValue: Send, OutputError: Send> {\n\n name : String,\n\n state : Box<Filter<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>,\n\n output_tx : Sender<Message<OutputValue, OutputError>>,\n\n}\n\n\n", "file_path": "src/elem/wrap/filter_wrap.rs", "rank": 75, "score": 14.725155657930804 }, { "content": "use lossyq::spsc::{Sender};\n\nuse 
super::super::super::{Task, Message, ChannelWrapper, ChannelId,\n\n SenderName, SenderChannelId, ReceiverChannelId, ChannelPosition\n\n};\n\nuse super::super::connectable::{Connectable};\n\nuse super::super::identified_input::{IdentifiedInput};\n\nuse super::super::counter::{OutputCounter, InputCounter};\n\nuse super::super::scatter::{Scatter};\n\n\n\npub struct ScatterWrap<InputValue: Send, InputError: Send,\n\n OutputValue: Send, OutputError: Send> {\n\n name : String,\n\n state : Box<Scatter<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>,\n\n output_tx_vec : Vec<Sender<Message<OutputValue, OutputError>>>,\n\n}\n\n\n", "file_path": "src/elem/wrap/scatter_wrap.rs", "rank": 76, "score": 14.601587010808657 }, { "content": "use lossyq::spsc::{Sender};\n\nuse super::super::super::{Task, Message, ChannelWrapper, ChannelId,\n\n SenderName, SenderChannelId, ReceiverChannelId, ChannelPosition\n\n};\n\nuse super::super::connectable::{ConnectableN};\n\nuse super::super::identified_input::{IdentifiedInput};\n\nuse super::super::counter::{OutputCounter, InputCounter};\n\nuse super::super::gather::{Gather};\n\n\n\npub struct GatherWrap<InputValue: Send, InputError: Send,\n\n OutputValue: Send, OutputError: Send> {\n\n name : String,\n\n state : Box<Gather<InputValue=InputValue, InputError=InputError,\n\n OutputValue=OutputValue, OutputError=OutputError>+Send>,\n\n input_rx_vec : Vec<ChannelWrapper<InputValue, InputError>>,\n\n output_tx : Sender<Message<OutputValue, OutputError>>,\n\n}\n\n\n", "file_path": "src/elem/wrap/gather_wrap.rs", "rank": 77, "score": 14.540668122180938 }, { "content": " },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> InputCounter\n\n for GatherWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn get_rx_count(&self, ch_id: 
ReceiverChannelId) -> usize {\n\n if ch_id.0 < self.input_rx_vec.len() {\n\n let slice = self.input_rx_vec.as_slice();\n\n match &slice[ch_id.0] {\n\n &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) => {\n\n receiver.seqno()\n\n },\n\n _ => 0,\n", "file_path": "src/elem/wrap/gather_wrap.rs", "rank": 78, "score": 14.351712481751964 }, { "content": "use lossyq::spsc::{Sender};\n\nuse super::super::super::{Task, Message, ChannelWrapper, ChannelId,\n\n SenderChannelId, ReceiverChannelId, SenderName, ChannelPosition\n\n};\n\nuse super::super::connectable::{Connectable};\n\nuse super::super::identified_input::{IdentifiedInput};\n\nuse super::super::counter::{OutputCounter, InputCounter};\n\nuse super::super::ysplit::{YSplit};\n\n\n\npub struct YSplitWrap<InputValue: Send, InputError: Send,\n\n OutputValueA: Send, OutputErrorA: Send,\n\n OutputValueB: Send, OutputErrorB: Send> {\n\n name : String,\n\n state : Box<YSplit<InputValue=InputValue, InputError=InputError,\n\n OutputValueA=OutputValueA, OutputErrorA=OutputErrorA,\n\n OutputValueB=OutputValueB, OutputErrorB=OutputErrorB>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>,\n\n output_a_tx : Sender<Message<OutputValueA, OutputErrorA>>,\n\n output_b_tx : Sender<Message<OutputValueB, OutputErrorB>>,\n\n}\n\n\n", "file_path": "src/elem/wrap/ysplit_wrap.rs", "rank": 81, "score": 13.77773108443689 }, { "content": "use super::super::super::{Task, ChannelWrapper, ChannelId, SenderName,\n\n ReceiverChannelId, SenderChannelId, ChannelPosition\n\n};\n\nuse super::super::connectable::{Connectable};\n\nuse super::super::identified_input::{IdentifiedInput};\n\nuse super::super::counter::{InputCounter};\n\nuse super::super::sink::{Sink};\n\n\n\npub struct SinkWrap<InputValue: Send, InputError: Send> {\n\n name : String,\n\n state : Box<Sink<InputValue=InputValue, InputError=InputError>+Send>,\n\n input_rx : ChannelWrapper<InputValue, InputError>,\n\n}\n\n\n", "file_path": 
"src/elem/wrap/sink_wrap.rs", "rank": 82, "score": 13.666413440183277 }, { "content": "\n\nimpl<InputValue: Send, InputError: Send,\n\n OutputValueA: Send, OutputErrorA: Send,\n\n OutputValueB: Send, OutputErrorB: Send> IdentifiedInput\n\n for YSplitWrap<InputValue, InputError,\n\n OutputValueA, OutputErrorA,\n\n OutputValueB, OutputErrorB>\n\n{\n\n fn get_input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> {\n\n if ch_id.0 != 0 {\n\n None\n\n } else {\n\n match &self.input_rx {\n\n &ChannelWrapper::ConnectedReceiver(ref channel_id, ref _receiver, ref sender_name) => {\n\n Some((*channel_id, sender_name.clone()))\n\n },\n\n _ => None,\n\n }\n\n }\n\n }\n", "file_path": "src/elem/wrap/ysplit_wrap.rs", "rank": 83, "score": 13.472556375697579 }, { "content": "}\n\n\n\n#[derive(Copy,Clone,Debug,PartialEq,Eq)]\n\npub struct PeriodLengthInUsec (pub usize);\n\n\n\n#[derive(Copy,Clone,Debug)]\n\npub enum SchedulingRule {\n\n Loop,\n\n OnMessage,\n\n Periodic(PeriodLengthInUsec),\n\n OnExternalEvent,\n\n}\n\n\n\n#[derive(Copy,Clone,Debug,PartialEq,Eq,Hash)]\n\npub struct TaskId (usize);\n\n\n\n\n\n#[derive(Clone,Debug,PartialEq,Eq)]\n\npub struct SenderName (pub String);\n\n\n\n#[derive(Clone,Debug,PartialEq,Eq)]\n\npub struct ReceiverName (pub String);\n\n\n", "file_path": "src/lib.rs", "rank": 84, "score": 13.367068293189565 }, { "content": " _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> InputCounter\n\n for FilterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn get_rx_count(&self, ch_id: ReceiverChannelId) -> usize {\n\n if ch_id.0 == 0 {\n\n if let &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) = &self.input_rx {\n\n receiver.seqno()\n\n } else {\n\n 0\n\n }\n\n } else {\n\n 0\n", "file_path": "src/elem/wrap/filter_wrap.rs", "rank": 85, "score": 13.339944585274568 }, { "content": " 
Some((*channel_id, sender_name.clone()))\n\n },\n\n _ => None,\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> InputCounter\n\n for ScatterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn get_rx_count(&self, ch_id: ReceiverChannelId) -> usize {\n\n if ch_id.0 == 0 {\n\n if let &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) = &self.input_rx {\n\n receiver.seqno()\n\n } else {\n\n 0\n\n }\n\n } else {\n\n 0\n", "file_path": "src/elem/wrap/scatter_wrap.rs", "rank": 87, "score": 12.966743604850183 }, { "content": " },\n\n _ => None,\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send> InputCounter\n\n for SinkWrap<InputValue, InputError>\n\n{\n\n fn get_rx_count(&self, ch_id: ReceiverChannelId) -> usize {\n\n if ch_id.0 == 0 {\n\n if let &ChannelWrapper::ConnectedReceiver(ref _channel_id, ref receiver, ref _sender_name) = &self.input_rx {\n\n receiver.seqno()\n\n } else {\n\n 0\n\n }\n\n } else {\n\n 0\n\n }\n", "file_path": "src/elem/wrap/sink_wrap.rs", "rank": 88, "score": 12.754364382833463 }, { "content": "\n\nimpl<OutputValue: Send, OutputError: Send> Task\n\n for SourceWrap<OutputValue, OutputError>\n\n{\n\n fn execute(&mut self, stop: &mut bool) {\n\n self.state.process(&mut self.output_tx, stop);\n\n }\n\n fn name(&self) -> &String { &self.name }\n\n fn input_count(&self) -> usize { 0 }\n\n fn output_count(&self) -> usize { 1 }\n\n\n\n fn input_id(&self, _ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> {\n\n None\n\n }\n\n\n\n fn input_channel_pos(&self, _ch_id: ReceiverChannelId) -> ChannelPosition {\n\n ChannelPosition( 0 )\n\n }\n\n\n\n fn output_channel_pos(&self, ch_id: SenderChannelId) -> ChannelPosition {\n\n ChannelPosition( self.get_tx_count(ch_id) )\n\n }\n\n}\n", "file_path": "src/elem/wrap/source_wrap.rs", "rank": 89, "score": 12.516615764737521 }, { "content": "pub struct ExpectedChannelState (pub 
ChannelState);\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct ActualChannelState (pub ChannelState);\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum Error {\n\n Busy,\n\n NonExistent,\n\n Stopping,\n\n AlreadyExists,\n\n InvalidChannelState(ExpectedChannelState, ActualChannelState)\n\n}\n\n\n\n#[derive(Copy,Clone,Debug,PartialEq,Eq)]\n\npub struct InclusiveMessageRange {\n\n pub from: usize,\n\n pub to: usize,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 90, "score": 12.431650201360215 }, { "content": " type InputError = InputError;\n\n\n\n fn input(&mut self) -> &mut ChannelWrapper<InputValue, InputError> {\n\n &mut self.input_rx\n\n }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> Task\n\n for FilterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn execute(&mut self, stop: &mut bool) {\n\n self.state.process(&mut self.input_rx, &mut self.output_tx, stop);\n\n }\n\n fn name(&self) -> &String { &self.name }\n\n fn input_count(&self) -> usize { 1 }\n\n fn output_count(&self) -> usize { 1 }\n\n\n\n fn input_id(&self, ch_id: ReceiverChannelId) -> Option<(ChannelId, SenderName)> {\n\n self.get_input_id(ch_id)\n\n }\n", "file_path": "src/elem/wrap/filter_wrap.rs", "rank": 91, "score": 11.374669343520104 }, { "content": " type InputValue = InputValue;\n\n type InputError = InputError;\n\n\n\n fn input(&mut self) -> &mut ChannelWrapper<Self::InputValue, Self::InputError> {\n\n &mut self.input_rx\n\n }\n\n}\n\n\n\nimpl<InputValue: Send, InputError: Send, OutputValue: Send, OutputError: Send> Task\n\n for ScatterWrap<InputValue, InputError, OutputValue, OutputError>\n\n{\n\n fn execute(&mut self, stop: &mut bool) {\n\n self.state.process(&mut self.input_rx,\n\n &mut self.output_tx_vec,\n\n stop);\n\n }\n\n\n\n fn name(&self) -> &String { &self.name }\n\n fn input_count(&self) -> usize { 1 }\n\n fn output_count(&self) -> usize { self.output_tx_vec.len() }\n", "file_path": "src/elem/wrap/scatter_wrap.rs", 
"rank": 92, "score": 11.28006776584054 }, { "content": "#[derive(Copy,Clone,Debug,PartialEq,Eq)]\n\npub struct ChannelPosition (pub usize);\n\n\n\n#[derive(Copy, Clone,Debug)]\n\npub enum Message<ValueType: Send, ErrorType: Send> {\n\n Value(ValueType),\n\n Ack(InclusiveMessageRange),\n\n Error(ChannelPosition, ErrorType),\n\n}\n\n\n\n#[derive(Copy,Clone,Debug,PartialEq,Eq)]\n\npub struct SenderChannelId (pub usize);\n\n\n\n#[derive(Copy,Clone,Debug,PartialEq,Eq)]\n\npub struct ReceiverChannelId (pub usize);\n\n\n\n#[derive(Copy,Clone,Debug,PartialEq,Eq)]\n\npub struct ChannelId {\n\n pub sender_id: SenderChannelId,\n\n pub receiver_id: ReceiverChannelId,\n", "file_path": "src/lib.rs", "rank": 93, "score": 11.125097639261309 }, { "content": "\n\nuse lossyq::spsc::Sender;\n\nuse super::super::elem::source;\n\nuse super::super::{Message};\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::Arc;\n\n\n\n#[allow(dead_code)]\n\npub struct MeasuredPipelineSource {\n\n on_exec: u64,\n\n spinned: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl source::Source for MeasuredPipelineSource {\n\n type OutputValue = usize;\n\n type OutputError = &'static str;\n\n\n\n fn process(&mut self,\n\n output: &mut Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n _stop: &mut bool)\n", "file_path": "src/sample/measured_pipeline_source.rs", "rank": 95, "score": 10.706067626557946 }, { "content": "\n\nuse lossyq::spsc::Sender;\n\nuse super::super::elem::source;\n\nuse super::super::{Message};\n\n\n\npub struct DummySource {}\n\n\n\nimpl source::Source for DummySource {\n\n type OutputValue = usize;\n\n type OutputError = &'static str;\n\n\n\n fn process(&mut self,\n\n _output: &mut Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n _stop: &mut bool)\n\n {\n\n }\n\n}\n\n\n\nimpl DummySource {\n\n pub fn new() -> DummySource {\n\n DummySource{}\n\n }\n\n}\n", "file_path": "src/sample/dummy_source.rs", "rank": 96, "score": 10.555847095100425 }, { "content": "\n\nuse 
super::super::elem::sink;\n\nuse super::super::{ChannelWrapper, Message};\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::Arc;\n\nuse super::tick::Tick;\n\n\n\n#[allow(dead_code)]\n\npub struct MeasuredPipelineSink {\n\n latency: u64,\n\n count: u64,\n\n exec: u64,\n\n start: usize,\n\n spinned: Arc<AtomicUsize>,\n\n elapsed: Tick,\n\n others_spins: u64,\n\n last_spin: usize,\n\n}\n\n\n\nimpl sink::Sink for MeasuredPipelineSink {\n", "file_path": "src/sample/measured_pipeline_sink.rs", "rank": 97, "score": 10.49981419574608 }, { "content": "\n\nuse lossyq::spsc::Sender;\n\nuse super::super::elem::filter;\n\nuse super::super::{ChannelWrapper, Message};\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::Arc;\n\n\n\n#[allow(dead_code)]\n\npub struct MeasuredPipelineFilter {\n\n on_exec: u64,\n\n on_msg: u64,\n\n latency: u64,\n\n spinned: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl filter::Filter for MeasuredPipelineFilter {\n\n type InputValue = usize;\n\n type InputError = &'static str;\n\n type OutputValue = usize;\n\n type OutputError = &'static str;\n", "file_path": "src/sample/measured_pipeline_filter.rs", "rank": 98, "score": 10.286298647064804 }, { "content": "### Overview\n\n\n\n- Implement the actors based on one of the elem traits\n\n- Start/stop the scheduler\n\n- Pass the actor instances to the scheduler\n\n\n\n### Creating the actors\n\n\n\nThe actors need to implement one of the traits above. 
Examples:\n\n\n\n- Source: [dummy source](/src/sample/dummy_source.rs)\n\n- Filter: [dummy filter](/src/sample/dummy_source.rs)\n\n- Sink: [dummy sink](/src/sample/dummy_source.rs)\n\n\n\n#### Creating a source element\n\n\n\nThis is a somewhat more realistic element that reads UDP messages from the network and passes it forward to the next element in the topology.\n\n\n\n```rust\n\nuse actors::*;\n\nuse std::net::{UdpSocket, SocketAddr, Ipv4Addr, SocketAddrV4};\n\nuse std::io;\n\nuse std::mem;\n\n\n\npub struct ReadBytes {\n\n socket: UdpSocket\n\n}\n\n\n\n//\n\n// this item reads 1024 bytes on UDP and passes the data forward with\n\n// the data size and the sender address. if an error happens, then the\n\n// error goes forward instead.\n\n//\n\nimpl source::Source for ReadBytes {\n\n\n\n type OutputValue = ([u8; 1024], (usize, SocketAddr));\n\n type OutputError = io::Error;\n\n\n\n fn process(&mut self,\n\n output: &mut Sender<Message<Self::OutputValue, Self::OutputError>>,\n\n _stop: &mut bool)\n\n {\n\n output.put(|value| {\n\n if let &mut Some(Message::Value(ref mut item)) = value {\n\n // re-use the preallocated space in the queue\n\n match self.socket.recv_from(&mut item.0) {\n\n Ok((read_bytes, from_addr)) => {\n\n item.1 = (read_bytes, from_addr);\n\n },\n\n Err(io_error) => {\n\n // swap in the error message\n\n let error_message = Some(Message::Error(ChannelPosition(output.seqno()), io_error));\n\n mem::swap(value, &mut error_message);\n\n }\n\n };\n\n } else {\n\n // allocate new buffer and swap it in\n\n let dummy_address = Ipv4Addr::from(0);\n\n let dummy_sockaddr = SocketAddrV4::new(dummy_address, 1);\n\n let item = ([0; 1024],(0, SocketAddr::V4(dummy_sockaddr)));\n\n\n\n match self.socket.recv_from(&mut item.0) {\n\n Ok((read_bytes, from_addr)) => {\n\n item.1 = (read_bytes, from_addr);\n\n let message = Some(Message::Value(item));\n\n mem::swap(value, &mut message);\n\n },\n\n Err(io_error) => {\n\n // swap in the error message\n\n let 
error_message = Some(Message::Error(ChannelPosition(output.seqno()), io_error));\n\n mem::swap(value, &mut error_message);\n\n }\n\n };\n\n }\n\n });\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 99, "score": 10.139436584790406 } ]
Rust
src/level1.rs
DGriffin91/Bevy-BakedGI-Demo
b919d7fe6e4b6a472e2a41ff25d94c1c40966ce9
use bevy::prelude::*; use crate::custom_material::{ CustomMaterial, MaterialProperties, MaterialSetProp, MaterialTexture, }; use crate::emissive_material::EmissiveMaterial; pub fn setup_room( commands: &mut Commands, custom_materials: &mut Assets<CustomMaterial>, emissive_materials: &mut Assets<EmissiveMaterial>, asset_server: &Res<AssetServer>, ) { let variation_texture = MaterialTexture::new(asset_server, "textures/detail.jpg", "variation_texture"); let base_texture = MaterialTexture::new(asset_server, "textures/concrete.jpg", "base_texture"); let walls_texture = MaterialTexture::new(asset_server, "textures/concrete3.jpg", "walls_texture"); let reflection_texture = MaterialTexture::new( asset_server, "textures/scene1/reflection.jpg", "reflection_texture", ); let objects_lightmap = MaterialTexture::new( asset_server, "textures/scene1/objects_lightmap.jpg", "objects_lightmap", ); let building_objects = asset_server.load("models/scene1/building.glb#Mesh0/Primitive0"); let material_properties = MaterialProperties { lightmap: MaterialSetProp { scale: 1.0, contrast: 1.8, brightness: 3.1, blend: 1.0, }, base_a: MaterialSetProp { scale: 8.5, contrast: 0.33, brightness: 2.0, blend: 1.0, }, base_b: MaterialSetProp { scale: 30.0, contrast: 0.3, brightness: 2.2, blend: 1.0, }, vary_a: MaterialSetProp { scale: 0.14, contrast: 0.77, brightness: 4.2, blend: 0.057, }, vary_b: MaterialSetProp { scale: 5.0, contrast: 0.14, brightness: 1.05, blend: 1.0, }, reflection: MaterialSetProp { scale: 1.0, contrast: 3.0, brightness: 0.115, blend: 1.0, }, walls: MaterialSetProp { scale: 10.5, contrast: 0.53, brightness: 1.6, blend: 1.0, }, reflection_mask: MaterialSetProp { scale: 0.033, contrast: 2.3, brightness: 40.0, blend: 1.0, }, mist: MaterialSetProp { scale: 0.032, contrast: 1.0, brightness: 1.0, blend: 0.567, }, directional_light_blend: 0.6, }; let material = custom_materials.add(CustomMaterial { material_properties, textures: [ objects_lightmap, base_texture.clone(), 
variation_texture.clone(), reflection_texture.clone(), walls_texture.clone(), ], }); commands.spawn().insert_bundle(MaterialMeshBundle { mesh: building_objects, transform: Transform::from_xyz(0.0, 0.0, 0.0), material, ..Default::default() }); let main_lightmap = MaterialTexture::new( asset_server, "textures/scene1/main_lightmap.jpg", "main_lightmap", ); let building_main = asset_server.load("models/scene1/building.glb#Mesh1/Primitive0"); let material = custom_materials.add(CustomMaterial { material_properties, textures: [ main_lightmap, base_texture, variation_texture, reflection_texture, walls_texture, ], }); commands.spawn().insert_bundle(MaterialMeshBundle { mesh: building_main, transform: Transform::from_xyz(0.0, 0.0, 0.0), material, ..Default::default() }); let skybox_texture = asset_server.load("textures/scene1/skybox.jpg"); let skybox = asset_server.load("models/scene1/skybox.glb#Mesh0/Primitive0"); commands.spawn().insert_bundle(MaterialMeshBundle { mesh: skybox, transform: Transform::from_xyz(0.0, 0.0, 0.0).with_scale(Vec3::new(10.0, 10.0, 10.0)), material: emissive_materials.add(EmissiveMaterial { emissive: Color::WHITE, emissive_texture: Some(skybox_texture), }), ..Default::default() }); let size: f32 = 50.0; commands.spawn_bundle(DirectionalLightBundle { directional_light: DirectionalLight { shadow_projection: OrthographicProjection { left: -size * 4.0, right: size * 2.0, bottom: -size * 2.0, top: size * 1.0, near: -size * 2.0, far: size * 1.0, ..Default::default() }, illuminance: 100000.0, shadows_enabled: true, ..Default::default() }, transform: Transform { translation: Vec3::new(0.0, 0.0, 0.0), rotation: Quat::from_euler( EulerRot::XYZ, (-14.0f32).to_radians(), -(192.0 - 180.0f32).to_radians(), 0.0, ), ..Default::default() }, ..Default::default() }); commands.spawn_bundle(PointLightBundle { transform: Transform::from_xyz(0.0, 5.0, 100.0), point_light: PointLight { intensity: 30000.0, range: 1000.0, radius: 100.0, color: Color::rgb(0.5, 0.45, 1.0), 
shadows_enabled: false, ..Default::default() }, ..Default::default() }); let lamp_locations = [ Vec3::new(-10.0, 17.0, -16.0), Vec3::new(10.0, 17.0, -16.0), ]; for lamp_loc in lamp_locations { commands.spawn_bundle(PointLightBundle { transform: Transform::from_xyz(lamp_loc.x, lamp_loc.y, lamp_loc.z), point_light: PointLight { intensity: 500.0, range: 1000.0, radius: 10.0, color: Color::rgb(1.0, 1.0, 1.0), shadows_enabled: false, ..Default::default() }, ..Default::default() }); } asset_server.watch_for_changes().unwrap(); }
use bevy::prelude::*; use crate::custom_material::{ CustomMaterial, MaterialProperties, MaterialSetProp, MaterialTexture, }; use crate::emissive_material::EmissiveMaterial; pub fn setup_room( commands: &mut Commands, custom_materials: &mut Assets<CustomMaterial>, emissive_materials: &mut Assets<EmissiveMaterial>, asset_server: &Res<AssetServer>, ) { let variation_texture = MaterialTexture::new(asset_server, "textures/detail.jpg", "variation_texture"); let base_texture = MaterialTexture::new(asset_server, "textures/concrete.jpg", "base_texture"); let walls_texture = MaterialTexture::new(asset_server, "textures/concrete3.jpg", "walls_texture");
let objects_lightmap = MaterialTexture::new( asset_server, "textures/scene1/objects_lightmap.jpg", "objects_lightmap", ); let building_objects = asset_server.load("models/scene1/building.glb#Mesh0/Primitive0"); let material_properties = MaterialProperties { lightmap: MaterialSetProp { scale: 1.0, contrast: 1.8, brightness: 3.1, blend: 1.0, }, base_a: MaterialSetProp { scale: 8.5, contrast: 0.33, brightness: 2.0, blend: 1.0, }, base_b: MaterialSetProp { scale: 30.0, contrast: 0.3, brightness: 2.2, blend: 1.0, }, vary_a: MaterialSetProp { scale: 0.14, contrast: 0.77, brightness: 4.2, blend: 0.057, }, vary_b: MaterialSetProp { scale: 5.0, contrast: 0.14, brightness: 1.05, blend: 1.0, }, reflection: MaterialSetProp { scale: 1.0, contrast: 3.0, brightness: 0.115, blend: 1.0, }, walls: MaterialSetProp { scale: 10.5, contrast: 0.53, brightness: 1.6, blend: 1.0, }, reflection_mask: MaterialSetProp { scale: 0.033, contrast: 2.3, brightness: 40.0, blend: 1.0, }, mist: MaterialSetProp { scale: 0.032, contrast: 1.0, brightness: 1.0, blend: 0.567, }, directional_light_blend: 0.6, }; let material = custom_materials.add(CustomMaterial { material_properties, textures: [ objects_lightmap, base_texture.clone(), variation_texture.clone(), reflection_texture.clone(), walls_texture.clone(), ], }); commands.spawn().insert_bundle(MaterialMeshBundle { mesh: building_objects, transform: Transform::from_xyz(0.0, 0.0, 0.0), material, ..Default::default() }); let main_lightmap = MaterialTexture::new( asset_server, "textures/scene1/main_lightmap.jpg", "main_lightmap", ); let building_main = asset_server.load("models/scene1/building.glb#Mesh1/Primitive0"); let material = custom_materials.add(CustomMaterial { material_properties, textures: [ main_lightmap, base_texture, variation_texture, reflection_texture, walls_texture, ], }); commands.spawn().insert_bundle(MaterialMeshBundle { mesh: building_main, transform: Transform::from_xyz(0.0, 0.0, 0.0), material, ..Default::default() }); let 
skybox_texture = asset_server.load("textures/scene1/skybox.jpg"); let skybox = asset_server.load("models/scene1/skybox.glb#Mesh0/Primitive0"); commands.spawn().insert_bundle(MaterialMeshBundle { mesh: skybox, transform: Transform::from_xyz(0.0, 0.0, 0.0).with_scale(Vec3::new(10.0, 10.0, 10.0)), material: emissive_materials.add(EmissiveMaterial { emissive: Color::WHITE, emissive_texture: Some(skybox_texture), }), ..Default::default() }); let size: f32 = 50.0; commands.spawn_bundle(DirectionalLightBundle { directional_light: DirectionalLight { shadow_projection: OrthographicProjection { left: -size * 4.0, right: size * 2.0, bottom: -size * 2.0, top: size * 1.0, near: -size * 2.0, far: size * 1.0, ..Default::default() }, illuminance: 100000.0, shadows_enabled: true, ..Default::default() }, transform: Transform { translation: Vec3::new(0.0, 0.0, 0.0), rotation: Quat::from_euler( EulerRot::XYZ, (-14.0f32).to_radians(), -(192.0 - 180.0f32).to_radians(), 0.0, ), ..Default::default() }, ..Default::default() }); commands.spawn_bundle(PointLightBundle { transform: Transform::from_xyz(0.0, 5.0, 100.0), point_light: PointLight { intensity: 30000.0, range: 1000.0, radius: 100.0, color: Color::rgb(0.5, 0.45, 1.0), shadows_enabled: false, ..Default::default() }, ..Default::default() }); let lamp_locations = [ Vec3::new(-10.0, 17.0, -16.0), Vec3::new(10.0, 17.0, -16.0), ]; for lamp_loc in lamp_locations { commands.spawn_bundle(PointLightBundle { transform: Transform::from_xyz(lamp_loc.x, lamp_loc.y, lamp_loc.z), point_light: PointLight { intensity: 500.0, range: 1000.0, radius: 10.0, color: Color::rgb(1.0, 1.0, 1.0), shadows_enabled: false, ..Default::default() }, ..Default::default() }); } asset_server.watch_for_changes().unwrap(); }
let reflection_texture = MaterialTexture::new( asset_server, "textures/scene1/reflection.jpg", "reflection_texture", );
assignment_statement
[ { "content": "fn player(commands: &mut Commands) {\n\n commands.spawn_bundle(UnrealCameraBundle::new(\n\n UnrealCameraController::default(),\n\n PerspectiveCameraBundle::default(),\n\n Vec3::new(-30.0, 3.0, -3.0),\n\n Vec3::new(0.0, 3.0, -3.0),\n\n ));\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 0, "score": 76715.87972354727 }, { "content": "pub fn setup_room(\n\n commands: &mut Commands,\n\n custom_materials: &mut Assets<CustomMaterial>,\n\n emissive_materials: &mut Assets<EmissiveMaterial>,\n\n asset_server: &Res<AssetServer>,\n\n) {\n\n let variation_texture =\n\n MaterialTexture::new(asset_server, \"textures/detail.jpg\", \"variation_texture\");\n\n let base_texture = MaterialTexture::new(asset_server, \"textures/concrete.jpg\", \"base_texture\");\n\n\n\n let walls_texture =\n\n MaterialTexture::new(asset_server, \"textures/concrete3.jpg\", \"walls_texture\");\n\n\n\n let reflection_texture = MaterialTexture::new(\n\n asset_server,\n\n \"textures/scene1/reflection.jpg\",\n\n \"reflection_texture\",\n\n );\n\n\n\n //Building Objects\n", "file_path": "src/level2.rs", "rank": 1, "score": 69491.32867454487 }, { "content": "pub fn planitary_physics(time: Res<Time>, mut planet_query: Query<(&mut Planet, &mut Transform)>) {\n\n let mut pos_mass = Vec::new(); //todo, don't allocate\n\n for (planet, transform) in planet_query.iter() {\n\n pos_mass.push((transform.translation, planet.mass));\n\n }\n\n for (i, (mut planet, mut transform)) in planet_query.iter_mut().enumerate() {\n\n for (j, (transform_a, mass_a)) in pos_mass.iter().enumerate() {\n\n if i != j {\n\n let difference = transform.translation - *transform_a;\n\n let d = difference.powf(2.0);\n\n let distance_squared = d.x + d.y + d.z;\n\n\n\n planet.velocity -= (difference).normalize()\n\n * ((1.0 / mass_a.powf(2.0).max(distance_squared)) * mass_a)\n\n * time.delta_seconds();\n\n }\n\n }\n\n transform.translation += planet.velocity * time.delta_seconds();\n\n let r = planet.mass * 0.1;\n\n //bounce off 
walls-ish\n", "file_path": "src/planets.rs", "rank": 3, "score": 67007.08581503727 }, { "content": "pub fn spawn_planets(\n\n mut commands: Commands,\n\n mut meshes: ResMut<Assets<Mesh>>,\n\n mut materials: ResMut<Assets<StandardMaterial>>,\n\n) {\n\n let mut rng = rand::thread_rng();\n\n\n\n let n = 6.0;\n\n\n\n for _ in 0..30 {\n\n let x = rng.gen_range(-n..n);\n\n let y = rng.gen_range(4.0..7.0);\n\n let z = rng.gen_range(-n..n);\n\n\n\n let mass = rng.gen_range(0.05..5.0);\n\n\n\n commands\n\n .spawn_bundle(PbrBundle {\n\n mesh: meshes.add(Mesh::from(shape::UVSphere {\n\n radius: mass * 0.1,\n", "file_path": "src/planets.rs", "rank": 4, "score": 56330.64110433294 }, { "content": "fn setup_room(\n\n mut commands: Commands,\n\n mut custom_materials: ResMut<Assets<CustomMaterial>>,\n\n mut emissive_materials: ResMut<Assets<EmissiveMaterial>>,\n\n asset_server: Res<AssetServer>,\n\n) {\n\n let variation_texture =\n\n MaterialTexture::new(&asset_server, \"textures/detail.jpg\", \"variation_texture\");\n\n let base_texture = MaterialTexture::new(&asset_server, \"textures/concrete.jpg\", \"base_texture\");\n\n\n\n let walls_texture =\n\n MaterialTexture::new(&asset_server, \"textures/concrete3.jpg\", \"walls_texture\");\n\n\n\n let reflection_texture = MaterialTexture::new(\n\n &asset_server,\n\n \"textures/reflection.jpg\",\n\n \"reflection_texture\",\n\n );\n\n\n\n //Building Objects\n", "file_path": "src/setup_room.rs", "rank": 5, "score": 39871.7705646144 }, { "content": "fn get_custom_sampler(render_device: &mut Res<RenderDevice>) -> Sampler {\n\n let mut sampler_descriptor = SamplerDescriptor::default();\n\n\n\n sampler_descriptor.address_mode_u = AddressMode::Repeat;\n\n sampler_descriptor.address_mode_v = AddressMode::Repeat;\n\n sampler_descriptor.mipmap_filter = FilterMode::Linear;\n\n sampler_descriptor.mag_filter = FilterMode::Linear;\n\n sampler_descriptor.min_filter = FilterMode::Linear;\n\n sampler_descriptor.anisotropy_clamp = 
NonZeroU8::new(16);\n\n\n\n render_device.create_sampler(&sampler_descriptor)\n\n}\n\n\n\n// The implementation of [`Material`] needs this impl to work properly.\n\nimpl RenderAsset for EmissiveMaterial {\n\n type ExtractedAsset = EmissiveMaterial;\n\n type PreparedAsset = GpuEmissiveMaterial;\n\n type Param = (\n\n SRes<RenderDevice>,\n\n SRes<MaterialPipeline<Self>>,\n", "file_path": "src/emissive_material.rs", "rank": 6, "score": 38961.012029047546 }, { "content": "fn get_custom_sampler(render_device: &mut Res<RenderDevice>) -> Sampler {\n\n let mut sampler_descriptor = SamplerDescriptor::default();\n\n\n\n sampler_descriptor.address_mode_u = AddressMode::Repeat;\n\n sampler_descriptor.address_mode_v = AddressMode::Repeat;\n\n sampler_descriptor.mipmap_filter = FilterMode::Linear;\n\n sampler_descriptor.mag_filter = FilterMode::Linear;\n\n sampler_descriptor.min_filter = FilterMode::Linear;\n\n sampler_descriptor.anisotropy_clamp = NonZeroU8::new(16);\n\n\n\n render_device.create_sampler(&sampler_descriptor)\n\n}\n\n\n\nimpl RenderAsset for CustomMaterial {\n\n type ExtractedAsset = CustomMaterial;\n\n type PreparedAsset = GpuCustomMaterial;\n\n type Param = (\n\n SRes<RenderDevice>,\n\n SRes<MaterialPipeline<Self>>,\n\n SRes<RenderAssets<Image>>,\n", "file_path": "src/custom_material.rs", "rank": 7, "score": 38961.012029047546 }, { "content": "fn main() {\n\n App::new()\n\n .insert_resource(Msaa { samples: 4 })\n\n .add_plugins(DefaultPlugins)\n\n .add_plugin(EguiPlugin)\n\n .add_plugin(LookTransformPlugin)\n\n .add_plugin(UnrealCameraPlugin::default())\n\n .add_plugin(MaterialPlugin::<CustomMaterial>::default())\n\n .add_plugin(MaterialPlugin::<EmissiveMaterial>::default())\n\n .add_system(menu_ui)\n\n .add_startup_system(spawn_planets)\n\n .add_system(planitary_physics)\n\n .run();\n\n}\n", "file_path": "src/main.rs", "rank": 8, "score": 27815.31252821614 }, { "content": "fn menu_ui(\n\n mut commands: Commands,\n\n query: Query<Entity>,\n\n mut windows: 
ResMut<Windows>,\n\n mut egui_context: ResMut<EguiContext>,\n\n mut custom_materials: ResMut<Assets<CustomMaterial>>,\n\n mut emissive_materials: ResMut<Assets<EmissiveMaterial>>,\n\n mut material_handles: Query<&mut Handle<CustomMaterial>>,\n\n asset_server: Res<AssetServer>,\n\n mut controllers: Query<&mut UnrealCameraController>,\n\n) {\n\n let window = windows.get_primary_mut().unwrap();\n\n let show_ui = window.is_focused() && !window.cursor_locked();\n\n if show_ui {\n\n egui::Window::new(\"Settings\").show(egui_context.ctx_mut(), |ui| {\n\n if ui.button(\"Load Level 1\").clicked() {\n\n for entity in query.iter() {\n\n commands.entity(entity).despawn_recursive();\n\n }\n\n player(&mut commands);\n", "file_path": "src/main.rs", "rank": 9, "score": 26478.466928553826 }, { "content": "fn log_slider<Num: egui::emath::Numeric>(\n\n ui: &mut egui::Ui,\n\n value: &mut Num,\n\n range: RangeInclusive<Num>,\n\n text: &str,\n\n) {\n\n ui.add(egui::Slider::new(value, range).logarithmic(true).text(text));\n\n}\n\n\n\nimpl MaterialSetProp {\n\n pub fn build_ui(&mut self, ui: &mut egui::Ui, label: &str) {\n\n ui.label(label);\n\n log_slider(ui, &mut self.scale, 0.0..=100.0, \"scale\");\n\n log_slider(ui, &mut self.contrast, 0.0..=10.0, \"contrast\");\n\n log_slider(ui, &mut self.brightness, 0.0..=40.0, \"brightness\");\n\n log_slider(ui, &mut self.blend, 0.0..=1.0, \"blend\");\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, AsStd140)]\n", "file_path": "src/custom_material.rs", "rank": 10, "score": 19706.647857757285 }, { "content": " level1::setup_room(\n\n &mut commands,\n\n &mut custom_materials,\n\n &mut emissive_materials,\n\n &asset_server,\n\n );\n\n }\n\n if ui.button(\"Load Level 2\").clicked() {\n\n for entity in query.iter() {\n\n commands.entity(entity).despawn_recursive();\n\n }\n\n player(&mut commands);\n\n level2::setup_room(\n\n &mut commands,\n\n &mut custom_materials,\n\n &mut emissive_materials,\n\n &asset_server,\n\n );\n\n }\n\n if let Some(handle) = 
material_handles.iter_mut().next() {\n", "file_path": "src/main.rs", "rank": 11, "score": 5.949572028136462 }, { "content": "\n\nuse bevy_egui::egui;\n\n\n\n#[derive(Debug, Clone, Copy, AsStd140)]\n\npub struct MaterialSetProp {\n\n pub scale: f32,\n\n pub contrast: f32,\n\n pub brightness: f32,\n\n pub blend: f32,\n\n}\n\n\n", "file_path": "src/custom_material.rs", "rank": 12, "score": 5.244761205508404 }, { "content": "use bevy::prelude::*;\n\nuse rand::Rng;\n\n\n\n#[derive(Component, Debug)]\n\npub struct Planet {\n\n velocity: Vec3,\n\n mass: f32,\n\n}\n\n\n", "file_path": "src/planets.rs", "rank": 13, "score": 5.168730086850111 }, { "content": "pub struct MaterialProperties {\n\n pub lightmap: MaterialSetProp,\n\n pub base_a: MaterialSetProp,\n\n pub base_b: MaterialSetProp,\n\n pub vary_a: MaterialSetProp,\n\n pub vary_b: MaterialSetProp,\n\n pub reflection: MaterialSetProp,\n\n pub walls: MaterialSetProp,\n\n pub reflection_mask: MaterialSetProp,\n\n pub mist: MaterialSetProp,\n\n pub directional_light_blend: f32,\n\n //pub directional_light_color: Vec3,\n\n}\n\n\n\nimpl MaterialProperties {\n\n pub fn build_ui(&mut self, ui: &mut egui::Ui) {\n\n if ui.button(\"Debug Print\").clicked() {\n\n dbg!(&self);\n\n }\n\n self.lightmap.build_ui(ui, \"lightmap\");\n", "file_path": "src/custom_material.rs", "rank": 14, "score": 5.031266188585164 }, { "content": " }\n\n}\n\n\n\n// This is the struct that will be passed to your shader\n\n#[derive(Debug, Clone, TypeUuid)]\n\n#[uuid = \"4ee9c361-1124-4113-890e-197d82b00123\"]\n\npub struct CustomMaterial {\n\n pub material_properties: MaterialProperties,\n\n pub textures: [MaterialTexture; 5],\n\n}\n\n\n\nimpl CustomMaterial {\n\n pub fn build_ui(&mut self, ui: &mut egui::Ui, asset_server: &Res<AssetServer>) {\n\n self.material_properties.build_ui(ui);\n\n ui.label(\"CustomMaterial\");\n\n if ui.button(\"Print Paths\").clicked() {\n\n for texture in &self.textures {\n\n println!(\"{}\", texture.path);\n\n }\n\n }\n", 
"file_path": "src/custom_material.rs", "rank": 15, "score": 4.626556648111122 }, { "content": " pub name: String,\n\n}\n\n\n\nimpl MaterialTexture {\n\n pub fn build_ui(&mut self, ui: &mut egui::Ui, asset_server: &Res<AssetServer>) {\n\n ui.label(&self.name);\n\n ui.horizontal(|ui| {\n\n ui.text_edit_singleline(&mut self.path);\n\n if ui.button(\"LOAD\").clicked() {\n\n self.texture_handle = Some(asset_server.load(&self.path));\n\n }\n\n });\n\n }\n\n\n\n pub fn new(asset_server: &Res<AssetServer>, path: &str, name: &str) -> Self {\n\n MaterialTexture {\n\n texture_handle: Some(asset_server.load(path)),\n\n path: String::from(path),\n\n name: String::from(name),\n\n }\n", "file_path": "src/custom_material.rs", "rank": 16, "score": 4.5146514982425945 }, { "content": " for texture in &mut self.textures {\n\n texture.build_ui(ui, asset_server)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct GpuCustomMaterial {\n\n _buffer: Buffer,\n\n bind_group: BindGroup,\n\n}\n\n\n", "file_path": "src/custom_material.rs", "rank": 17, "score": 4.139570001681275 }, { "content": " let main_mat = if let Some(main_mat) = custom_materials.get_mut(&handle.clone()) {\n\n ui.collapsing(\"material properties\", |ui| {\n\n main_mat.build_ui(ui, &asset_server);\n\n });\n\n Some(main_mat.clone())\n\n } else {\n\n None\n\n };\n\n if let Some(main_mat) = main_mat {\n\n for handle in material_handles.iter_mut() {\n\n if let Some(mat) = custom_materials.get_mut(&handle.clone()) {\n\n mat.material_properties = main_mat.material_properties\n\n }\n\n }\n\n }\n\n }\n\n if let Some(mut controller) = controllers.iter_mut().next() {\n\n controller.enabled = !ui.ctx().is_using_pointer();\n\n }\n\n });\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 4.052947308549151 }, { "content": " self.base_a.build_ui(ui, \"base_a\");\n\n self.base_b.build_ui(ui, \"base_b\");\n\n self.vary_a.build_ui(ui, \"vary_a\");\n\n self.vary_b.build_ui(ui, \"vary_b\");\n\n self.reflection.build_ui(ui, 
\"reflection\");\n\n self.reflection_mask.build_ui(ui, \"reflection_mask\");\n\n self.walls.build_ui(ui, \"walls\");\n\n self.mist.build_ui(ui, \"mist\");\n\n ui.label(\"-------------\");\n\n ui.add(\n\n egui::Slider::new(&mut self.directional_light_blend, 0.0..=5.0)\n\n .text(\"directional_light_blend\"),\n\n );\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct MaterialTexture {\n\n pub texture_handle: Option<Handle<Image>>,\n\n pub path: String,\n", "file_path": "src/custom_material.rs", "rank": 19, "score": 3.4468494740666586 }, { "content": "\n\n// This is the struct that will be passed to your shader\n\n#[derive(Debug, Clone, TypeUuid)]\n\n#[uuid = \"4ee9c361-1124-4113-890e-197d82b00321\"]\n\npub struct EmissiveMaterial {\n\n pub emissive: Color,\n\n pub emissive_texture: Option<Handle<Image>>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct GpuEmissiveMaterial {\n\n _buffer: Buffer,\n\n bind_group: BindGroup,\n\n}\n\n\n", "file_path": "src/emissive_material.rs", "rank": 20, "score": 2.9991430929379366 }, { "content": "use bevy::prelude::*;\n\n\n\nuse crate::custom_material::{\n\n CustomMaterial, MaterialProperties, MaterialSetProp, MaterialTexture,\n\n};\n\nuse crate::emissive_material::EmissiveMaterial;\n\n\n", "file_path": "src/level2.rs", "rank": 21, "score": 2.995871698865977 }, { "content": "use bevy::prelude::*;\n\n\n\nmod custom_material;\n\nmod emissive_material;\n\nmod level1;\n\nmod level2;\n\nmod planets;\n\nuse bevy_egui::{egui, EguiContext, EguiPlugin};\n\nuse custom_material::CustomMaterial;\n\nuse emissive_material::EmissiveMaterial;\n\nuse planets::{planitary_physics, spawn_planets};\n\nuse smooth_bevy_cameras::{\n\n controllers::unreal::{UnrealCameraBundle, UnrealCameraController, UnrealCameraPlugin},\n\n LookTransformPlugin,\n\n};\n\n\n", "file_path": "src/main.rs", "rank": 23, "score": 2.962730593722827 }, { "content": " );\n\n fn extract_asset(&self) -> Self::ExtractedAsset {\n\n self.clone()\n\n }\n\n\n\n fn prepare_asset(\n\n material: 
Self::ExtractedAsset,\n\n (render_device, material_pipeline, gpu_images): &mut SystemParamItem<Self::Param>,\n\n ) -> Result<Self::PreparedAsset, PrepareAssetError<Self::ExtractedAsset>> {\n\n let material_properties = &material.material_properties;\n\n let buffer = render_device.create_buffer_with_data(&BufferInitDescriptor {\n\n contents: material_properties.as_std140().as_bytes(),\n\n label: None,\n\n usage: BufferUsages::UNIFORM | BufferUsages::COPY_DST,\n\n });\n\n\n\n //TODO don't allocate?\n\n let mut entries = Vec::new();\n\n let mut samplers = Vec::new();\n\n\n", "file_path": "src/custom_material.rs", "rank": 24, "score": 2.1732996329532543 }, { "content": " material: material.clone(),\n\n ..Default::default()\n\n });\n\n\n\n let building_objects = asset_server.load(\"models/scene2/building.glb#Mesh2/Primitive0\");\n\n\n\n commands.spawn().insert_bundle(MaterialMeshBundle {\n\n mesh: building_objects,\n\n transform: Transform::from_xyz(0.0, 0.0, 0.0),\n\n material: material.clone(),\n\n ..Default::default()\n\n });\n\n\n\n let building_objects = asset_server.load(\"models/scene2/building.glb#Mesh3/Primitive0\");\n\n\n\n commands.spawn().insert_bundle(MaterialMeshBundle {\n\n mesh: building_objects,\n\n transform: Transform::from_xyz(0.0, 0.0, 0.0),\n\n material,\n\n ..Default::default()\n", "file_path": "src/level2.rs", "rank": 25, "score": 1.867893561628384 }, { "content": "\n\n commands.spawn().insert_bundle(MaterialMeshBundle {\n\n mesh: building_main,\n\n transform: Transform::from_xyz(0.0, 0.0, 0.0),\n\n material,\n\n ..Default::default()\n\n });\n\n\n\n //Sky Box\n\n let skybox_texture = asset_server.load(\"textures/scene2/skybox.jpg\");\n\n let skybox = asset_server.load(\"models/scene2/skybox.glb#Mesh0/Primitive0\");\n\n commands.spawn().insert_bundle(MaterialMeshBundle {\n\n mesh: skybox,\n\n transform: Transform::from_xyz(0.0, 0.0, 0.0).with_scale(Vec3::new(10.0, 10.0, 10.0)),\n\n material: emissive_materials.add(EmissiveMaterial {\n\n emissive: 
Color::WHITE,\n\n emissive_texture: Some(skybox_texture),\n\n }),\n\n ..Default::default()\n\n });\n", "file_path": "src/level2.rs", "rank": 26, "score": 1.7900404407060062 }, { "content": " let skybox = asset_server.load(\"models/skybox.glb#Mesh0/Primitive0\");\n\n commands.spawn().insert_bundle(MaterialMeshBundle {\n\n mesh: skybox,\n\n transform: Transform::from_xyz(0.0, 0.0, 0.0).with_scale(Vec3::new(10.0, 10.0, 10.0)),\n\n material: emissive_materials.add(EmissiveMaterial {\n\n emissive: Color::WHITE,\n\n emissive_texture: Some(sky_box_texture),\n\n }),\n\n ..Default::default()\n\n });\n\n\n\n //Bevy Sun\n\n let size: f32 = 50.0;\n\n commands.spawn_bundle(DirectionalLightBundle {\n\n directional_light: DirectionalLight {\n\n // Configure the projection to better fit the scene\n\n shadow_projection: OrthographicProjection {\n\n left: -size * 4.0,\n\n right: size * 2.0,\n\n bottom: -size * 2.0,\n", "file_path": "src/setup_room.rs", "rank": 28, "score": 1.777691518583067 }, { "content": " blend: 0.567,\n\n },\n\n directional_light_blend: 0.6,\n\n };\n\n\n\n let material = custom_materials.add(CustomMaterial {\n\n material_properties,\n\n textures: [\n\n objects_lightmap,\n\n base_texture.clone(),\n\n variation_texture.clone(),\n\n reflection_texture.clone(),\n\n walls_texture.clone(),\n\n ],\n\n });\n\n\n\n commands\n\n .spawn()\n\n .insert_bundle(MaterialMeshBundle {\n\n mesh: building_objects,\n", "file_path": "src/setup_room.rs", "rank": 29, "score": 1.620737375511073 }, { "content": "use std::num::NonZeroU8;\n\n\n\nuse bevy::{\n\n ecs::system::{lifetimeless::SRes, SystemParamItem},\n\n pbr::MaterialPipeline,\n\n prelude::*,\n\n reflect::TypeUuid,\n\n render::{\n\n render_asset::{PrepareAssetError, RenderAsset, RenderAssets},\n\n render_resource::{\n\n std140::{AsStd140, Std140},\n\n AddressMode, BindGroup, BindGroupDescriptor, BindGroupEntry, BindGroupLayout,\n\n BindGroupLayoutDescriptor, BindGroupLayoutEntry, BindingResource, BindingType, Buffer,\n\n 
BufferBindingType, BufferInitDescriptor, BufferSize, BufferUsages, FilterMode, Sampler,\n\n SamplerBindingType, SamplerDescriptor, ShaderStages, TextureSampleType,\n\n TextureViewDimension,\n\n },\n\n renderer::RenderDevice,\n\n },\n\n};\n", "file_path": "src/emissive_material.rs", "rank": 31, "score": 1.5560779052326703 }, { "content": "use std::{num::NonZeroU8, ops::RangeInclusive};\n\n\n\nuse bevy::{\n\n ecs::system::{lifetimeless::SRes, SystemParamItem},\n\n pbr::MaterialPipeline,\n\n prelude::*,\n\n reflect::TypeUuid,\n\n render::{\n\n render_asset::{PrepareAssetError, RenderAsset, RenderAssets},\n\n render_resource::{\n\n std140::{AsStd140, Std140},\n\n AddressMode, BindGroup, BindGroupDescriptor, BindGroupEntry, BindGroupLayout,\n\n BindGroupLayoutDescriptor, BindGroupLayoutEntry, BindingResource, BindingType, Buffer,\n\n BufferBindingType, BufferInitDescriptor, BufferSize, BufferUsages, FilterMode, Sampler,\n\n SamplerBindingType, SamplerDescriptor, ShaderStages, TextureSampleType,\n\n TextureViewDimension,\n\n },\n\n renderer::RenderDevice,\n\n },\n\n};\n", "file_path": "src/custom_material.rs", "rank": 33, "score": 1.5290869052911158 }, { "content": " // },\n\n // transform: Transform {\n\n // translation: Vec3::new(0.0, 0.0, 0.0),\n\n // rotation: Quat::from_euler(\n\n // EulerRot::XYZ,\n\n // (-blender_sun_elev).to_radians(),\n\n // -(blender_sun_rot - 180.0f32).to_radians(),\n\n // 0.0,\n\n // ),\n\n // ..Default::default()\n\n // },\n\n // ..Default::default()\n\n //});\n\n\n\n //Sky Light for PBR\n\n commands.spawn_bundle(PointLightBundle {\n\n transform: Transform::from_xyz(0.0, 100.0, 0.0),\n\n point_light: PointLight {\n\n intensity: 30000.0,\n\n range: 1000.0,\n", "file_path": "src/level2.rs", "rank": 35, "score": 1.5069966512313848 }, { "content": " reflection_texture,\n\n walls_texture,\n\n ],\n\n });\n\n\n\n commands\n\n .spawn()\n\n .insert_bundle(MaterialMeshBundle {\n\n mesh: building_main,\n\n transform: Transform::from_xyz(0.0, 0.0, 
0.0),\n\n material: material.clone(),\n\n ..Default::default()\n\n })\n\n .insert(LevelAsset {\n\n material_properties,\n\n material_handle: material,\n\n });\n\n\n\n //Sky Box\n\n let sky_box_texture = asset_server.load(\"textures/sky_box.jpg\");\n", "file_path": "src/setup_room.rs", "rank": 36, "score": 1.5069966512313848 }, { "content": "# Bevy Baked GI Demo\n\nA demo scene with baked GI in Bevy\n\n\n\nVideo showing the baking and texturing process:\n\nhttps://youtu.be/XhJ3bTOBDhI\n\n![Level 1](demo.jpg)\n\n![Level 2](demo2.jpg)\n\n\n\nI think there is a lot that can be improved in the demo project. Suggestions/PRs are welcome. I've mainly just been using this to test out various workflow/setup ideas.\n", "file_path": "README.md", "rank": 37, "score": 1.4942047360598232 }, { "content": " },\n\n directional_light_blend: 0.6,\n\n };\n\n\n\n let material = custom_materials.add(CustomMaterial {\n\n material_properties,\n\n textures: [\n\n objects_lightmap,\n\n base_texture.clone(),\n\n variation_texture.clone(),\n\n reflection_texture.clone(),\n\n walls_texture.clone(),\n\n ],\n\n });\n\n\n\n let building_objects = asset_server.load(\"models/scene2/building.glb#Mesh0/Primitive0\");\n\n\n\n commands.spawn().insert_bundle(MaterialMeshBundle {\n\n mesh: building_objects,\n\n transform: Transform::from_xyz(0.0, 0.0, 0.0),\n", "file_path": "src/level2.rs", "rank": 38, "score": 1.3929492422508996 }, { "content": "\n\n //let blender_sun_elev = 24.4f32;\n\n //let blender_sun_rot = 248.0f32;\n\n ////Bevy Sun\n\n //let size: f32 = 250.0;\n\n //commands.spawn_bundle(DirectionalLightBundle {\n\n // directional_light: DirectionalLight {\n\n // // Configure the projection to better fit the scene\n\n // shadow_projection: OrthographicProjection {\n\n // left: -size * 4.0,\n\n // right: size * 2.0,\n\n // bottom: -size * 2.0,\n\n // top: size * 1.0,\n\n // near: -size * 2.0,\n\n // far: size * 1.0,\n\n // ..Default::default()\n\n // },\n\n // illuminance: 100000.0,\n\n // 
shadows_enabled: true,\n\n // ..Default::default()\n", "file_path": "src/level2.rs", "rank": 39, "score": 1.3780508182763542 }, { "content": " //Vec3::new(-15.0, 17.0, -16.0),\n\n Vec3::new(-10.0, 17.0, -16.0),\n\n //Vec3::new(-10.0, 17.0, -16.0),\n\n //Vec3::new(-5.0, 17.0, -16.0),\n\n //Vec3::new(-5.0, 17.0, -16.0),\n\n //Vec3::new(0.0, 17.0, -16.0),\n\n //Vec3::new(5.0, 17.0, -16.0),\n\n Vec3::new(10.0, 17.0, -16.0),\n\n //Vec3::new(15.0, 17.0, -16.0),\n\n ];\n\n\n\n for lamp_loc in lamp_locations {\n\n commands.spawn_bundle(PointLightBundle {\n\n transform: Transform::from_xyz(lamp_loc.x, lamp_loc.y, lamp_loc.z),\n\n point_light: PointLight {\n\n intensity: 500.0,\n\n range: 1000.0,\n\n radius: 10.0, //Oversize since we only have 2\n\n color: Color::rgb(1.0, 1.0, 1.0),\n\n shadows_enabled: false,\n", "file_path": "src/setup_room.rs", "rank": 41, "score": 1.3491900320959274 }, { "content": " });\n\n\n\n //Sky Light for PBR\n\n commands.spawn_bundle(PointLightBundle {\n\n transform: Transform::from_xyz(0.0, 5.0, 100.0),\n\n point_light: PointLight {\n\n intensity: 30000.0,\n\n range: 1000.0,\n\n radius: 100.0,\n\n color: Color::rgb(0.5, 0.45, 1.0),\n\n shadows_enabled: false,\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n });\n\n\n\n // Only doing a couple light positions because Bevy complains:\n\n // WARN bevy_pbr::render::light: Cluster light index lists is full!\n\n // The PointLights in the view are affecting too many clusters.\n\n let lamp_locations = [\n", "file_path": "src/setup_room.rs", "rank": 42, "score": 1.3352082689255398 }, { "content": " }\n\n}\n\n\n\nimpl Material for CustomMaterial {\n\n fn fragment_shader(asset_server: &AssetServer) -> Option<Handle<Shader>> {\n\n let r = Some(asset_server.load(\"shaders/custom_material.wgsl\"));\n\n asset_server.watch_for_changes().unwrap();\n\n r\n\n }\n\n\n\n fn bind_group(render_asset: &<Self as RenderAsset>::PreparedAsset) -> &BindGroup {\n\n &render_asset.bind_group\n\n }\n\n\n\n fn 
bind_group_layout(render_device: &RenderDevice) -> BindGroupLayout {\n\n let texture_quantity = 5;\n\n\n\n //TODO don't allocate?\n\n let mut entries = Vec::new();\n\n\n", "file_path": "src/custom_material.rs", "rank": 43, "score": 1.3091218677178467 }, { "content": " resource: BindingResource::Sampler(sampler1),\n\n },\n\n ],\n\n label: None,\n\n layout: &material_pipeline.material_layout,\n\n });\n\n\n\n Ok(GpuEmissiveMaterial {\n\n _buffer: buffer,\n\n bind_group,\n\n })\n\n }\n\n}\n\n\n\nimpl Material for EmissiveMaterial {\n\n // When creating a custom material, you need to define either a vertex shader, a fragment shader or both.\n\n // If you don't define one of them it will use the default mesh shader which can be found at\n\n // <https://github.com/bevyengine/bevy/blob/latest/crates/bevy_pbr/src/render/mesh.wgsl>\n\n\n\n // For this example we don't need a vertex shader\n", "file_path": "src/emissive_material.rs", "rank": 44, "score": 1.2654191303464328 }, { "content": " SRes<RenderAssets<Image>>,\n\n );\n\n fn extract_asset(&self) -> Self::ExtractedAsset {\n\n self.clone()\n\n }\n\n\n\n fn prepare_asset(\n\n material: Self::ExtractedAsset,\n\n (render_device, material_pipeline, gpu_images): &mut SystemParamItem<Self::Param>,\n\n ) -> Result<Self::PreparedAsset, PrepareAssetError<Self::ExtractedAsset>> {\n\n let emissive = Vec4::from_slice(&material.emissive.as_linear_rgba_f32());\n\n let buffer = render_device.create_buffer_with_data(&BufferInitDescriptor {\n\n contents: emissive.as_std140().as_bytes(),\n\n label: None,\n\n usage: BufferUsages::UNIFORM | BufferUsages::COPY_DST,\n\n });\n\n\n\n let (emissive_texture_view, _sampler) = if let Some(result) = material_pipeline\n\n .mesh_pipeline\n\n .get_image_texture(gpu_images, &material.emissive_texture)\n", "file_path": "src/emissive_material.rs", "rank": 45, "score": 1.0181979067827607 }, { "content": " // fn vertex_shader(asset_server: &AssetServer) -> Option<Handle<Shader>> {\n\n // // Use the same path 
as the fragment shader since wgsl let's you define both shader in the same file\n\n // Some(asset_server.load(\"shaders/custom_material.wgsl\"))\n\n // }\n\n\n\n fn fragment_shader(asset_server: &AssetServer) -> Option<Handle<Shader>> {\n\n let r = Some(asset_server.load(\"shaders/emissive_material.wgsl\"));\n\n asset_server.watch_for_changes().unwrap();\n\n r\n\n }\n\n\n\n fn bind_group(render_asset: &<Self as RenderAsset>::PreparedAsset) -> &BindGroup {\n\n &render_asset.bind_group\n\n }\n\n\n\n fn bind_group_layout(render_device: &RenderDevice) -> BindGroupLayout {\n\n render_device.create_bind_group_layout(&BindGroupLayoutDescriptor {\n\n entries: &[\n\n BindGroupLayoutEntry {\n\n binding: 0,\n", "file_path": "src/emissive_material.rs", "rank": 46, "score": 1.0132980495127533 } ]
Rust
diesel/src/pg/expression/array_comparison.rs
robertmaloney/diesel
332ba12617ff05e5077fc1879caf83fe2e7fd8ff
use std::marker::PhantomData; use backend::*; use expression::{AsExpression, Expression, SelectableExpression, NonAggregate}; use pg::{Pg, PgQueryBuilder}; use query_builder::*; use query_builder::debug::DebugQueryBuilder; use result::QueryResult; use types::{Array, HasSqlType}; pub fn any<ST, T>(vals: T) -> Any<T::Expression, ST> where Pg: HasSqlType<ST>, T: AsExpression<Array<ST>>, { Any::new(vals.as_expression()) } pub fn all<ST, T>(vals: T) -> All<T::Expression, ST> where Pg: HasSqlType<ST>, T: AsExpression<Array<ST>>, { All::new(vals.as_expression()) } #[doc(hidden)] #[derive(Debug, Copy, Clone)] pub struct Any<Expr, ST> { expr: Expr, _marker: PhantomData<ST>, } impl<Expr, ST> Any<Expr, ST> { fn new(expr: Expr) -> Self { Any { expr: expr, _marker: PhantomData, } } } impl<Expr, ST> Expression for Any<Expr, ST> where Pg: HasSqlType<ST>, Expr: Expression<SqlType=Array<ST>>, { type SqlType = ST; } impl<Expr, ST> QueryFragment<Pg> for Any<Expr, ST> where Expr: QueryFragment<Pg>, { fn to_sql(&self, out: &mut PgQueryBuilder) -> BuildQueryResult { out.push_sql("ANY("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Pg as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } } impl<Expr, ST> QueryFragment<Debug> for Any<Expr, ST> where Expr: QueryFragment<Debug>, { fn to_sql(&self, out: &mut DebugQueryBuilder) -> BuildQueryResult { out.push_sql("ANY("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Debug as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } } impl_query_id!(Any<Expr, ST>); impl<Expr, ST, QS> SelectableExpression<QS> for Any<Expr, ST> where Pg: HasSqlType<ST>, Any<Expr, ST>: Expression, Expr: SelectableExpression<QS>, { 
} impl<Expr, ST> NonAggregate for Any<Expr, ST> where Expr: NonAggregate, Any<Expr, ST>: Expression, { } #[doc(hidden)] #[derive(Debug, Copy, Clone)] pub struct All<Expr, ST> { expr: Expr, _marker: PhantomData<ST>, } impl<Expr, ST> All<Expr, ST> { fn new(expr: Expr) -> Self { All { expr: expr, _marker: PhantomData, } } } impl<Expr, ST> Expression for All<Expr, ST> where Pg: HasSqlType<ST>, Expr: Expression<SqlType=Array<ST>>, { type SqlType = ST; } impl<Expr, ST> QueryFragment<Pg> for All<Expr, ST> where Expr: QueryFragment<Pg>, { fn to_sql(&self, out: &mut PgQueryBuilder) -> BuildQueryResult { out.push_sql("ALL("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Pg as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } } impl<Expr, ST> QueryFragment<Debug> for All<Expr, ST> where Expr: QueryFragment<Debug>, { fn to_sql(&self, out: &mut DebugQueryBuilder) -> BuildQueryResult { out.push_sql("ALL("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Debug as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } } impl_query_id!(All<Expr, ST>); impl<Expr, ST, QS> SelectableExpression<QS> for All<Expr, ST> where Pg: HasSqlType<ST>, All<Expr, ST>: Expression, Expr: SelectableExpression<QS>, { } impl<Expr, ST> NonAggregate for All<Expr, ST> where Expr: NonAggregate, All<Expr, ST>: Expression, { }
use std::marker::PhantomData; use backend::*; use expression::{AsExpression, Expression, SelectableExpression, NonAggregate}; use pg::{Pg, PgQueryBuilder}; use query_builder::*; use query_builder::debug::DebugQueryBuilder; use result::QueryResult; use types::{Array, HasSqlType}; pub fn any<ST, T>(vals: T) -> Any<T::Expression, ST> where Pg: HasSqlType<ST>, T: AsExpression<Array<ST>>, { Any::new(vals.as_expression()) } pub fn all<ST, T>(vals: T) -> All<T::Expression, ST> where Pg: HasSqlType<ST>, T: AsExpression<Array<ST>>, { All::new(vals.as_expression()) } #[doc(hidden)] #[derive(Debug, Copy, Clone)] pub struct Any<Expr, ST> { expr: Expr, _marker: PhantomData<ST>, } impl<Expr, ST> Any<Expr, ST> { fn new(expr: Expr) -> Self { Any { expr: expr, _marker: PhantomData, } } } impl<Expr, ST> Expression for Any<Expr, ST> where Pg: HasSqlType<ST>, Expr: Expression<SqlType=Array<ST>>, { type SqlType = ST; } impl<Expr, ST> QueryFragment<Pg> for Any<Expr, ST> where Expr: QueryFragment<Pg>, { fn to_sql(&self, out: &mut PgQueryBuilder) -> BuildQueryResult { out.push_sql("ANY("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Pg as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } } impl<Expr, ST> QueryFragment<Debug> for Any<Expr, ST> where Expr: QueryFragment<Debug>, { fn to_sql(&self, out: &mut DebugQueryBuilder) -> BuildQueryResult { out.push_sql("ANY("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Debug as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } }
QS>, { } impl<Expr, ST> NonAggregate for Any<Expr, ST> where Expr: NonAggregate, Any<Expr, ST>: Expression, { } #[doc(hidden)] #[derive(Debug, Copy, Clone)] pub struct All<Expr, ST> { expr: Expr, _marker: PhantomData<ST>, } impl<Expr, ST> All<Expr, ST> { fn new(expr: Expr) -> Self { All { expr: expr, _marker: PhantomData, } } } impl<Expr, ST> Expression for All<Expr, ST> where Pg: HasSqlType<ST>, Expr: Expression<SqlType=Array<ST>>, { type SqlType = ST; } impl<Expr, ST> QueryFragment<Pg> for All<Expr, ST> where Expr: QueryFragment<Pg>, { fn to_sql(&self, out: &mut PgQueryBuilder) -> BuildQueryResult { out.push_sql("ALL("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Pg as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } } impl<Expr, ST> QueryFragment<Debug> for All<Expr, ST> where Expr: QueryFragment<Debug>, { fn to_sql(&self, out: &mut DebugQueryBuilder) -> BuildQueryResult { out.push_sql("ALL("); try!(self.expr.to_sql(out)); out.push_sql(")"); Ok(()) } fn collect_binds(&self, out: &mut <Debug as Backend>::BindCollector) -> QueryResult<()> { try!(self.expr.collect_binds(out)); Ok(()) } fn is_safe_to_cache_prepared(&self) -> bool { self.expr.is_safe_to_cache_prepared() } } impl_query_id!(All<Expr, ST>); impl<Expr, ST, QS> SelectableExpression<QS> for All<Expr, ST> where Pg: HasSqlType<ST>, All<Expr, ST>: Expression, Expr: SelectableExpression<QS>, { } impl<Expr, ST> NonAggregate for All<Expr, ST> where Expr: NonAggregate, All<Expr, ST>: Expression, { }
impl_query_id!(Any<Expr, ST>); impl<Expr, ST, QS> SelectableExpression<QS> for Any<Expr, ST> where Pg: HasSqlType<ST>, Any<Expr, ST>: Expression, Expr: SelectableExpression<
random
[ { "content": "pub trait ArrayExpressionMethods<ST>: Expression<SqlType=Array<ST>> + Sized {\n\n /// Compares two arrays for common elements, using the `&&` operator in\n\n /// the final SQL\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// # #[macro_use] extern crate diesel;\n\n /// # include!(\"src/doctest_setup.rs\");\n\n /// #\n\n /// # table! {\n\n /// # posts {\n\n /// # id -> Integer,\n\n /// # tags -> Array<VarChar>,\n\n /// # }\n\n /// # }\n\n /// #\n\n /// # // FIXME: We shouldn't need to define a users table here\n\n /// # table! {\n\n /// # users {\n", "file_path": "diesel/src/pg/expression/expression_methods.rs", "rank": 0, "score": 268220.43060407724 }, { "content": "pub fn test_type_round_trips<ST, T>(value: T) -> bool where\n\n ST: QueryId,\n\n <TestConnection as Connection>::Backend: HasSqlType<ST>,\n\n T: AsExpression<ST> + Queryable<ST, <TestConnection as Connection>::Backend> + PartialEq + Clone + ::std::fmt::Debug,\n\n <T as AsExpression<ST>>::Expression: SelectableExpression<()> + QueryFragment<<TestConnection as Connection>::Backend> + QueryId,\n\n{\n\n let connection = connection();\n\n let query = select(AsExpression::<ST>::as_expression(value.clone()));\n\n let result = query.get_result::<T>(&connection);\n\n match result {\n\n Ok(res) => {\n\n if value != res {\n\n println!(\"{:?}, {:?}\", value, res);\n\n false\n\n } else {\n\n true\n\n }\n\n }\n\n Err(Error::DatabaseError(_, ref e))\n\n if e.message() == \"invalid byte sequence for encoding \\\"UTF8\\\": 0x00\" => true,\n\n Err(e) => panic!(\"Query failed: {:?}\", e),\n\n }\n\n}\n\n\n", "file_path": "diesel_tests/tests/types_roundtrip.rs", "rank": 1, "score": 260811.7135192058 }, { "content": "pub trait BoolExpressionMethods: Expression<SqlType=Bool> + Sized {\n\n /// Creates a SQL `AND` expression\n\n fn and<T: AsExpression<Bool>>(self, other: T) -> And<Self, T::Expression> {\n\n And::new(self.as_expression(), other.as_expression())\n\n }\n\n\n\n /// Creates a SQL `OR` 
expression\n\n ///\n\n /// The result will be wrapped in parenthesis, so that precidence matches\n\n /// that of your function calls. For example, `false.and(true.or(false))`\n\n /// will return `false`\n\n fn or<T: AsExpression<Bool>>(self, other: T) -> Grouped<Or<Self, T::Expression>> {\n\n Grouped(Or::new(self, other.as_expression()))\n\n }\n\n}\n\n\n\nimpl<T: Expression<SqlType=Bool>> BoolExpressionMethods for T {}\n", "file_path": "diesel/src/expression/expression_methods/bool_expression_methods.rs", "rank": 4, "score": 246950.5397605445 }, { "content": "pub fn sql<ST>(sql: &str) -> SqlLiteral<ST> {\n\n SqlLiteral::new(sql.into())\n\n}\n", "file_path": "diesel/src/expression/sql_literal.rs", "rank": 5, "score": 226254.97653915198 }, { "content": "/// A DSL added to `i64` and `f64` to construct PostgreSQL intervals of less\n\n/// than 1 day.\n\n///\n\n/// The behavior of these methods when called on `NAN` or `Infinity` is\n\n/// undefined.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate diesel;\n\n/// # include!(\"src/doctest_setup.rs\");\n\n/// # use diesel::expression::dsl::*;\n\n/// #\n\n/// # table! 
{\n\n/// # users {\n\n/// # id -> Serial,\n\n/// # name -> VarChar,\n\n/// # created_at -> Timestamp,\n\n/// # }\n\n/// # }\n\n/// #\n\n/// # fn main() {\n\n/// # use self::users::dsl::*;\n\n/// # let connection = connection_no_data();\n\n/// # connection.execute(\"CREATE TABLE users (id serial primary key, name\n\n/// # varchar not null, created_at timestamp not null)\").unwrap();\n\n/// connection.execute(\"INSERT INTO users (name, created_at) VALUES\n\n/// ('Sean', NOW()), ('Tess', NOW() - '5 minutes'::interval),\n\n/// ('Jim', NOW() - '10 minutes'::interval)\").unwrap();\n\n///\n\n/// let mut data: Vec<String> = users\n\n/// .select(name)\n\n/// .filter(created_at.gt(now - 7.minutes()))\n\n/// .load(&connection).unwrap();\n\n/// assert_eq!(2, data.len());\n\n/// assert_eq!(\"Sean\".to_string(), data[0]);\n\n/// assert_eq!(\"Tess\".to_string(), data[1]);\n\n/// # }\n\n/// ```\n\npub trait MicroIntervalDsl: Sized + Mul<Self, Output=Self> {\n\n /// Returns a PgInterval representing `self` as microseconds\n\n fn microseconds(self) -> PgInterval;\n\n #[doc(hidden)]\n\n fn times(self, x: i32) -> Self;\n\n\n\n /// Returns a PgInterval representing `self` as milliseconds\n\n fn milliseconds(self) -> PgInterval {\n\n (self.times(1000)).microseconds()\n\n }\n\n\n\n /// Returns a PgInterval representing `self` as seconds\n\n fn seconds(self) -> PgInterval {\n\n (self.times(1000)).milliseconds()\n\n }\n\n\n\n /// Returns a PgInterval representing `self` as minutes\n\n fn minutes(self) -> PgInterval {\n\n (self.times(60)).seconds()\n\n }\n", "file_path": "diesel/src/pg/expression/extensions/interval_dsl.rs", "rank": 6, "score": 196323.4594473925 }, { "content": "/// A DSL added to `i32` and `f64` to construct PostgreSQL intervals of greater\n\n/// than 1 day.\n\n///\n\n/// The behavior of these methods when called on `NAN` or `Infinity` is\n\n/// undefined.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate diesel;\n\n/// # 
include!(\"src/doctest_setup.rs\");\n\n/// # use diesel::expression::dsl::*;\n\n/// #\n\n/// # table! {\n\n/// # users {\n\n/// # id -> Serial,\n\n/// # name -> VarChar,\n\n/// # created_at -> Timestamp,\n\n/// # }\n\n/// # }\n\n/// #\n\n/// # fn main() {\n\n/// # use self::users::dsl::*;\n\n/// # let connection = connection_no_data();\n\n/// # connection.execute(\"CREATE TABLE users (id serial primary key, name\n\n/// # varchar not null, created_at timestamp not null)\").unwrap();\n\n/// connection.execute(\"INSERT INTO users (name, created_at) VALUES\n\n/// ('Sean', NOW()), ('Tess', NOW() - '5 days'::interval),\n\n/// ('Jim', NOW() - '10 days'::interval)\").unwrap();\n\n///\n\n/// let mut data: Vec<String> = users\n\n/// .select(name)\n\n/// .filter(created_at.gt(now - 7.days()))\n\n/// .load(&connection).unwrap();\n\n/// assert_eq!(2, data.len());\n\n/// assert_eq!(\"Sean\".to_string(), data[0]);\n\n/// assert_eq!(\"Tess\".to_string(), data[1]);\n\n/// # }\n\n/// ```\n\npub trait DayAndMonthIntervalDsl: Sized + Mul<Self, Output=Self> {\n\n /// Returns a PgInterval representing `self` in days\n\n fn days(self) -> PgInterval;\n\n /// Returns a PgInterval representing `self` in monhts\n\n fn months(self) -> PgInterval;\n\n #[doc(hidden)]\n\n fn times(self, x: i32) -> Self;\n\n\n\n /// Returns a PgInterval representing `self` in weeks\n\n ///\n\n /// Note: When called on a high precision float, the returned interval may\n\n /// be 1 microsecond different than the equivalent string passed to\n\n /// PostgreSQL.\n\n fn weeks(self) -> PgInterval {\n\n (self.times(7)).days()\n\n }\n\n\n\n /// Returns a PgInterval representing `self` in weeks\n\n ///\n\n /// Note: When called on a float, this method will mimic the behavior of\n", "file_path": "diesel/src/pg/expression/extensions/interval_dsl.rs", "rank": 7, "score": 193331.01044782123 }, { "content": "/// Returns a &str representing the type of backend being used, determined\n\n/// by the format of the database 
url.\n\npub fn backend(database_url: &String) -> &str {\n\n if database_url.starts_with(\"postgres://\") || database_url.starts_with(\"postgresql://\") && cfg!(feature = \"postgres\") {\n\n \"postgres\"\n\n } else if cfg!(feature = \"sqlite\") {\n\n \"sqlite\"\n\n } else {\n\n panic!(\"{:?} is not a valid PostgreSQL URL. It should start with\\\n\n `postgres://` or `postgresql://`\", database_url);\n\n }\n\n}\n\n\n", "file_path": "diesel_cli/src/database.rs", "rank": 8, "score": 188497.13196110562 }, { "content": "pub trait HasSqlType<ST>: TypeMetadata {\n\n fn metadata() -> Self::TypeMetadata;\n\n}\n\n\n", "file_path": "diesel/src/types/mod.rs", "rank": 9, "score": 187354.49949947477 }, { "content": "#[test]\n\nfn bool_to_sql() {\n\n let mut bytes = vec![];\n\n ToSql::<types::Bool, Pg>::to_sql(&true, &mut bytes).unwrap();\n\n ToSql::<types::Bool, Pg>::to_sql(&false, &mut bytes).unwrap();\n\n assert_eq!(bytes, vec![1u8, 0u8]);\n\n}\n\n\n", "file_path": "diesel/src/pg/types/primitives.rs", "rank": 10, "score": 186174.10726072956 }, { "content": "struct PgErrorInformation(*mut PGresult);\n\n\n\nunsafe impl Send for PgErrorInformation {}\n\n\n\nimpl Drop for PgErrorInformation {\n\n fn drop(&mut self) {\n\n unsafe { PQclear(self.0) };\n\n }\n\n}\n\n\n\nimpl DatabaseErrorInformation for PgErrorInformation {\n\n fn message(&self) -> &str {\n\n match get_result_field(self.0, ResultField::MessagePrimary) {\n\n Some(e) => e,\n\n None => unreachable!(\"Per PGs documentation, all errors should have a message\"),\n\n }\n\n }\n\n\n\n fn details(&self) -> Option<&str> {\n\n get_result_field(self.0, ResultField::MessageDetail)\n", "file_path": "diesel/src/pg/connection/result.rs", "rank": 11, "score": 185399.9140692214 }, { "content": "pub fn determine_column_type(cx: &mut ExtCtxt, _span: Span, attr: &ColumnInformation) -> P<ast::Ty> {\n\n let tpe = if attr.type_name.starts_with(\"_\") {\n\n let subtype = str_to_ident(&capitalize(&attr.type_name[1..]));\n\n quote_ty!(cx, 
Array<$subtype>)\n\n } else {\n\n let type_name = str_to_ident(&capitalize(&attr.type_name));\n\n quote_ty!(cx, $type_name)\n\n };\n\n\n\n if attr.nullable {\n\n quote_ty!(cx, Nullable<$tpe>)\n\n } else {\n\n tpe\n\n }\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/pg.rs", "rank": 12, "score": 183833.15494494466 }, { "content": "/// Creates a bare select statement, with no from clause. Primarily used for\n\n/// testing diesel itself, but likely useful for third party crates as well. The\n\n/// given expressions must be selectable from anywhere.\n\npub fn select<T>(expression: T) -> SelectStatement<T::SqlType, T, ()> where\n\n T: Expression,\n\n{\n\n SelectStatement::simple(expression, ())\n\n}\n", "file_path": "diesel/src/query_builder/functions.rs", "rank": 13, "score": 181560.231822266 }, { "content": "#[cfg(feature = \"with-syntex\")]\n\npub fn register(reg: &mut syntex::Registry) {\n\n reg.add_attr(\"feature(custom_derive)\");\n\n reg.add_attr(\"feature(custom_attribute)\");\n\n\n\n reg.add_decorator(\"derive_Queryable\", queryable::expand_derive_queryable);\n\n reg.add_decorator(\"derive_Identifiable\", identifiable::expand_derive_identifiable);\n\n reg.add_decorator(\"insertable_into\", insertable::expand_insert);\n\n reg.add_decorator(\"changeset_for\", update::expand_changeset_for);\n\n reg.add_decorator(\"has_many\", associations::expand_has_many);\n\n reg.add_decorator(\"belongs_to\", associations::expand_belongs_to);\n\n reg.add_macro(\"embed_migrations\", migrations::expand_embed_migrations);\n\n reg.add_macro(\"infer_table_from_schema\", schema_inference::expand_load_table);\n\n reg.add_macro(\"infer_schema\", schema_inference::expand_infer_schema);\n\n\n\n reg.add_post_expansion_pass(util::strip_attributes);\n\n}\n", "file_path": "diesel_codegen_syntex/src/lib.rs", "rank": 14, "score": 180412.37654564442 }, { "content": "#[plugin_registrar]\n\npub fn register(reg: &mut rustc_plugin::Registry) {\n\n use 
syntax::parse::token::intern;\n\n use syntax::ext::base::MultiDecorator;\n\n reg.register_syntax_extension(\n\n intern(\"derive_Queryable\"),\n\n MultiDecorator(Box::new(queryable::expand_derive_queryable))\n\n );\n\n reg.register_syntax_extension(\n\n intern(\"derive_Identifiable\"),\n\n MultiDecorator(Box::new(identifiable::expand_derive_identifiable))\n\n );\n\n reg.register_syntax_extension(\n\n intern(\"insertable_into\"),\n\n MultiDecorator(Box::new(insertable::expand_insert))\n\n );\n\n reg.register_syntax_extension(\n\n intern(\"changeset_for\"),\n\n MultiDecorator(Box::new(update::expand_changeset_for)),\n\n );\n\n reg.register_syntax_extension(\n", "file_path": "diesel_codegen/src/lib.rs", "rank": 15, "score": 180412.37654564442 }, { "content": "pub fn is_option_ty(ty: &ast::Ty) -> bool {\n\n ty_param_of_option(ty).is_some()\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/util.rs", "rank": 16, "score": 177596.52413121573 }, { "content": "pub fn struct_ty(\n\n cx: &mut ExtCtxt,\n\n span: Span,\n\n name: ast::Ident,\n\n generics: &ast::Generics,\n\n) -> P<ast::Ty> {\n\n let lifetimes = generics.lifetimes.iter().map(|lt| lt.lifetime).collect();\n\n let ty_params = generics.ty_params.iter()\n\n .map(|param| cx.ty_ident(span, param.ident))\n\n .collect();\n\n cx.ty_path(cx.path_all(span, false, vec![name], lifetimes, ty_params, Vec::new()))\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/util.rs", "rank": 17, "score": 177224.84594207577 }, { "content": "/// Helper trait used when boxing expressions. 
This exists to work around the\n\n/// fact that Rust will not let us use non-core types as bounds on a trait\n\n/// object (you could not return `Box<Expression+NonAggregate>`)\n\npub trait BoxableExpression<QS, ST, DB> where\n\n DB: Backend,\n\n Self: Expression,\n\n Self: SelectableExpression<QS, ST>,\n\n Self: NonAggregate,\n\n Self: QueryFragment<DB>,\n\n{}\n\n\n\nimpl<QS, T, ST, DB> BoxableExpression<QS, ST, DB> for T where\n\n DB: Backend,\n\n T: Expression,\n\n T: SelectableExpression<QS, ST>,\n\n T: NonAggregate,\n\n T: QueryFragment<DB>,\n\n{\n\n}\n\n\n\nimpl<QS, ST, DB> QueryId for BoxableExpression<QS, ST, DB, SqlType=ST> {\n\n type QueryId = ();\n\n\n\n fn has_static_query_id() -> bool {\n\n false\n\n }\n\n}\n", "file_path": "diesel/src/expression/mod.rs", "rank": 18, "score": 174328.60216074207 }, { "content": "/// Returns true if the '__diesel_schema_migrations' table exists in the\n\n/// database we connect to, returns false if it does not.\n\npub fn schema_table_exists(database_url: &String) -> DatabaseResult<bool> {\n\n let result = match backend(database_url) {\n\n #[cfg(feature = \"postgres\")]\n\n \"postgres\" => {\n\n let conn = PgConnection::establish(database_url).unwrap();\n\n try!(select(sql::<Bool>(\"EXISTS \\\n\n (SELECT 1 \\\n\n FROM information_schema.tables \\\n\n WHERE table_name = '__diesel_schema_migrations')\"))\n\n .get_result(&conn))\n\n },\n\n #[cfg(feature = \"sqlite\")]\n\n \"sqlite\" => {\n\n let conn = SqliteConnection::establish(database_url).unwrap();\n\n try!(select(sql::<Bool>(\"EXISTS \\\n\n (SELECT 1 \\\n\n FROM sqlite_master \\\n\n WHERE type = 'table' \\\n\n AND name = '__diesel_schema_migrations')\"))\n\n .get_result(&conn))\n\n },\n\n _ => unreachable!(\"The backend function should ensure we never get here.\"),\n\n };\n\n Ok(result)\n\n}\n\n\n", "file_path": "diesel_cli/src/database.rs", "rank": 19, "score": 172242.06313504954 }, { "content": "#[test]\n\nfn bool_from_sql_treats_null_as_false() {\n\n let result = 
<bool as FromSql<types::Bool, Pg>>::from_sql(None).unwrap();\n\n assert!(!result);\n\n}\n", "file_path": "diesel/src/pg/types/primitives.rs", "rank": 20, "score": 172236.6472070274 }, { "content": "/// Serializes a single value to be sent to the database. The output will be\n\n/// included as a bind parameter, and is expected to be the binary format, not\n\n/// text.\n\npub trait ToSql<A, DB: Backend + HasSqlType<A>> {\n\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>>;\n\n}\n\n\n\nimpl<'a, A, T, DB> ToSql<A, DB> for &'a T where\n\n DB: Backend + HasSqlType<A>,\n\n T: ToSql<A, DB>,\n\n{\n\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n\n (*self).to_sql(out)\n\n }\n\n}\n", "file_path": "diesel/src/types/mod.rs", "rank": 21, "score": 170656.9997642196 }, { "content": "pub fn load_table_names(connection: &PgConnection) -> QueryResult<Vec<String>> {\n\n use diesel::prelude::*;\n\n use diesel::expression::dsl::sql;\n\n\n\n let query = select(sql::<types::VarChar>(\"table_name FROM information_schema.tables\"))\n\n .filter(sql::<types::Bool>(\"table_schema = 'public' AND table_name NOT LIKE '\\\\_\\\\_%'\"));\n\n query.load(connection)\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/pg.rs", "rank": 22, "score": 170521.19910623212 }, { "content": "fn is_bool(type_name: &str) -> bool {\n\n type_name == \"boolean\" ||\n\n type_name.contains(\"tiny\") &&\n\n type_name.contains(\"int\")\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/sqlite.rs", "rank": 23, "score": 169307.1976195127 }, { "content": "pub trait TextExpressionMethods: Expression<SqlType=Text> + Sized {\n\n /// Returns a SQL `LIKE` expression\n\n fn like<T: AsExpression<Text>>(self, other: T) -> Like<Self, T::Expression> {\n\n Like::new(self.as_expression(), other.as_expression())\n\n }\n\n\n\n /// Returns a SQL `NOT LIKE` expression\n\n fn not_like<T: AsExpression<Text>>(self, other: T) -> 
NotLike<Self, T::Expression> {\n\n NotLike::new(self.as_expression(), other.as_expression())\n\n }\n\n}\n\n\n\nimpl<T: Expression<SqlType=Text>> TextExpressionMethods for T {}\n", "file_path": "diesel/src/expression/expression_methods/text_expression_methods.rs", "rank": 24, "score": 169294.79058131765 }, { "content": "/// Creates a SQL `COUNT(*)` expression\n\n///\n\n/// For selecting the count of a query, and nothing else, you can just call\n\n/// [`count`](../../trait.CountDsl.html) on the query instead.\n\n///\n\n/// As with most bare functions, this is not exported by default. You can import\n\n/// it specifically as `diesel::expression::count_star`, or glob import\n\n/// `diesel::expression::dsl::*`\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # #[macro_use] extern crate diesel;\n\n/// # include!(\"src/doctest_setup.rs\");\n\n/// # use diesel::expression::dsl::*;\n\n/// #\n\n/// # table! {\n\n/// # users {\n\n/// # id -> Integer,\n\n/// # name -> VarChar,\n\n/// # }\n\n/// # }\n\n/// #\n\n/// # fn main() {\n\n/// # use self::users::dsl::*;\n\n/// # let connection = establish_connection();\n\n/// assert_eq!(Ok(2), users.select(count_star()).first(&connection));\n\n/// # }\n\n/// ```\n\npub fn count_star() -> CountStar {\n\n CountStar\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\n#[doc(hidden)]\n\npub struct Count<T> {\n\n target: T,\n\n}\n\n\n\nimpl<T: Expression> Expression for Count<T> {\n\n type SqlType = BigInt;\n\n}\n\n\n\nimpl<T: QueryFragment<DB>, DB: Backend> QueryFragment<DB> for Count<T> {\n\n fn to_sql(&self, out: &mut DB::QueryBuilder) -> BuildQueryResult {\n\n out.push_sql(\"COUNT(\");\n\n try!(self.target.to_sql(out));\n\n out.push_sql(\")\");\n\n Ok(())\n", "file_path": "diesel/src/expression/count.rs", "rank": 25, "score": 168410.02411286783 }, { "content": "pub trait GroupByDsl<Expr: Expression> {\n\n type Output: Query;\n\n\n\n fn group_by(self, expr: Expr) -> Self::Output;\n\n}\n\n\n\nimpl<T, Expr> GroupByDsl<Expr> for T where\n\n 
Expr: Expression,\n\n T: QuerySource + AsQuery,\n\n T::Query: GroupByDsl<Expr>,\n\n{\n\n type Output = <T::Query as GroupByDsl<Expr>>::Output;\n\n\n\n fn group_by(self, expr: Expr) -> Self::Output {\n\n self.as_query().group_by(expr)\n\n }\n\n}\n", "file_path": "diesel/src/query_dsl/group_by_dsl.rs", "rank": 26, "score": 166702.17571722213 }, { "content": "pub trait PgExpressionMethods: Expression + Sized {\n\n /// Creates a PostgreSQL `IS NOT DISTINCT FROM` expression. This behaves\n\n /// identically to the `=` operator, except that `NULL` is treated as a\n\n /// normal value.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// # #[macro_use] extern crate diesel;\n\n /// # include!(\"src/doctest_setup.rs\");\n\n /// #\n\n /// # table! {\n\n /// # users {\n\n /// # id -> Integer,\n\n /// # name -> VarChar,\n\n /// # }\n\n /// # }\n\n /// #\n\n /// # fn main() {\n\n /// # use self::users::dsl::*;\n", "file_path": "diesel/src/pg/expression/expression_methods.rs", "rank": 27, "score": 165335.6145340128 }, { "content": "/// How to deserialize a single field of a given type. 
The input will always be\n\n/// the binary representation, not the text.\n\npub trait FromSql<A, DB: Backend + HasSqlType<A>>: Sized {\n\n fn from_sql(bytes: Option<&DB::RawValue>) -> Result<Self, Box<Error+Send+Sync>>;\n\n}\n\n\n", "file_path": "diesel/src/types/mod.rs", "rank": 28, "score": 164865.9507045425 }, { "content": "pub trait Backend where\n\n Self: Sized,\n\n Self: HasSqlType<types::SmallInt>,\n\n Self: HasSqlType<types::Integer>,\n\n Self: HasSqlType<types::BigInt>,\n\n Self: HasSqlType<types::Float>,\n\n Self: HasSqlType<types::Double>,\n\n Self: HasSqlType<types::VarChar>,\n\n Self: HasSqlType<types::Text>,\n\n Self: HasSqlType<types::Binary>,\n\n Self: HasSqlType<types::Date>,\n\n Self: HasSqlType<types::Time>,\n\n Self: HasSqlType<types::Timestamp>,\n\n{\n\n type QueryBuilder: QueryBuilder<Self>;\n\n type BindCollector: BindCollector<Self>;\n\n type RawValue: ?Sized;\n\n}\n\n\n", "file_path": "diesel/src/backend.rs", "rank": 29, "score": 162913.08823533537 }, { "content": "#[doc(hidden)]\n\npub trait PgTimestampExpressionMethods: Expression + Sized {\n\n /// Returns a PostgreSQL \"AT TIME ZONE\" expression\n\n fn at_time_zone<T>(self, timezone: T) -> AtTimeZone<Self, T::Expression> where\n\n T: AsExpression<VarChar>,\n\n {\n\n AtTimeZone::new(self, timezone.as_expression())\n\n }\n\n}\n\n\n\nimpl<T: Expression> PgTimestampExpressionMethods for T where\n\n T::SqlType: DateTimeLike,\n\n{}\n\n\n", "file_path": "diesel/src/pg/expression/expression_methods.rs", "rank": 30, "score": 162895.29698414175 }, { "content": "pub fn establish_connection() -> PgConnection {\n\n dotenv().ok();\n\n\n\n let database_url = env::var(\"DATABASE_URL\")\n\n .expect(\"DATABASE_URL must be set\");\n\n PgConnection::establish(&database_url)\n\n .expect(&format!(\"Error connecting to {}\", database_url))\n\n}\n\n\n", "file_path": "examples/getting_started_step_4/src/lib.rs", "rank": 31, "score": 162101.40611145186 }, { "content": "pub fn establish_connection() -> 
PgConnection {\n\n dotenv().ok();\n\n\n\n let database_url = env::var(\"DATABASE_URL\")\n\n .expect(\"DATABASE_URL must be set\");\n\n PgConnection::establish(&database_url)\n\n .expect(&format!(\"Error connecting to {}\", database_url))\n\n}\n\n\n", "file_path": "examples/getting_started_step_2/src/lib.rs", "rank": 32, "score": 162101.40611145186 }, { "content": "pub fn establish_connection() -> PgConnection {\n\n dotenv().ok();\n\n\n\n let database_url = env::var(\"DATABASE_URL\")\n\n .expect(\"DATABASE_URL must be set\");\n\n PgConnection::establish(&database_url)\n\n .expect(&format!(\"Error connecting to {}\", database_url))\n\n}\n\n\n", "file_path": "examples/getting_started_step_3/src/lib.rs", "rank": 33, "score": 162101.40611145186 }, { "content": "pub fn establish_connection() -> PgConnection {\n\n dotenv().ok();\n\n\n\n let database_url = env::var(\"DATABASE_URL\")\n\n .expect(\"DATABASE_URL must be set\");\n\n PgConnection::establish(&database_url)\n\n .expect(&format!(\"Error connecting to {}\", database_url))\n\n}\n", "file_path": "examples/getting_started_step_1/src/lib.rs", "rank": 34, "score": 162101.40611145186 }, { "content": "/// How to deserialize multiple fields, with a known type. 
This type is\n\n/// implemented for tuples of various sizes.\n\npub trait FromSqlRow<A, DB: Backend + HasSqlType<A>>: Sized {\n\n fn build_from_row<T: Row<DB>>(row: &mut T) -> Result<Self, Box<Error+Send+Sync>>;\n\n}\n\n\n\n#[cfg(feature = \"unstable\")]\n\nimpl<T, ST, DB> FromSqlRow<Nullable<ST>, DB> for Option<T> where\n\n T: FromSqlRow<ST, DB>,\n\n DB: Backend + HasSqlType<ST>,\n\n ST: NotNull,\n\n{\n\n default fn build_from_row<R: Row<DB>>(row: &mut R) -> Result<Self, Box<Error+Send+Sync>> {\n\n if row.next_is_null(1) {\n\n row.take();\n\n Ok(None)\n\n } else {\n\n T::build_from_row(row).map(Some)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\n/// Tiny enum to make the return type of `ToSql` more descriptive\n\npub enum IsNull {\n\n Yes,\n\n No,\n\n}\n\n\n", "file_path": "diesel/src/types/mod.rs", "rank": 35, "score": 162011.30286053923 }, { "content": "#[cfg(feature = \"postgres\")]\n\ntype DB = diesel::pg::Pg;\n\n\n", "file_path": "diesel/src/doctest_setup.rs", "rank": 36, "score": 161846.69494442575 }, { "content": "pub fn id<A>(a: A) -> A { a }\n\n\n\nmacro_rules! 
test_round_trip {\n\n ($test_name:ident, $sql_type:ident, $tpe:ty) => {\n\n test_round_trip!($test_name, $sql_type, $tpe, id);\n\n };\n\n\n\n ($test_name:ident, $sql_type:ident, $tpe:ty, $map_fn:ident) => {\n\n #[test]\n\n fn $test_name() {\n\n fn round_trip(val: $tpe) -> bool {\n\n test_type_round_trips::<types::$sql_type, _>($map_fn(val))\n\n }\n\n\n\n fn option_round_trip(val: Option<$tpe>) -> bool {\n\n let val = val.map($map_fn);\n\n test_type_round_trips::<Nullable<types::$sql_type>, _>(val)\n\n }\n\n\n\n #[cfg(feature = \"postgres\")]\n", "file_path": "diesel_tests/tests/types_roundtrip.rs", "rank": 37, "score": 161697.08081126495 }, { "content": "#[cfg(feature = \"postgres\")]\n\n#[test]\n\n#[should_panic(expected=\"Received more than 4 bytes decoding i32\")]\n\nfn debug_check_catches_reading_bigint_as_i32_when_using_raw_sql() {\n\n use diesel::expression::dsl::sql;\n\n use diesel::types::Integer;\n\n\n\n let connection = connection();\n\n users::table.select(sql::<Integer>(\"COUNT(*)\")).get_result::<i32>(&connection).unwrap();\n\n}\n", "file_path": "diesel_tests/tests/types.rs", "rank": 38, "score": 161241.67598960456 }, { "content": "/// Sets the order clause of a query. If there was already a order clause, it\n\n/// will be overridden. The expression passed to `order` must actually be valid\n\n/// for the query. 
See also:\n\n/// [`.desc()`](expression/expression_methods/global_expression_methods/trait.ExpressionMethods.html#method.desc)\n\n/// and [`.asc()`](expression/expression_methods/global_expression_methods/trait.ExpressionMethods.html#method.asc)\n\n///\n\n/// This is automatically implemented for the various query builder types.\n\npub trait OrderDsl<Expr: Expression>: AsQuery {\n\n type Output: AsQuery<SqlType=Self::SqlType>;\n\n\n\n fn order(self, expr: Expr) -> Self::Output;\n\n}\n\n\n\nimpl<T, Expr, ST> OrderDsl<Expr> for T where\n\n Expr: Expression,\n\n T: QuerySource + AsQuery<SqlType=ST>,\n\n T::Query: OrderDsl<Expr, SqlType=ST>,\n\n{\n\n type Output = <T::Query as OrderDsl<Expr>>::Output;\n\n\n\n fn order(self, expr: Expr) -> Self::Output {\n\n self.as_query().order(expr)\n\n }\n\n}\n", "file_path": "diesel/src/query_dsl/order_dsl.rs", "rank": 39, "score": 159783.86020949227 }, { "content": "pub fn determine_column_type(cx: &mut ExtCtxt, span: Span, attr: &ColumnInformation) -> P<ast::Ty> {\n\n let type_name = attr.type_name.to_lowercase();\n\n let tpe = if is_bool(&type_name) {\n\n quote_ty!(cx, ::diesel::types::Bool)\n\n } else if is_smallint(&type_name) {\n\n quote_ty!(cx, ::diesel::types::SmallInt)\n\n } else if is_bigint(&type_name) {\n\n quote_ty!(cx, ::diesel::types::BigInt)\n\n } else if type_name.contains(\"int\") {\n\n quote_ty!(cx, ::diesel::types::Integer)\n\n } else if is_text(&type_name) {\n\n quote_ty!(cx, ::diesel::types::Text)\n\n } else if type_name.contains(\"blob\") || type_name.is_empty() {\n\n quote_ty!(cx, ::diesel::types::Binary)\n\n } else if is_float(&type_name) {\n\n quote_ty!(cx, ::diesel::types::Float)\n\n } else if is_double(&type_name) {\n\n quote_ty!(cx, ::diesel::types::Double)\n\n } else {\n\n cx.span_err(span, &format!(\"Unsupported type: {}\", type_name));\n\n quote_ty!(cx, ())\n\n };\n\n\n\n if attr.nullable {\n\n quote_ty!(cx, Nullable<$tpe>)\n\n } else {\n\n tpe\n\n }\n\n}\n\n\n", "file_path": 
"diesel_codegen_syntex/src/schema_inference/sqlite.rs", "rank": 40, "score": 158451.60247926164 }, { "content": "/// Creates a SQL `COUNT` expression\n\n///\n\n/// As with most bare functions, this is not exported by default. You can import\n\n/// it specifically as `diesel::expression::count`, or glob import\n\n/// `diesel::expression::dsl::*`\n\npub fn count<T: Expression>(t: T) -> Count<T> {\n\n Count {\n\n target: t,\n\n }\n\n}\n\n\n", "file_path": "diesel/src/expression/count.rs", "rank": 41, "score": 158240.89939113354 }, { "content": "// NOTE: This test is meant to be comprehensive, but not exhaustive.\n\nfn main() {\n\n use self::users::dsl::*;\n\n let connection = SqliteConnection::establish(\":memory:\").unwrap();\n\n\n\n users.select(id).filter(name.eq(any(Vec::<String>::new())))\n\n .load::<i32>(&connection);\n\n //~^ ERROR type mismatch resolving `<diesel::sqlite::SqliteConnection as diesel::Connection>::Backend == diesel::pg::Pg`\n\n users.select(id).filter(name.is_not_distinct_from(\"Sean\"))\n\n .load::<i32>(&connection);\n\n //~^ ERROR E0277\n\n let n = lower(\"sean\").aliased(\"n\");\n\n users.with(n).select(id)\n\n .load::<i32>(&connection);\n\n //~^ ERROR E0277\n\n users.select(id).filter(now.eq(now.at_time_zone(\"UTC\")))\n\n .load::<i32>(&connection);\n\n //~^ ERROR E0277\n\n}\n", "file_path": "diesel_compile_tests/tests/compile-fail/pg_specific_expressions_cant_be_used_in_a_sqlite_query.rs", "rank": 42, "score": 157469.82070634243 }, { "content": "pub fn get_primary_keys(conn: &PgConnection, table_name: &str) -> QueryResult<Vec<String>> {\n\n use self::pg_attribute::dsl::*;\n\n use self::pg_index::dsl::{pg_index, indisprimary, indexrelid, indrelid};\n\n use self::pg_class::dsl::*;\n\n\n\n let table_oid = pg_class.select(oid).filter(relname.eq(table_name)).limit(1);\n\n\n\n let pk_query = pg_index.select(indexrelid)\n\n .filter(indrelid.eq_any(table_oid))\n\n .filter(indisprimary.eq(true));\n\n\n\n pg_attribute.select(attname)\n\n 
.filter(attrelid.eq_any(pk_query))\n\n .order(attnum)\n\n .load(conn)\n\n}\n", "file_path": "diesel_codegen_syntex/src/schema_inference/pg.rs", "rank": 43, "score": 157275.2194350534 }, { "content": "pub fn infer_schema_body<T: Iterator<Item=P<ast::Expr>>>(\n\n cx: &mut ExtCtxt,\n\n sp: Span,\n\n exprs: &mut T,\n\n) -> Result<Box<MacResult>, Box<MacResult>> {\n\n let database_url = try!(next_str_lit(cx, sp, exprs));\n\n let connection = try!(establish_connection(cx, sp, &database_url));\n\n let table_names = load_table_names(&connection).unwrap();\n\n let impls = table_names.into_iter()\n\n .map(|n| table_macro_call(cx, sp, &connection, &n))\n\n .collect();\n\n Ok(MacEager::items(SmallVector::many(try!(impls))))\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/mod.rs", "rank": 44, "score": 157000.93514919255 }, { "content": "pub fn load_table_body<T: Iterator<Item=P<ast::Expr>>>(\n\n cx: &mut ExtCtxt,\n\n sp: Span,\n\n exprs: &mut T,\n\n) -> Result<Box<MacResult>, Box<MacResult>> {\n\n let database_url = try!(next_str_lit(cx, sp, exprs));\n\n let table_name = try!(next_str_lit(cx, sp, exprs));\n\n let connection = try!(establish_connection(cx, sp, &database_url));\n\n table_macro_call(cx, sp, &connection, &table_name)\n\n .map(|item| MacEager::items(SmallVector::one(item)))\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/mod.rs", "rank": 45, "score": 157000.93514919255 }, { "content": "fn is_double(type_name: &str) -> bool {\n\n type_name.contains(\"double\") ||\n\n type_name.contains(\"num\") ||\n\n type_name.contains(\"dec\")\n\n}\n\n\n\ntable! 
{\n\n sqlite_master (name) {\n\n name -> VarChar,\n\n }\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/sqlite.rs", "rank": 46, "score": 156883.0532625452 }, { "content": "fn is_smallint(type_name: &str) -> bool {\n\n type_name == \"int2\" ||\n\n type_name.contains(\"small\") &&\n\n type_name.contains(\"int\")\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/sqlite.rs", "rank": 47, "score": 156883.0532625452 }, { "content": "fn is_float(type_name: &str) -> bool {\n\n type_name.contains(\"float\") ||\n\n type_name.contains(\"real\")\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/sqlite.rs", "rank": 48, "score": 156883.0532625452 }, { "content": "fn is_bigint(type_name: &str) -> bool {\n\n type_name == \"int8\" ||\n\n type_name.contains(\"big\") &&\n\n type_name.contains(\"int\")\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/sqlite.rs", "rank": 49, "score": 156883.0532625452 }, { "content": "fn is_text(type_name: &str) -> bool {\n\n type_name.contains(\"char\") ||\n\n type_name.contains(\"clob\") ||\n\n type_name.contains(\"text\")\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/schema_inference/sqlite.rs", "rank": 50, "score": 156883.0532625452 }, { "content": "pub trait TypeMetadata {\n\n type TypeMetadata;\n\n}\n\n\n", "file_path": "diesel/src/backend.rs", "rank": 51, "score": 156368.3755031309 }, { "content": "pub fn get_table_data(conn: &PgConnection, table_name: &str) -> QueryResult<Vec<ColumnInformation>> {\n\n use self::pg_attribute::dsl::*;\n\n use self::pg_type::dsl::{pg_type, typname};\n\n use self::pg_class::dsl::*;\n\n\n\n let table_oid = pg_class.select(oid).filter(relname.eq(table_name)).limit(1);\n\n\n\n pg_attribute.inner_join(pg_type)\n\n .select((attname, typname, attnotnull))\n\n .filter(attrelid.eq_any(table_oid))\n\n .filter(attnum.gt(0).and(attisdropped.ne(true)))\n\n .order(attnum)\n\n .load(conn)\n\n}\n\n\n\n\n", "file_path": 
"diesel_codegen_syntex/src/schema_inference/pg.rs", "rank": 52, "score": 155487.4930232799 }, { "content": "pub trait SortExpressionMethods : Sized {\n\n /// Specify that nulls should come before other values in this ordering.\n\n /// Normally, nulls come last when sorting in ascending order and first\n\n /// when sorting in descending order.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// # #[macro_use] extern crate diesel;\n\n /// # include!(\"src/doctest_setup.rs\");\n\n /// #\n\n /// # table! {\n\n /// # users {\n\n /// # id -> Integer,\n\n /// # name -> VarChar,\n\n /// # }\n\n /// # }\n\n /// #\n\n /// # table! {\n\n /// # foos {\n", "file_path": "diesel/src/pg/expression/expression_methods.rs", "rank": 53, "score": 154204.93903135706 }, { "content": "#[doc(hidden)]\n\npub fn run_migration_with_version<Conn>(conn: &Conn, ver: &str, output: &mut Write)\n\n -> Result<(), RunMigrationsError> where\n\n Conn: MigrationConnection,\n\n{\n\n migration_with_version(ver)\n\n .map_err(|e| e.into())\n\n .and_then(|m| run_migration(conn, &*m, output))\n\n}\n\n\n", "file_path": "diesel/src/migrations/mod.rs", "rank": 54, "score": 152400.2263278864 }, { "content": "pub fn mk_naive_date(days: u32) -> NaiveDate {\n\n let earliest_pg_date = NaiveDate::from_ymd(-4713, 11, 24);\n\n let latest_chrono_date = date::MAX;\n\n let num_days_representable = (latest_chrono_date - earliest_pg_date).num_days();\n\n earliest_pg_date + Duration::days(days as i64 % num_days_representable)\n\n}\n\n\n\n#[cfg(feature = \"postgres\")]\n\nmod unstable_types {\n\n use super::*;\n\n use std::time::*;\n\n\n\n fn strip_nanosecond_precision(time: SystemTime) -> SystemTime {\n\n match time.duration_since(UNIX_EPOCH) {\n\n Ok(duration) => time - Duration::new(0, duration.subsec_nanos() % 1000),\n\n Err(e) => time + Duration::new(0, e.duration().subsec_nanos() % 1000),\n\n }\n\n }\n\n\n\n test_round_trip!(systemtime_roundtrips, Timestamp, SystemTime, strip_nanosecond_precision);\n\n}\n", 
"file_path": "diesel_tests/tests/types_roundtrip.rs", "rank": 55, "score": 148535.6329495685 }, { "content": "#[test]\n\nfn max_returns_same_type_as_expression_being_maximized() {\n\n let connection = connection();\n\n let source = users.select(max(name));\n\n\n\n let data: &[_] = &[\n\n NewUser::new(\"B\", None),\n\n NewUser::new(\"C\", None),\n\n NewUser::new(\"A\", None),\n\n ];\n\n batch_insert(data, users, &connection);\n\n assert_eq!(Ok(\"C\".to_string()), source.first(&connection));\n\n connection.execute(\"DELETE FROM users WHERE name = 'C'\").unwrap();\n\n assert_eq!(Ok(\"B\".to_string()), source.first(&connection));\n\n}\n\n\n\nuse std::marker::PhantomData;\n\n\n", "file_path": "diesel_tests/tests/expressions/mod.rs", "rank": 56, "score": 148284.54451619924 }, { "content": "#[doc(hidden)]\n\npub fn run_pending_migrations_in_directory<Conn>(conn: &Conn, migrations_dir: &Path, output: &mut Write)\n\n -> Result<(), RunMigrationsError> where\n\n Conn: MigrationConnection,\n\n{\n\n let all_migrations = try!(migrations_in_directory(migrations_dir));\n\n run_migrations(conn, all_migrations, output)\n\n}\n\n\n", "file_path": "diesel/src/migrations/mod.rs", "rank": 57, "score": 148058.5379718498 }, { "content": "#[doc(hidden)]\n\npub fn revert_migration_with_version<Conn: Connection>(conn: &Conn, ver: &str, output: &mut Write)\n\n -> Result<(), RunMigrationsError>\n\n{\n\n migration_with_version(ver)\n\n .map_err(|e| e.into())\n\n .and_then(|m| revert_migration(conn, m, output))\n\n}\n\n\n", "file_path": "diesel/src/migrations/mod.rs", "rank": 58, "score": 147295.99552049107 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Digit(i16);\n\n\n\nimpl Arbitrary for Digit {\n\n fn arbitrary<G: Gen>(g: &mut G) -> Self {\n\n let mut n = -1;\n\n while n < 0 || n >= 10000 {\n\n n = i16::arbitrary(g);\n\n }\n\n Digit(n)\n\n }\n\n}\n", "file_path": "diesel/src/pg/types/floats/quickcheck_impls.rs", "rank": 59, "score": 146826.95237781515 }, { "content": "#[test]\n\nfn 
uuid_to_sql() {\n\n let mut bytes = vec![];\n\n let test_uuid = uuid::Uuid::from_fields(4_294_967_295, 65_535, 65_535, b\"abcdef12\").unwrap();\n\n ToSql::<types::Uuid, Pg>::to_sql(&test_uuid, &mut bytes).unwrap();\n\n assert_eq!(bytes, test_uuid.as_bytes());\n\n}\n\n\n", "file_path": "diesel/src/pg/types/uuid.rs", "rank": 60, "score": 146661.88004224063 }, { "content": "#[test]\n\nfn some_uuid_from_sql() {\n\n let input_uuid =\n\n uuid::Uuid::from_fields(4_294_967_295, 65_535, 65_535, b\"abcdef12\").unwrap();\n\n let output_uuid =\n\n FromSql::<types::Uuid, Pg>::from_sql(Some(input_uuid.as_bytes())).unwrap();\n\n assert_eq!(input_uuid, output_uuid);\n\n}\n\n\n", "file_path": "diesel/src/pg/types/uuid.rs", "rank": 61, "score": 146661.88004224063 }, { "content": "#[test]\n\nfn no_uuid_from_sql() {\n\n let uuid: Result<uuid::Uuid, Box<Error+Send+Sync>> =\n\n FromSql::<types::Uuid, Pg>::from_sql(None);\n\n assert_eq!(uuid.unwrap_err().description(), \"Unexpected null for non-null column\");\n\n}\n", "file_path": "diesel/src/pg/types/uuid.rs", "rank": 62, "score": 146661.88004224063 }, { "content": "#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_array_from_sql() {\n\n assert_eq!(vec![true, false, true],\n\n query_single_value::<Array<Bool>, Vec<bool>>(\n\n \"ARRAY['t', 'f', 't']::bool[]\"));\n\n assert_eq!(vec![1, 2, 3],\n\n query_single_value::<Array<Integer>, Vec<i32>>(\"ARRAY[1, 2, 3]\"));\n\n assert_eq!(vec![\"Hello\".to_string(), \"\".to_string(), \"world\".to_string()],\n\n query_single_value::<Array<VarChar>, Vec<String>>(\n\n \"ARRAY['Hello', '', 'world']\"));\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 63, "score": 146661.88004224063 }, { "content": "#[test]\n\nfn i16_to_sql() {\n\n let mut bytes = vec![];\n\n ToSql::<types::SmallInt, Pg>::to_sql(&1i16, &mut bytes).unwrap();\n\n ToSql::<types::SmallInt, Pg>::to_sql(&0i16, &mut bytes).unwrap();\n\n ToSql::<types::SmallInt, Pg>::to_sql(&-1i16, &mut bytes).unwrap();\n\n assert_eq!(bytes, 
vec![0, 1, 0, 0, 255, 255]);\n\n}\n\n\n", "file_path": "diesel/src/pg/types/integers.rs", "rank": 64, "score": 146661.88004224063 }, { "content": "#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_uuid_from_sql() {\n\n extern crate uuid;\n\n\n\n let query = \"'8a645207-42d6-4d17-82e7-f5e42ede0f67'::uuid\";\n\n let expected_value = uuid::Uuid::parse_str(\"8a645207-42d6-4d17-82e7-f5e42ede0f67\").unwrap();\n\n assert_eq!(expected_value, query_single_value::<Uuid, uuid::Uuid>(query));\n\n let query = \"'f94e0e4d-c7b0-405f-9c0e-57b97f4afb58'::uuid\";\n\n let expected_value = uuid::Uuid::parse_str(\"f94e0e4d-c7b0-405f-9c0e-57b97f4afb58\").unwrap();\n\n assert_eq!(expected_value, query_single_value::<Uuid, uuid::Uuid>(query));\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 65, "score": 146661.88004224063 }, { "content": "#[test]\n\nfn i64_to_sql() {\n\n let mut bytes = vec![];\n\n ToSql::<types::BigInt, Pg>::to_sql(&1i64, &mut bytes).unwrap();\n\n ToSql::<types::BigInt, Pg>::to_sql(&0i64, &mut bytes).unwrap();\n\n ToSql::<types::BigInt, Pg>::to_sql(&-1i64, &mut bytes).unwrap();\n\n assert_eq!(bytes, vec![\n\n 0, 0, 0, 0, 0, 0, 0, 1,\n\n 0, 0, 0, 0, 0, 0, 0, 0,\n\n 255, 255, 255, 255, 255, 255, 255, 255]);\n\n}\n", "file_path": "diesel/src/pg/types/integers.rs", "rank": 66, "score": 146661.88004224063 }, { "content": "#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_numeric_from_sql() {\n\n use diesel::data_types::PgNumeric;\n\n\n\n let query = \"1.0::numeric\";\n\n let expected_value = PgNumeric::Positive {\n\n digits: vec![1],\n\n weight: 0,\n\n scale: 1,\n\n };\n\n assert_eq!(expected_value, query_single_value::<Numeric, PgNumeric>(query));\n\n let query = \"-31.0::numeric\";\n\n let expected_value = PgNumeric::Negative {\n\n digits: vec![31],\n\n weight: 0,\n\n scale: 1,\n\n };\n\n assert_eq!(expected_value, query_single_value::<Numeric, PgNumeric>(query));\n\n let query = \"'NaN'::numeric\";\n\n let expected_value = PgNumeric::NaN;\n\n 
assert_eq!(expected_value, query_single_value::<Numeric, PgNumeric>(query));\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 67, "score": 146661.88004224063 }, { "content": "#[test]\n\nfn i32_to_sql() {\n\n let mut bytes = vec![];\n\n ToSql::<types::Integer, Pg>::to_sql(&1i32, &mut bytes).unwrap();\n\n ToSql::<types::Integer, Pg>::to_sql(&0i32, &mut bytes).unwrap();\n\n ToSql::<types::Integer, Pg>::to_sql(&-1i32, &mut bytes).unwrap();\n\n assert_eq!(bytes, vec![0, 0, 0, 1, 0, 0, 0, 0, 255, 255, 255, 255]);\n\n}\n\n\n", "file_path": "diesel/src/pg/types/integers.rs", "rank": 68, "score": 146661.88004224063 }, { "content": "#[insertable_into(users)]\n\nstruct User {\n\n id: i32,\n\n}\n\n\n", "file_path": "diesel_compile_tests/tests/compile-fail/sqlite_upsert_cannot_be_used_on_pg.rs", "rank": 69, "score": 144581.707499634 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct InvalidNumericSign(u16);\n\n\n\nimpl ::std::fmt::Display for InvalidNumericSign {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n write!(f, \"InvalidNumericSign({0:x})\", self.0)\n\n }\n\n}\n\n\n\nimpl Error for InvalidNumericSign {\n\n fn description(&self) -> &str {\n\n \"sign for numeric field was not one of 0, 0x4000, 0xC000\"\n\n }\n\n}\n\n\n\nimpl FromSql<types::Numeric, Pg> for PgNumeric {\n\n fn from_sql(bytes: Option<&[u8]>) -> Result<Self, Box<Error+Send+Sync>> {\n\n let mut bytes = not_none!(bytes);\n\n let ndigits = try!(bytes.read_u16::<BigEndian>());\n\n let mut digits = Vec::with_capacity(ndigits as usize);\n\n let weight = try!(bytes.read_i16::<BigEndian>());\n", "file_path": "diesel/src/pg/types/floats/mod.rs", "rank": 70, "score": 143534.97752823183 }, { "content": "#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_specific_option_from_sql() {\n\n assert_eq!(Some(true),\n\n query_single_value::<Nullable<Bool>, Option<bool>>(\"'t'::bool\"));\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 71, "score": 
142983.26579257622 }, { "content": "#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_specific_option_to_sql() {\n\n assert!(query_to_sql_equality::<Nullable<Bool>, Option<bool>>(\"'t'::bool\", Some(true)));\n\n assert!(!query_to_sql_equality::<Nullable<Bool>, Option<bool>>(\"'f'::bool\", Some(true)));\n\n assert!(query_to_sql_equality::<Nullable<Bool>, Option<bool>>(\"NULL\", None));\n\n assert!(!query_to_sql_equality::<Nullable<Bool>, Option<bool>>(\"NULL::bool\", Some(false)));\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 72, "score": 142983.26579257622 }, { "content": "#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_uuid_to_sql_uuid() {\n\n extern crate uuid;\n\n\n\n let expected_value = \"'8a645207-42d6-4d17-82e7-f5e42ede0f67'::uuid\";\n\n let value = uuid::Uuid::parse_str(\"8a645207-42d6-4d17-82e7-f5e42ede0f67\").unwrap();\n\n assert!(query_to_sql_equality::<Uuid, uuid::Uuid>(expected_value, value));\n\n let expected_value = \"'f94e0e4d-c7b0-405f-9c0e-57b97f4afb58'::uuid\";\n\n let value = uuid::Uuid::parse_str(\"f94e0e4d-c7b0-405f-9c0e-57b97f4afb58\").unwrap();\n\n assert!(query_to_sql_equality::<Uuid, uuid::Uuid>(expected_value, value));\n\n let expected_non_equal_value = \"'8e940686-97a5-4e8b-ac44-64cf3cceea9b'::uuid\";\n\n assert!(!query_to_sql_equality::<Uuid, uuid::Uuid>(expected_non_equal_value, value));\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 73, "score": 142983.26579257622 }, { "content": "#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_array_containing_null() {\n\n let query = \"ARRAY['Hello', '', NULL, 'world']\";\n\n let data = query_single_value::<Array<Nullable<VarChar>>, Vec<Option<String>>>(query);\n\n let expected = vec![\n\n Some(\"Hello\".to_string()),\n\n Some(\"\".to_string()),\n\n None,\n\n Some(\"world\".to_string()),\n\n ];\n\n assert_eq!(expected, data);\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 74, "score": 142983.26579257622 }, { "content": 
"#[test]\n\n#[cfg(feature = \"postgres\")]\n\nfn pg_timestamp_to_sql_timestamp() {\n\n use diesel::data_types::PgTimestamp;\n\n\n\n let expected_value = \"'2015-11-13 13:26:48.041057-07'::timestamp\";\n\n let value = PgTimestamp(500736408041057);\n\n assert!(query_to_sql_equality::<Timestamp, PgTimestamp>(expected_value, value));\n\n let expected_value = \"'2015-11-13 13:26:49.041057-07'::timestamp\";\n\n let value = PgTimestamp(500736409041057);\n\n assert!(query_to_sql_equality::<Timestamp, PgTimestamp>(expected_value, value));\n\n let expected_non_equal_value = \"'2015-11-13 13:26:48.041057-07'::timestamp\";\n\n assert!(!query_to_sql_equality::<Timestamp, PgTimestamp>(expected_non_equal_value, value));\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 75, "score": 142983.26579257622 }, { "content": "#[test]\n\nfn bad_uuid_from_sql() {\n\n let uuid: Result<uuid::Uuid, Box<Error+Send+Sync>> =\n\n FromSql::<types::Uuid, Pg>::from_sql(Some(b\"boom\"));\n\n assert_eq!(uuid.unwrap_err().description(), \"UUID parse error\");\n\n}\n\n\n", "file_path": "diesel/src/pg/types/uuid.rs", "rank": 76, "score": 142983.26579257622 }, { "content": "pub fn mk_naive_time(data: (u32, u32)) -> NaiveTime {\n\n NaiveTime::from_num_seconds_from_midnight(data.0, data.1 / 1000)\n\n}\n\n\n", "file_path": "diesel_tests/tests/types_roundtrip.rs", "rank": 77, "score": 142837.89042243175 }, { "content": "/// Marker trait for types which are valid in `AT TIME ZONE` expressions\n\npub trait DateTimeLike {}\n\nimpl DateTimeLike for Date {}\n\nimpl DateTimeLike for Timestamp {}\n\nimpl DateTimeLike for Timestamptz {}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct AtTimeZone<Ts, Tz> {\n\n timestamp: Ts,\n\n timezone: Tz,\n\n}\n\n\n\nimpl<Ts, Tz> AtTimeZone<Ts, Tz> {\n\n pub fn new(timestamp: Ts, timezone: Tz) -> Self {\n\n AtTimeZone {\n\n timestamp: timestamp,\n\n timezone: timezone,\n\n }\n\n }\n\n}\n\n\n", "file_path": "diesel/src/pg/expression/date_and_time.rs", "rank": 78, 
"score": 141557.7668143715 }, { "content": "pub trait Row<DB: Backend> {\n\n fn take(&mut self) -> Option<&DB::RawValue>;\n\n fn next_is_null(&self, count: usize) -> bool;\n\n}\n", "file_path": "diesel/src/row.rs", "rank": 79, "score": 141508.60314213097 }, { "content": "pub fn mk_naive_datetime(data: (i64, u32)) -> NaiveDateTime {\n\n NaiveDateTime::from_timestamp(data.0, data.1 / 1000)\n\n}\n\n\n", "file_path": "diesel_tests/tests/types_roundtrip.rs", "rank": 80, "score": 140394.38893125975 }, { "content": "// Postgres timestamps start from January 1st 2000.\n\nfn pg_epoch() -> NaiveDateTime {\n\n NaiveDate::from_ymd(2000, 1, 1).and_hms(0, 0, 0)\n\n}\n\n\n\nimpl FromSql<Timestamp, Pg> for NaiveDateTime {\n\n fn from_sql(bytes: Option<&[u8]>) -> Result<Self, Box<Error+Send+Sync>> {\n\n let PgTimestamp(offset) = try!(FromSql::<Timestamp, Pg>::from_sql(bytes));\n\n match pg_epoch().checked_add(Duration::microseconds(offset)) {\n\n Some(v) => Ok(v),\n\n None => {\n\n let message = \"Tried to deserialize a timestamp that is too large for Chrono\";\n\n Err(message.into())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl ToSql<Timestamp, Pg> for NaiveDateTime {\n\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n\n let time = match (*self - pg_epoch()).num_microseconds() {\n", "file_path": "diesel/src/pg/types/date_and_time/chrono.rs", "rank": 81, "score": 140091.722087527 }, { "content": "fn pg_epoch_date() -> NaiveDate {\n\n NaiveDate::from_ymd(2000, 1, 1)\n\n}\n\n\n\nimpl ToSql<Date, Pg> for NaiveDate {\n\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n\n let days_since_epoch = (*self - pg_epoch_date()).num_days();\n\n ToSql::<Date, Pg>::to_sql(&PgDate(days_since_epoch as i32), out)\n\n }\n\n}\n\n\n\nimpl FromSql<Date, Pg> for NaiveDate {\n\n fn from_sql(bytes: Option<&[u8]>) -> Result<Self, Box<Error+Send+Sync>> {\n\n let PgDate(offset) = try!(FromSql::<Date, Pg>::from_sql(bytes));\n\n match 
pg_epoch_date().checked_add(Duration::days(offset as i64)) {\n\n Some(date) => Ok(date),\n\n None => {\n\n let error_message = format!(\"Chrono can only represent dates up to {:?}\",\n\n date::MAX);\n\n Err(Box::<Error + Send + Sync>::from(error_message))\n", "file_path": "diesel/src/pg/types/date_and_time/chrono.rs", "rank": 82, "score": 140086.9392538621 }, { "content": "fn pg_epoch() -> SystemTime {\n\n let thirty_years = Duration::from_secs(946684800);\n\n UNIX_EPOCH + thirty_years\n\n}\n\n\n\nimpl ToSql<types::Timestamp, Pg> for SystemTime {\n\n fn to_sql<W: Write>(&self, out: &mut W) -> Result<IsNull, Box<Error+Send+Sync>> {\n\n let (before_epoch, duration) = match self.duration_since(pg_epoch()) {\n\n Ok(duration) => (false, duration),\n\n Err(time_err) => (true, time_err.duration()),\n\n };\n\n let time_since_epoch = if before_epoch {\n\n -(duration_to_usecs(duration) as i64)\n\n } else {\n\n duration_to_usecs(duration) as i64\n\n };\n\n ToSql::<types::BigInt, Pg>::to_sql(&time_since_epoch, out)\n\n }\n\n}\n\n\n", "file_path": "diesel/src/pg/types/date_and_time/std_time.rs", "rank": 83, "score": 140086.9392538621 }, { "content": "pub fn mk_datetime(data: (i64, u32)) -> DateTime<UTC> {\n\n DateTime::from_utc(mk_naive_datetime(data), UTC)\n\n}\n\n\n", "file_path": "diesel_tests/tests/types_roundtrip.rs", "rank": 84, "score": 140068.35324429724 }, { "content": "fn main() {\n\n use self::users::dsl::*;\n\n\n\n let pred = id.eq(\"string\");\n\n //~^ ERROR E0277\n\n let pred = id.eq(name);\n\n //~^ ERROR type mismatch\n\n}\n", "file_path": "diesel_compile_tests/tests/compile-fail/expressions_can_only_be_compared_for_equality_to_expressions_of_same_type.rs", "rank": 85, "score": 139853.4585560433 }, { "content": "#[cfg(feature = \"postgres\")]\n\nfn establish_connection() -> diesel::pg::PgConnection {\n\n let connection = connection_no_data();\n\n\n\n connection.execute(\"CREATE TABLE users (\n\n id SERIAL PRIMARY KEY,\n\n name VARCHAR NOT NULL\n\n 
)\").unwrap();\n\n connection.execute(\"INSERT INTO users (name) VALUES ('Sean'), ('Tess')\").unwrap();\n\n\n\n connection\n\n}\n\n\n", "file_path": "diesel/src/doctest_setup.rs", "rank": 86, "score": 139380.44630107086 }, { "content": "#[cfg(feature = \"postgres\")]\n\nfn connection_no_data() -> diesel::pg::PgConnection {\n\n dotenv().ok();\n\n\n\n let connection_url = ::std::env::var(\"DATABASE_URL\")\n\n .expect(\"DATABASE_URL must be set in order to run tests\");\n\n let connection = diesel::pg::PgConnection::establish(&connection_url).unwrap();\n\n connection.begin_test_transaction().unwrap();\n\n connection.execute(\"DROP TABLE IF EXISTS users\").unwrap();\n\n\n\n connection\n\n}\n\n\n", "file_path": "diesel/src/doctest_setup.rs", "rank": 87, "score": 139380.44630107086 }, { "content": "/// Adds an additional expression to the FROM clause. This is useful for things\n\n/// like full text search, where you need to access the result of an expensive\n\n/// computation for the where clause that shouldn't be redone for each row, such\n\n/// as `plain_to_tsquery`. 
See\n\n/// [`.aliased`](expression/expression_methods/global_expression_methods/trait.ExpressionMethods.html#method.aliased)\n\n/// for more\n\npub trait WithDsl<'a, Expr> {\n\n type Output: AsQuery;\n\n\n\n fn with(self, expr: Aliased<'a, Expr>) -> Self::Output;\n\n}\n\n\n\nimpl<'a, T, Expr> WithDsl<'a, Expr> for T where\n\n T: QuerySource + AsQuery,\n\n T::Query: WithDsl<'a, Expr>\n\n{\n\n type Output = <T::Query as WithDsl<'a, Expr>>::Output;\n\n\n\n fn with(self, expr: Aliased<'a, Expr>) -> Self::Output {\n\n self.as_query().with(expr)\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct WithQuerySource<'a, Left, Right> {\n", "file_path": "diesel/src/query_dsl/with_dsl.rs", "rank": 88, "score": 139119.75390968696 }, { "content": "pub fn expand_insert(\n\n cx: &mut ExtCtxt,\n\n span: Span,\n\n meta_item: &MetaItem,\n\n annotatable: &Annotatable,\n\n push: &mut FnMut(Annotatable)\n\n) {\n\n if let Annotatable::Item(ref item) = *annotatable {\n\n let tables = insertable_tables(cx, meta_item);\n\n for body in tables.into_iter().filter_map(|t| insertable_impl(cx, span, t, item)) {\n\n push(Annotatable::Item(body));\n\n }\n\n } else {\n\n cx.span_err(meta_item.span,\n\n \"`insertable_into` may only be applied to enums and structs\");\n\n };\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/insertable.rs", "rank": 89, "score": 139012.2474572426 }, { "content": "pub fn expand_changeset_for(\n\n cx: &mut ExtCtxt,\n\n span: Span,\n\n meta_item: &MetaItem,\n\n annotatable: &Annotatable,\n\n push: &mut FnMut(Annotatable),\n\n) {\n\n if let Some(model) = Model::from_annotable(cx, span, annotatable) {\n\n let options = changeset_options(cx, meta_item).unwrap();\n\n push(Annotatable::Item(changeset_impl(cx, span, &options, &model).unwrap()));\n\n } else {\n\n cx.span_err(meta_item.span,\n\n \"`changeset_for` may only be apllied to enums and structs\");\n\n }\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/update.rs", "rank": 90, "score": 
139012.2474572426 }, { "content": "pub trait InsertValues<DB: Backend> {\n\n fn column_names(&self, out: &mut DB::QueryBuilder) -> BuildQueryResult;\n\n fn values_clause(&self, out: &mut DB::QueryBuilder) -> BuildQueryResult;\n\n fn values_bind_params(&self, out: &mut DB::BindCollector) -> QueryResult<()>;\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum ColumnInsertValue<Col, Expr> where\n\n Col: Column,\n\n Expr: Expression<SqlType=Col::SqlType>,\n\n{\n\n Expression(Col, Expr),\n\n Default(Col),\n\n}\n\n\n\nimpl<'a, T, U: 'a, DB> Insertable<T, DB> for &'a [U] where\n\n T: Table,\n\n DB: Backend,\n\n &'a U: Insertable<T, DB>,\n\n DB: SupportsDefaultKeyword,\n", "file_path": "diesel/src/persistable.rs", "rank": 91, "score": 138997.87876527352 }, { "content": "pub fn expand_derive_identifiable(\n\n cx: &mut ExtCtxt,\n\n span: Span,\n\n _meta_item: &ast::MetaItem,\n\n annotatable: &Annotatable,\n\n push: &mut FnMut(Annotatable)\n\n) {\n\n if let Some(model) = Model::from_annotable(cx, span, annotatable) {\n\n let table_name = model.table_name();\n\n let struct_ty = &model.ty;\n\n let fields = model.field_tokens_for_stable_macro(cx);\n\n if model.attr_named(str_to_ident(\"id\")).is_some() {\n\n push(Annotatable::Item(quote_item!(cx, Identifiable! 
{\n\n (\n\n table_name = $table_name,\n\n struct_ty = $struct_ty,\n\n ),\n\n fields = [$fields],\n\n }).unwrap()));\n\n } else {\n\n cx.span_err(span, &format!(\"Could not find a field named `id` on `{}`\", model.name));\n\n }\n\n }\n\n}\n", "file_path": "diesel_codegen_syntex/src/identifiable.rs", "rank": 92, "score": 136710.14642252045 }, { "content": "pub fn expand_derive_queryable(\n\n cx: &mut ExtCtxt,\n\n span: Span,\n\n meta_item: &MetaItem,\n\n annotatable: &Annotatable,\n\n push: &mut FnMut(Annotatable)\n\n) {\n\n if let Annotatable::Item(ref item) = *annotatable {\n\n let (mut generics, attrs) = match Attr::from_item(cx, item) {\n\n Some((generics, attrs)) => (generics, attrs),\n\n None => {\n\n cx.span_err(span, \"`#[derive(Queryable)]` can only be applied to structs or tuple structs\");\n\n return;\n\n }\n\n };\n\n\n\n let ty = struct_ty(cx, span, item.ident, &generics);\n\n\n\n let row_type = cx.ty(span, TyKind::Tup(attrs.iter().map(|f| f.ty.clone()).collect()));\n\n\n", "file_path": "diesel_codegen_syntex/src/queryable.rs", "rank": 93, "score": 136710.14642252045 }, { "content": "#[allow(unused_imports)]\n\npub fn expand_belongs_to(\n\n cx: &mut ExtCtxt,\n\n span: Span,\n\n meta_item: &MetaItem,\n\n annotatable: &Annotatable,\n\n push: &mut FnMut(Annotatable),\n\n) {\n\n let options = parse_association_options(\"belongs_to\", cx, span, meta_item, annotatable);\n\n if let Some((model, options)) = options {\n\n let parent_struct = options.name;\n\n let struct_name = model.name;\n\n\n\n let foreign_key_name = options.foreign_key_name.unwrap_or_else(||\n\n to_foreign_key(&parent_struct.name.as_str()));\n\n let child_table_name = model.table_name();\n\n let fields = model.field_tokens_for_stable_macro(cx);\n\n push(Annotatable::Item(quote_item!(cx, BelongsTo! 
{\n\n (\n\n struct_name = $struct_name,\n\n parent_struct = $parent_struct,\n\n foreign_key_name = $foreign_key_name,\n\n child_table_name = $child_table_name,\n\n ),\n\n fields = [$fields],\n\n }).unwrap()));\n\n }\n\n}\n", "file_path": "diesel_codegen_syntex/src/associations/belongs_to.rs", "rank": 94, "score": 136710.14642252045 }, { "content": "pub fn expand_has_many(\n\n cx: &mut ExtCtxt,\n\n span: Span,\n\n meta_item: &MetaItem,\n\n annotatable: &Annotatable,\n\n push: &mut FnMut(Annotatable)\n\n) {\n\n let options = parse_association_options(\"has_many\", cx, span, meta_item, annotatable);\n\n if let Some((model, options)) = options {\n\n let parent_table_name = model.table_name();\n\n let child_table_name = options.name;\n\n let foreign_key_name = options.foreign_key_name.unwrap_or_else(||\n\n to_foreign_key(&model.name.name.as_str()));\n\n let fields = model.field_tokens_for_stable_macro(cx);\n\n push(Annotatable::Item(quote_item!(cx, HasMany! {\n\n (\n\n parent_table_name = $parent_table_name,\n\n child_table = $child_table_name::table,\n\n foreign_key = $child_table_name::$foreign_key_name,\n\n ),\n\n fields = [$fields],\n\n }).unwrap()));\n\n }\n\n}\n", "file_path": "diesel_codegen_syntex/src/associations/has_many.rs", "rank": 95, "score": 136710.14642252045 }, { "content": "pub fn integer<'a>(name: &'a str) -> Column<'a, types::Integer> {\n\n Column::new(name, \"INTEGER\")\n\n}\n\n\n", "file_path": "diesel_tests/tests/schema_dsl/functions.rs", "rank": 96, "score": 135933.4530657961 }, { "content": "fn query_to_sql_equality<T, U>(sql_str: &str, value: U) -> bool where\n\n TestBackend: HasSqlType<T>,\n\n U: AsExpression<T> + Debug + Clone,\n\n U::Expression: SelectableExpression<(), T> + QueryFragment<TestBackend> + QueryId,\n\n T: QueryId,\n\n{\n\n use diesel::expression::dsl::sql;\n\n let connection = connection();\n\n let query = select(\n\n sql::<T>(sql_str).is_null().and(value.clone().as_expression().is_null()).or(\n\n 
sql::<T>(sql_str).eq(value.clone()))\n\n );\n\n query.get_result(&connection).expect(&format!(\"Error comparing {}, {:?}\", sql_str, value))\n\n}\n\n\n", "file_path": "diesel_tests/tests/types.rs", "rank": 97, "score": 134565.04666748212 }, { "content": "pub fn str_value_of_attr_with_name(\n\n cx: &mut ExtCtxt,\n\n attrs: &[ast::Attribute],\n\n name: &str,\n\n) -> Option<ast::Ident> {\n\n attrs.iter()\n\n .find(|a| a.check_name(name))\n\n .and_then(|a| str_value_of_attr(cx, &a, name))\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/util.rs", "rank": 98, "score": 134546.53741009897 }, { "content": "pub fn ident_value_of_attr_with_name(\n\n cx: &mut ExtCtxt,\n\n attrs: &[ast::Attribute],\n\n name: &str,\n\n) -> Option<ast::Ident> {\n\n attrs.iter()\n\n .find(|a| a.check_name(name))\n\n .and_then(|a| single_arg_value_of_attr(cx, &a, name))\n\n}\n\n\n", "file_path": "diesel_codegen_syntex/src/util.rs", "rank": 99, "score": 134546.53741009897 } ]
Rust
components/zcash_address/src/kind/unified/f4jumble.rs
MixinNetwork/librustzcash
9be36f3e54127fc2e6a30a70625eddfb12367d40
use blake2b_simd::{Params as Blake2bParams, OUTBYTES}; use std::cmp::min; use std::ops::RangeInclusive; #[cfg(test)] mod test_vectors; const VALID_LENGTH: RangeInclusive<usize> = 48..=16448; macro_rules! H_PERS { ( $i:expr ) => { [ 85, 65, 95, 70, 52, 74, 117, 109, 98, 108, 101, 95, 72, 95, $i, 0, ] }; } macro_rules! G_PERS { ( $i:expr, $j:expr ) => { [ 85, 65, 95, 70, 52, 74, 117, 109, 98, 108, 101, 95, 71, 95, $i, $j, ] }; } struct Hashes { l_l: usize, l_r: usize, } impl Hashes { fn new(message_length: usize) -> Self { let l_l = min(OUTBYTES, message_length / 2); let l_r = message_length - l_l; Hashes { l_l, l_r } } fn h(&self, i: u8, u: &[u8]) -> Vec<u8> { Blake2bParams::new() .hash_length(self.l_l) .personal(&H_PERS!(i)) .hash(&u) .as_ref() .to_vec() } fn g(&self, i: u8, u: &[u8]) -> Vec<u8> { (0..ceildiv(self.l_r, OUTBYTES)) .flat_map(|j| { Blake2bParams::new() .hash_length(OUTBYTES) .personal(&G_PERS!(i, j as u8)) .hash(u) .as_ref() .to_vec() .into_iter() }) .take(self.l_r) .collect() } } fn xor(a: &[u8], b: &[u8]) -> Vec<u8> { a.iter().zip(b.iter()).map(|(a0, b0)| a0 ^ b0).collect() } fn ceildiv(num: usize, den: usize) -> usize { (num + den - 1) / den } #[allow(clippy::many_single_char_names)] pub fn f4jumble(a: &[u8]) -> Option<Vec<u8>> { if VALID_LENGTH.contains(&a.len()) { let hashes = Hashes::new(a.len()); let (a, b) = a.split_at(hashes.l_l); let x = xor(b, &hashes.g(0, a)); let y = xor(a, &hashes.h(0, &x)); let d = xor(&x, &hashes.g(1, &y)); let mut c = xor(&y, &hashes.h(1, &d)); c.extend(d); Some(c) } else { None } } #[allow(clippy::many_single_char_names)] pub fn f4jumble_inv(c: &[u8]) -> Option<Vec<u8>> { if VALID_LENGTH.contains(&c.len()) { let hashes = Hashes::new(c.len()); let (c, d) = c.split_at(hashes.l_l); let y = xor(c, &hashes.h(1, d)); let x = xor(d, &hashes.g(1, &y)); let mut a = xor(&y, &hashes.h(0, &x)); let b = xor(&x, &hashes.g(0, &a)); a.extend(b); Some(a) } else { None } } #[cfg(test)] mod tests { use proptest::collection::vec; use 
proptest::prelude::*; use super::{f4jumble, f4jumble_inv, test_vectors::test_vectors, VALID_LENGTH}; #[test] fn h_pers() { assert_eq!(&H_PERS!(7), b"UA_F4Jumble_H_\x07\x00"); } #[test] fn g_pers() { assert_eq!(&G_PERS!(7, 13), b"UA_F4Jumble_G_\x07\x0d"); } proptest! { #[test] fn f4jumble_roundtrip(msg in vec(any::<u8>(), VALID_LENGTH)) { let jumbled = f4jumble(&msg).unwrap(); let jumbled_len = jumbled.len(); prop_assert_eq!( msg.len(), jumbled_len, "Jumbled length {} was not equal to message length {}", jumbled_len, msg.len() ); let unjumbled = f4jumble_inv(&jumbled).unwrap(); prop_assert_eq!( jumbled_len, unjumbled.len(), "Unjumbled length {} was not equal to jumbled length {}", unjumbled.len(), jumbled_len ); prop_assert_eq!(msg, unjumbled, "Unjumbled message did not match original message."); } } #[test] fn f4jumble_check_vectors() { for v in test_vectors() { let jumbled = f4jumble(&v.normal).unwrap(); assert_eq!(jumbled, v.jumbled); let unjumbled = f4jumble_inv(&v.jumbled).unwrap(); assert_eq!(unjumbled, v.normal); } } }
use blake2b_simd::{Params as Blake2bParams, OUTBYTES}; use std::cmp::min; use std::ops::RangeInclusive; #[cfg(test)] mod test_vectors; const VALID_LENGTH: RangeInclusive<usize> = 48..=16448; macro_rules! H_PERS { ( $i:expr ) => { [ 85, 65, 95, 70, 52, 74, 117, 109, 98, 108, 101, 95, 72, 95, $i, 0, ] }; } macro_rules! G_PERS { ( $i:expr, $j:expr ) => { [ 85, 65, 95, 70, 52, 74, 117, 109, 98, 108, 101, 95, 71, 95, $i, $j, ] }; } struct Hashes { l_l: usize, l_r: usize, } impl Hashes { fn new(message_length: usize) -> Self { let l_l = min(OUTBYTES, message_length / 2); let l_r = message_length - l_l; Hashes { l_l, l_r } } fn h(&self, i: u8, u: &[u8]) -> Vec<u8> { Blake2bParams::new() .hash_length(self.l_l) .personal(&H_PERS!(i)) .hash(&u) .as_ref() .to_vec() } fn g(&self, i: u8, u: &[u8]) -> Vec<u8> { (0..ceildiv(self.l_r, OUTBYTES)) .flat_map(|j| { Blake2bParams::new() .hash_length(OUTBYTES) .personal(&G_PERS!(i, j as u8)) .hash(u) .as_ref() .to_vec() .into_iter() }) .take(self.l_r) .collect() } } fn xor(a: &[u8], b: &[u8]) -> Vec<u8> { a.iter().zip(b.iter()).map(|(a0, b0)| a0 ^ b0).collect() } fn ceildiv(num: usize, den: usize) -> usize { (num + den - 1) / den } #[allow(clippy::many_single_char_names)] pub fn f4jumble(a: &[u8]) -> Option<Vec<u8>> { if VALID_LENGTH.contains(&a.len()) { let hashes = Hashes::new(a.len()); let (a, b) = a.split_at(hashes.l_l); let x = xor(b, &hashes.g(0, a)); let y = xor(a, &hashes.h(0, &x)); let d = xor(&x, &hashes.g(1, &y)); let mut c = xor(&y, &hashes.h(1, &d)); c.extend(d); Some(c) } else { None } } #[allow(clippy::many_single_char_names)]
#[cfg(test)] mod tests { use proptest::collection::vec; use proptest::prelude::*; use super::{f4jumble, f4jumble_inv, test_vectors::test_vectors, VALID_LENGTH}; #[test] fn h_pers() { assert_eq!(&H_PERS!(7), b"UA_F4Jumble_H_\x07\x00"); } #[test] fn g_pers() { assert_eq!(&G_PERS!(7, 13), b"UA_F4Jumble_G_\x07\x0d"); } proptest! { #[test] fn f4jumble_roundtrip(msg in vec(any::<u8>(), VALID_LENGTH)) { let jumbled = f4jumble(&msg).unwrap(); let jumbled_len = jumbled.len(); prop_assert_eq!( msg.len(), jumbled_len, "Jumbled length {} was not equal to message length {}", jumbled_len, msg.len() ); let unjumbled = f4jumble_inv(&jumbled).unwrap(); prop_assert_eq!( jumbled_len, unjumbled.len(), "Unjumbled length {} was not equal to jumbled length {}", unjumbled.len(), jumbled_len ); prop_assert_eq!(msg, unjumbled, "Unjumbled message did not match original message."); } } #[test] fn f4jumble_check_vectors() { for v in test_vectors() { let jumbled = f4jumble(&v.normal).unwrap(); assert_eq!(jumbled, v.jumbled); let unjumbled = f4jumble_inv(&v.jumbled).unwrap(); assert_eq!(unjumbled, v.normal); } } }
pub fn f4jumble_inv(c: &[u8]) -> Option<Vec<u8>> { if VALID_LENGTH.contains(&c.len()) { let hashes = Hashes::new(c.len()); let (c, d) = c.split_at(hashes.l_l); let y = xor(c, &hashes.h(1, d)); let x = xor(d, &hashes.g(1, &y)); let mut a = xor(&y, &hashes.h(0, &x)); let b = xor(&x, &hashes.g(0, &a)); a.extend(b); Some(a) } else { None } }
function_block-full_function
[ { "content": "/// Compute a parent node in the Sapling commitment tree given its two children.\n\npub fn merkle_hash(depth: usize, lhs: &[u8; 32], rhs: &[u8; 32]) -> [u8; 32] {\n\n let lhs = {\n\n let mut tmp = [false; 256];\n\n for (a, b) in tmp.iter_mut().zip(lhs.as_bits::<Lsb0>()) {\n\n *a = *b;\n\n }\n\n tmp\n\n };\n\n\n\n let rhs = {\n\n let mut tmp = [false; 256];\n\n for (a, b) in tmp.iter_mut().zip(rhs.as_bits::<Lsb0>()) {\n\n *a = *b;\n\n }\n\n tmp\n\n };\n\n\n\n jubjub::ExtendedPoint::from(pedersen_hash(\n\n Personalization::MerkleTree(depth),\n\n lhs.iter()\n", "file_path": "zcash_primitives/src/sapling.rs", "rank": 0, "score": 279140.0329771331 }, { "content": "pub fn hash_to_scalar(persona: &[u8], a: &[u8], b: &[u8]) -> jubjub::Fr {\n\n let mut hasher = Params::new().hash_length(64).personal(persona).to_state();\n\n hasher.update(a);\n\n hasher.update(b);\n\n let ret = hasher.finalize();\n\n jubjub::Fr::from_bytes_wide(ret.as_array())\n\n}\n\n\n", "file_path": "zcash_primitives/src/sapling/util.rs", "rank": 1, "score": 264404.6698361105 }, { "content": "/// PRF^expand(sk, t) := BLAKE2b-512(\"Zcash_ExpandSeed\", sk || t)\n\npub fn prf_expand(sk: &[u8], t: &[u8]) -> Blake2bHash {\n\n prf_expand_vec(sk, &[t])\n\n}\n\n\n", "file_path": "zcash_primitives/src/sapling/keys.rs", "rank": 3, "score": 247817.8841102623 }, { "content": "pub fn prf_expand_vec(sk: &[u8], ts: &[&[u8]]) -> Blake2bHash {\n\n let mut h = Blake2bParams::new()\n\n .hash_length(64)\n\n .personal(PRF_EXPAND_PERSONALIZATION)\n\n .to_state();\n\n h.update(sk);\n\n for t in ts {\n\n h.update(t);\n\n }\n\n h.finalize()\n\n}\n\n\n\n/// An outgoing viewing key\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct OutgoingViewingKey(pub [u8; 32]);\n\n\n\n/// A Sapling expanded spending key\n\n#[derive(Clone)]\n\npub struct ExpandedSpendingKey {\n\n pub ask: jubjub::Fr,\n", "file_path": "zcash_primitives/src/sapling/keys.rs", "rank": 4, "score": 241323.2726902154 }, { "content": 
"#[allow(clippy::assertions_on_constants)]\n\npub fn group_hash(tag: &[u8], personalization: &[u8]) -> Option<jubjub::SubgroupPoint> {\n\n assert_eq!(personalization.len(), 8);\n\n\n\n // Check to see that scalar field is 255 bits\n\n assert!(bls12_381::Scalar::NUM_BITS == 255);\n\n\n\n let h = Params::new()\n\n .hash_length(32)\n\n .personal(personalization)\n\n .to_state()\n\n .update(constants::GH_FIRST_BLOCK)\n\n .update(tag)\n\n .finalize();\n\n\n\n let p = jubjub::ExtendedPoint::from_bytes(h.as_array());\n\n if p.is_some().into() {\n\n // <ExtendedPoint as CofactorGroup>::clear_cofactor is implemented using\n\n // ExtendedPoint::mul_by_cofactor in the jubjub crate.\n\n let p = CofactorGroup::clear_cofactor(&p.unwrap());\n\n\n\n if p.is_identity().into() {\n\n None\n\n } else {\n\n Some(p)\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "zcash_primitives/src/sapling/group_hash.rs", "rank": 5, "score": 240230.12655849865 }, { "content": "fn bench_pedersen_hash(c: &mut Criterion) {\n\n let rng = &mut OsRng;\n\n let bits = (0..510)\n\n .map(|_| (rng.next_u32() % 2) != 0)\n\n .collect::<Vec<_>>();\n\n let personalization = Personalization::MerkleTree(31);\n\n\n\n c.bench_function(\"pedersen-hash\", |b| {\n\n b.iter(|| pedersen_hash(personalization, bits.clone()))\n\n });\n\n}\n\n\n\n#[cfg(unix)]\n\ncriterion_group! 
{\n\n name = benches;\n\n config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None)));\n\n targets = bench_pedersen_hash\n\n}\n\n#[cfg(not(unix))]\n\ncriterion_group!(benches, bench_pedersen_hash);\n\ncriterion_main!(benches);\n", "file_path": "zcash_primitives/benches/pedersen_hash.rs", "rank": 7, "score": 223082.49164982216 }, { "content": "fn hash_1(preimage_1: &[u8; 32], hash_2: &[u8; 32]) -> [u8; 32] {\n\n let mut hash = [0; 32];\n\n hash.copy_from_slice(\n\n Params::new()\n\n .hash_length(32)\n\n .personal(b\"demo_pc_h1_perso\")\n\n .to_state()\n\n .update(preimage_1)\n\n .update(hash_2)\n\n .finalize()\n\n .as_bytes(),\n\n );\n\n hash\n\n}\n\n\n\n/// Wrapper for [`zcash_primitives::transaction::builder::Builder`] that simplifies\n\n/// constructing transactions that utilize the features of the demo extension.\n\npub struct DemoBuilder<B> {\n\n /// The wrapped transaction builder.\n\n pub txn_builder: B,\n", "file_path": "zcash_extensions/src/transparent/demo.rs", "rank": 8, "score": 211135.26597232535 }, { "content": "pub fn signature_hash<\n\n 'a,\n\n SA: sapling::Authorization<Proof = GrothProofBytes>,\n\n A: Authorization<SaplingAuth = SA>,\n\n>(\n\n tx: &TransactionData<A>,\n\n hash_type: u32,\n\n signable_input: &SignableInput<'a>,\n\n txid_parts: &TxDigests<Blake2bHash>,\n\n) -> SignatureHash {\n\n SignatureHash(match tx.version {\n\n TxVersion::Sprout(_) | TxVersion::Overwinter | TxVersion::Sapling => {\n\n v4_signature_hash(tx, hash_type, signable_input)\n\n }\n\n\n\n TxVersion::Zip225 => v5_signature_hash(tx, hash_type, signable_input, txid_parts),\n\n\n\n #[cfg(feature = \"zfuture\")]\n\n TxVersion::ZFuture => v5_signature_hash(tx, hash_type, signable_input, txid_parts),\n\n })\n\n}\n", "file_path": "zcash_primitives/src/transaction/sighash.rs", "rank": 10, "score": 198552.08790189115 }, { "content": "pub fn pedersen_hash<CS>(\n\n mut cs: CS,\n\n personalization: Personalization,\n\n bits: &[Boolean],\n\n) -> 
Result<EdwardsPoint, SynthesisError>\n\nwhere\n\n CS: ConstraintSystem<bls12_381::Scalar>,\n\n{\n\n let personalization = get_constant_bools(&personalization);\n\n assert_eq!(personalization.len(), 6);\n\n\n\n let mut edwards_result = None;\n\n let mut bits = personalization.iter().chain(bits.iter()).peekable();\n\n let mut segment_generators = PEDERSEN_CIRCUIT_GENERATORS.iter();\n\n let boolean_false = Boolean::constant(false);\n\n\n\n let mut segment_i = 0;\n\n while bits.peek().is_some() {\n\n let mut segment_result = None;\n\n let mut segment_windows = &segment_generators.next().expect(\"enough segments\")[..];\n", "file_path": "zcash_proofs/src/circuit/pedersen_hash.rs", "rank": 11, "score": 198503.44989091373 }, { "content": "pub fn v4_signature_hash<\n\n SA: sapling::Authorization<Proof = GrothProofBytes>,\n\n A: Authorization<SaplingAuth = SA>,\n\n>(\n\n tx: &TransactionData<A>,\n\n hash_type: u32,\n\n signable_input: &SignableInput<'_>,\n\n) -> Blake2bHash {\n\n if tx.version.has_overwinter() {\n\n let mut personal = [0; 16];\n\n (&mut personal[..12]).copy_from_slice(ZCASH_SIGHASH_PERSONALIZATION_PREFIX);\n\n (&mut personal[12..])\n\n .write_u32::<LittleEndian>(tx.consensus_branch_id.into())\n\n .unwrap();\n\n\n\n let mut h = Blake2bParams::new()\n\n .hash_length(32)\n\n .personal(&personal)\n\n .to_state();\n\n let mut tmp = [0; 8];\n", "file_path": "zcash_primitives/src/transaction/sighash_v4.rs", "rank": 12, "score": 192266.7628600193 }, { "content": "/// Format a byte array as a colon-delimited hex string.\n\n///\n\n/// - Source: <https://github.com/tendermint/signatory>\n\n/// - License: MIT / Apache 2.0\n\nfn fmt_colon_delimited_hex<B>(f: &mut fmt::Formatter<'_>, bytes: B) -> fmt::Result\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n let len = bytes.as_ref().len();\n\n\n\n for (i, byte) in bytes.as_ref().iter().enumerate() {\n\n write!(f, \"{:02x}\", byte)?;\n\n\n\n if i != len - 1 {\n\n write!(f, \":\")?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Errors 
that may result from attempting to construct an invalid memo.\n\n#[derive(Debug, PartialEq)]\n\npub enum Error {\n", "file_path": "zcash_primitives/src/memo.rs", "rank": 13, "score": 187254.7511365672 }, { "content": "fn expand_array(vin: &[u8], bit_len: usize, byte_pad: usize) -> Vec<u8> {\n\n assert!(bit_len >= 8);\n\n assert!(8 * size_of::<u32>() >= 7 + bit_len);\n\n\n\n let out_width = (bit_len + 7) / 8 + byte_pad;\n\n let out_len = 8 * out_width * vin.len() / bit_len;\n\n\n\n // Shortcut for parameters where expansion is a no-op\n\n if out_len == vin.len() {\n\n return vin.to_vec();\n\n }\n\n\n\n let mut vout: Vec<u8> = vec![0; out_len];\n\n let bit_len_mask: u32 = (1 << bit_len) - 1;\n\n\n\n // The acc_bits least-significant bits of acc_value represent a bit sequence\n\n // in big-endian order.\n\n let mut acc_bits = 0;\n\n let mut acc_value: u32 = 0;\n\n\n", "file_path": "components/equihash/src/verify.rs", "rank": 14, "score": 187157.12169893432 }, { "content": "/// Checks that the scanned blocks in the data database, when combined with the recent\n\n/// `CompactBlock`s in the cache database, form a valid chain.\n\n///\n\n/// This function is built on the core assumption that the information provided in the\n\n/// cache database is more likely to be accurate than the previously-scanned information.\n\n/// This follows from the design (and trust) assumption that the `lightwalletd` server\n\n/// provides accurate block information as of the time it was requested.\n\n///\n\n/// Arguments:\n\n/// - `parameters` Network parameters\n\n/// - `cache` Source of compact blocks\n\n/// - `from_tip` Height & hash of last validated block; if no validation has previously\n\n/// been performed, this will begin scanning from `sapling_activation_height - 1`\n\n///\n\n/// Returns:\n\n/// - `Ok(())` if the combined chain is valid.\n\n/// - `Err(ErrorKind::InvalidChain(upper_bound, cause))` if the combined chain is invalid.\n\n/// `upper_bound` is the height of the highest 
invalid block (on the assumption that the\n\n/// highest block in the cache database is correct).\n\n/// - `Err(e)` if there was an error during validation unrelated to chain validity.\n\n///\n\n/// This function does not mutate either of the databases.\n\npub fn validate_chain<N, E, P, C>(\n\n parameters: &P,\n\n cache: &C,\n\n validate_from: Option<(BlockHeight, BlockHash)>,\n\n) -> Result<(), E>\n\nwhere\n\n E: From<Error<N>>,\n\n P: consensus::Parameters,\n\n C: BlockSource<Error = E>,\n\n{\n\n let sapling_activation_height = parameters\n\n .activation_height(NetworkUpgrade::Sapling)\n\n .ok_or(Error::SaplingNotActive)?;\n\n\n\n // The cache will contain blocks above the `validate_from` height. Validate from that maximum\n\n // height up to the chain tip, returning the hash of the block found in the cache at the\n\n // `validate_from` height, which can then be used to verify chain integrity by comparing\n\n // against the `validate_from` hash.\n\n let from_height = validate_from\n\n .map(|(height, _)| height)\n", "file_path": "zcash_client_backend/src/data_api/chain.rs", "rank": 15, "score": 186684.82911298267 }, { "content": "/// Returns the block hash for the block at the specified height,\n\n/// if any.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::consensus::{H0, Network};\n\n/// use zcash_client_sqlite::{\n\n/// WalletDb,\n\n/// wallet::get_block_hash,\n\n/// };\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db = WalletDb::for_path(data_file, Network::TestNetwork).unwrap();\n\n/// let hash = get_block_hash(&db, H0);\n\n/// ```\n\npub fn get_block_hash<P>(\n\n wdb: &WalletDb<P>,\n\n block_height: BlockHeight,\n\n) -> Result<Option<BlockHash>, rusqlite::Error> {\n\n wdb.conn\n\n .query_row(\n\n \"SELECT hash FROM blocks WHERE height = ?\",\n\n &[u32::from(block_height)],\n\n |row| {\n\n let row_data = row.get::<_, Vec<_>>(0)?;\n\n Ok(BlockHash::from_slice(&row_data))\n\n 
},\n\n )\n\n .optional()\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 16, "score": 186216.32059528277 }, { "content": "pub fn write_tree<H: HashSer + Hash + Eq, W: Write>(\n\n mut writer: W,\n\n tree: &BridgeTree<H, 32>,\n\n) -> io::Result<()> {\n\n writer.write_u8(SER_V1)?;\n\n write_tree_v1(&mut writer, tree)\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 17, "score": 185763.3964610263 }, { "content": "pub fn write_tree_v1<H: HashSer + Hash + Eq, W: Write>(\n\n mut writer: W,\n\n tree: &BridgeTree<H, 32>,\n\n) -> io::Result<()> {\n\n Vector::write(&mut writer, tree.bridges(), |w, b| write_bridge_v1(w, b))?;\n\n Vector::write(\n\n &mut writer,\n\n &tree.witnessable_leaves().iter().collect::<Vec<_>>(),\n\n |mut w, (a, i)| {\n\n a.write(&mut w)?;\n\n w.write_u64::<LittleEndian>(**i as u64)?;\n\n Ok(())\n\n },\n\n )?;\n\n Vector::write(&mut writer, tree.checkpoints(), |w, c| {\n\n write_checkpoint_v1(w, c)\n\n })?;\n\n writer.write_u64::<LittleEndian>(tree.max_checkpoints() as u64)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 18, "score": 183572.88490635116 }, { "content": "pub fn v5_signature_hash<A: Authorization>(\n\n tx: &TransactionData<A>,\n\n hash_type: u32,\n\n signable_input: &SignableInput<'_>,\n\n txid_parts: &TxDigests<Blake2bHash>,\n\n) -> Blake2bHash {\n\n match signable_input {\n\n SignableInput::Shielded => to_hash(\n\n tx.version,\n\n tx.consensus_branch_id,\n\n txid_parts.header_digest,\n\n txid_parts.transparent_digests.as_ref(),\n\n txid_parts.sapling_digest,\n\n txid_parts.orchard_digest,\n\n #[cfg(feature = \"zfuture\")]\n\n txid_parts.tze_digests.as_ref(),\n\n ),\n\n SignableInput::Transparent(input) => {\n\n if let Some((bundle, txid_digests)) = tx\n\n .transparent_bundle\n", "file_path": "zcash_primitives/src/transaction/sighash_v5.rs", "rank": 20, "score": 178246.3387578031 }, { "content": "/// Scans at most `limit` new 
blocks added to the cache for any transactions received by\n\n/// the tracked accounts.\n\n///\n\n/// This function will return without error after scanning at most `limit` new blocks, to\n\n/// enable the caller to update their UI with scanning progress. Repeatedly calling this\n\n/// function will process sequential ranges of blocks, and is equivalent to calling\n\n/// `scan_cached_blocks` and passing `None` for the optional `limit` value.\n\n///\n\n/// This function pays attention only to cached blocks with heights greater than the\n\n/// highest scanned block in `data`. Cached blocks with lower heights are not verified\n\n/// against previously-scanned blocks. In particular, this function **assumes** that the\n\n/// caller is handling rollbacks.\n\n///\n\n/// For brand-new light client databases, this function starts scanning from the Sapling\n\n/// activation height. This height can be fast-forwarded to a more recent block by\n\n/// initializing the client database with a starting block (for example, calling\n\n/// `init_blocks_table` before this function if using `zcash_client_sqlite`).\n\n///\n\n/// Scanned blocks are required to be height-sequential. 
If a block is missing from the\n\n/// cache, an error will be returned with kind [`ChainInvalid::BlockHeightDiscontinuity`].\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::consensus::{\n\n/// Network,\n\n/// Parameters,\n\n/// };\n\n/// use zcash_client_backend::{\n\n/// data_api::chain::scan_cached_blocks,\n\n/// };\n\n/// use zcash_client_sqlite::{\n\n/// BlockDb,\n\n/// WalletDb,\n\n/// error::SqliteClientError,\n\n/// wallet::init::init_wallet_db,\n\n/// };\n\n///\n\n/// # // doctests have a problem with sqlite IO, so we ignore errors\n\n/// # // generated in this example code as it's not really testing anything\n\n/// # fn main() {\n\n/// # test();\n\n/// # }\n\n/// #\n\n/// # fn test() -> Result<(), SqliteClientError> {\n\n/// let cache_file = NamedTempFile::new().unwrap();\n\n/// let cache = BlockDb::for_path(cache_file).unwrap();\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db_read = WalletDb::for_path(data_file, Network::TestNetwork)?;\n\n/// init_wallet_db(&db_read)?;\n\n///\n\n/// let mut data = db_read.get_update_ops()?;\n\n/// scan_cached_blocks(&Network::TestNetwork, &cache, &mut data, None)?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn scan_cached_blocks<E, N, P, C, D>(\n\n params: &P,\n\n cache: &C,\n\n data: &mut D,\n\n limit: Option<u32>,\n\n) -> Result<(), E>\n\nwhere\n\n P: consensus::Parameters,\n\n C: BlockSource<Error = E>,\n\n D: WalletWrite<Error = E, NoteRef = N>,\n\n N: Copy + Debug,\n\n E: From<Error<N>>,\n\n{\n\n let sapling_activation_height = params\n\n .activation_height(NetworkUpgrade::Sapling)\n\n .ok_or(Error::SaplingNotActive)?;\n\n\n\n // Recall where we synced up to previously.\n\n // If we have never synced, use sapling activation height to select all cached CompactBlocks.\n\n let mut last_height = data.block_height_extrema().map(|opt| {\n", "file_path": "zcash_client_backend/src/data_api/chain.rs", "rank": 21, "score": 
177130.6605477673 }, { "content": "/// Creates the 3-bit window table `[0, 1, ..., 8]` for different magnitudes of a fixed\n\n/// generator.\n\npub fn generate_circuit_generator(mut gen: jubjub::SubgroupPoint) -> FixedGeneratorOwned {\n\n let mut windows = vec![];\n\n\n\n for _ in 0..FIXED_BASE_CHUNKS_PER_GENERATOR {\n\n let mut coeffs = vec![(Scalar::zero(), Scalar::one())];\n\n let mut g = gen;\n\n for _ in 0..7 {\n\n let g_affine = jubjub::ExtendedPoint::from(g).to_affine();\n\n coeffs.push((g_affine.get_u(), g_affine.get_v()));\n\n g += gen;\n\n }\n\n windows.push(coeffs);\n\n\n\n // gen = gen * 8\n\n gen = g;\n\n }\n\n\n\n windows\n\n}\n\n\n", "file_path": "zcash_proofs/src/constants.rs", "rank": 22, "score": 176946.51373611053 }, { "content": "pub fn read_checkpoint_v1<H: HashSer + Clone, R: Read>(mut reader: R) -> io::Result<Checkpoint<H>> {\n\n match reader.read_u8()? {\n\n EMPTY_CHECKPOINT => Ok(Checkpoint::Empty),\n\n BRIDGE_CHECKPOINT => Ok(Checkpoint::AtIndex(\n\n reader.read_u64::<LittleEndian>()? as usize,\n\n read_bridge_v1(&mut reader)?,\n\n )),\n\n flag => Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\"Unrecognized checkpoint variant identifier: {:?}\", flag),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 23, "score": 174635.5941791148 }, { "content": "pub fn read_tree<H: Hashable + HashSer + Hash + Eq + Clone, R: Read>(\n\n mut reader: R,\n\n) -> io::Result<BridgeTree<H, 32>> {\n\n match reader.read_u8()? 
{\n\n SER_V1 => read_tree_v1(&mut reader),\n\n flag => Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\"Unrecognized tree serialization version: {:?}\", flag),\n\n )),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use proptest::prelude::*;\n\n\n\n use incrementalmerkletree::bridgetree::Frontier;\n\n\n\n use super::*;\n\n use crate::{\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 24, "score": 174569.3646316069 }, { "content": "#[allow(clippy::redundant_closure)]\n\npub fn read_auth_fragment_v1<H: HashSer, R: Read>(mut reader: R) -> io::Result<AuthFragment<H>> {\n\n let position = read_position(&mut reader)?;\n\n let alts_observed = reader.read_u64::<LittleEndian>()? as usize;\n\n let values = Vector::read(&mut reader, |r| H::read(r))?;\n\n\n\n Ok(AuthFragment::from_parts(position, alts_observed, values))\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 25, "score": 174449.28775071565 }, { "content": "fn h_star(a: &[u8], b: &[u8]) -> jubjub::Fr {\n\n hash_to_scalar(b\"Zcash_RedJubjubH\", a, b)\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Signature {\n\n rbar: [u8; 32],\n\n sbar: [u8; 32],\n\n}\n\n\n\npub struct PrivateKey(pub jubjub::Fr);\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PublicKey(pub ExtendedPoint);\n\n\n\nimpl Signature {\n\n pub fn read<R: Read>(mut reader: R) -> io::Result<Self> {\n\n let mut rbar = [0u8; 32];\n\n let mut sbar = [0u8; 32];\n\n reader.read_exact(&mut rbar)?;\n", "file_path": "zcash_primitives/src/sapling/redjubjub.rs", "rank": 26, "score": 174222.8847325325 }, { "content": "#[allow(clippy::redundant_closure)]\n\npub fn read_tree_v1<H: Hashable + HashSer + Hash + Eq + Clone, R: Read>(\n\n mut reader: R,\n\n) -> io::Result<BridgeTree<H, 32>> {\n\n BridgeTree::from_parts(\n\n Vector::read(&mut reader, |r| read_bridge_v1(r))?,\n\n Vector::read(&mut reader, |mut r| {\n\n Ok((H::read(&mut r)?, r.read_u64::<LittleEndian>()? 
as usize))\n\n })?\n\n .into_iter()\n\n .collect(),\n\n Vector::read(&mut reader, |r| read_checkpoint_v1(r))?,\n\n reader.read_u64::<LittleEndian>()? as usize,\n\n )\n\n .map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\n\n \"Consistency violation found when attempting to deserialize Merkle tree: {:?}\",\n\n err\n\n ),\n\n )\n\n })\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 27, "score": 172530.32926612147 }, { "content": "pub fn read_bridge_v1<H: HashSer + Clone, R: Read>(mut reader: R) -> io::Result<MerkleBridge<H>> {\n\n let prior_position = Optional::read(&mut reader, read_position)?;\n\n let auth_fragments = Vector::read(&mut reader, |r| {\n\n Ok((\n\n r.read_u64::<LittleEndian>()? as usize,\n\n read_auth_fragment_v1(r)?,\n\n ))\n\n })?\n\n .into_iter()\n\n .collect();\n\n let frontier = read_nonempty_frontier_v1(&mut reader)?;\n\n\n\n Ok(MerkleBridge::from_parts(\n\n prior_position,\n\n auth_fragments,\n\n frontier,\n\n ))\n\n}\n\n\n\npub const EMPTY_CHECKPOINT: u8 = 0;\n\npub const BRIDGE_CHECKPOINT: u8 = 1;\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 28, "score": 172326.34671221016 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let mut rng = XorShiftRng::from_seed([\n\n 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc,\n\n 0xe5,\n\n ]);\n\n\n\n let groth_params = generate_random_parameters::<Bls12, _, _>(\n\n Spend {\n\n value_commitment: None,\n\n proof_generation_key: None,\n\n payment_address: None,\n\n commitment_randomness: None,\n\n ar: None,\n\n auth_path: vec![None; TREE_DEPTH],\n\n anchor: None,\n\n },\n\n &mut rng,\n\n )\n\n .unwrap();\n\n\n", "file_path": "zcash_proofs/benches/sapling.rs", "rank": 29, "score": 172237.1214655372 }, { "content": "fn encode_b58(prefix: [u8; 2], data: &[u8]) -> String {\n\n let mut bytes = Vec::with_capacity(2 + data.len());\n\n 
bytes.extend_from_slice(&prefix);\n\n bytes.extend_from_slice(data);\n\n bs58::encode(bytes).with_check().into_string()\n\n}\n\n\n\nimpl fmt::Display for ZcashAddress {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let encoded = match &self.kind {\n\n AddressKind::Sprout(data) => encode_b58(\n\n match self.net {\n\n Network::Main => sprout::MAINNET,\n\n Network::Test | Network::Regtest => sprout::TESTNET,\n\n },\n\n data,\n\n ),\n\n AddressKind::Sapling(data) => encode_bech32(\n\n match self.net {\n\n Network::Main => sapling::MAINNET,\n", "file_path": "components/zcash_address/src/encoding.rs", "rank": 30, "score": 170736.8552770961 }, { "content": "fn bench_note_decryption(c: &mut Criterion) {\n\n let mut rng = OsRng;\n\n let height = TEST_NETWORK.activation_height(Canopy).unwrap();\n\n\n\n let valid_ivk = SaplingIvk(jubjub::Fr::random(&mut rng));\n\n let invalid_ivk = SaplingIvk(jubjub::Fr::random(&mut rng));\n\n\n\n // Construct a fake Sapling output as if we had just deserialized a transaction.\n\n let output: OutputDescription<GrothProofBytes> = {\n\n let diversifier = Diversifier([0; 11]);\n\n let pk_d = diversifier.g_d().unwrap() * valid_ivk.0;\n\n let pa = PaymentAddress::from_parts(diversifier, pk_d).unwrap();\n\n\n\n let rseed = generate_random_rseed(&TEST_NETWORK, height, &mut rng);\n\n\n\n // Construct the value commitment for the proof instance\n\n let value = 100;\n\n let value_commitment = ValueCommitment {\n\n value,\n\n randomness: jubjub::Fr::random(&mut rng),\n", "file_path": "zcash_primitives/benches/note_decryption.rs", "rank": 31, "score": 168038.17688232535 }, { "content": "/// Derives the ZIP 32 [`ExtendedSpendingKey`] for a given coin type and account from the\n\n/// given seed.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if `seed` is shorter than 32 bytes.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use zcash_primitives::{constants::testnet::COIN_TYPE};\n\n/// use 
zcash_client_backend::{keys::spending_key};\n\n///\n\n/// let extsk = spending_key(&[0; 32][..], COIN_TYPE, 0);\n\n/// ```\n\n/// [`ExtendedSpendingKey`]: zcash_primitives::zip32::ExtendedSpendingKey\n\npub fn spending_key(seed: &[u8], coin_type: u32, account: u32) -> ExtendedSpendingKey {\n\n if seed.len() < 32 {\n\n panic!(\"ZIP 32 seeds MUST be at least 32 bytes\");\n\n }\n\n\n\n ExtendedSpendingKey::from_path(\n\n &ExtendedSpendingKey::master(&seed),\n\n &[\n\n ChildIndex::Hardened(32),\n\n ChildIndex::Hardened(coin_type),\n\n ChildIndex::Hardened(account),\n\n ],\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::spending_key;\n\n\n\n #[test]\n\n #[should_panic]\n\n fn spending_key_panics_on_short_seed() {\n\n let _ = spending_key(&[0; 31][..], 0, 0);\n\n }\n\n}\n", "file_path": "zcash_client_backend/src/keys.rs", "rank": 32, "score": 164738.1046330323 }, { "content": "pub fn pedersen_hash<I>(personalization: Personalization, bits: I) -> jubjub::SubgroupPoint\n\nwhere\n\n I: IntoIterator<Item = bool>,\n\n{\n\n let mut bits = personalization\n\n .get_bits()\n\n .into_iter()\n\n .chain(bits.into_iter());\n\n\n\n let mut result = jubjub::SubgroupPoint::identity();\n\n let mut generators = PEDERSEN_HASH_EXP_TABLE.iter();\n\n\n\n loop {\n\n let mut acc = jubjub::Fr::zero();\n\n let mut cur = jubjub::Fr::one();\n\n let mut chunks_remaining = PEDERSEN_HASH_CHUNKS_PER_GENERATOR;\n\n let mut encountered_bits = false;\n\n\n\n // Grab three bits from the input\n\n while let Some(a) = bits.next() {\n", "file_path": "zcash_primitives/src/sapling/pedersen_hash.rs", "rank": 33, "score": 164638.87495433982 }, { "content": "pub fn read_nullifier<R: Read>(mut reader: R) -> io::Result<Nullifier> {\n\n let mut bytes = [0u8; 32];\n\n reader.read_exact(&mut bytes)?;\n\n let nullifier_ctopt = Nullifier::from_bytes(&bytes);\n\n if nullifier_ctopt.is_none().into() {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"invalid Pallas point for 
nullifier\".to_owned(),\n\n ))\n\n } else {\n\n Ok(nullifier_ctopt.unwrap())\n\n }\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 34, "score": 163666.33784441586 }, { "content": "pub fn read_anchor<R: Read>(mut reader: R) -> io::Result<Anchor> {\n\n let mut bytes = [0u8; 32];\n\n reader.read_exact(&mut bytes)?;\n\n Anchor::from_bytes(bytes).ok_or_else(|| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"invalid Orchard anchor\".to_owned(),\n\n )\n\n })\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 35, "score": 163666.33784441586 }, { "content": "pub fn read_position<R: Read>(mut reader: R) -> io::Result<Position> {\n\n Ok(Position::from(reader.read_u64::<LittleEndian>()? as usize))\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 36, "score": 163666.33784441586 }, { "content": "pub fn read_flags<R: Read>(mut reader: R) -> io::Result<Flags> {\n\n let mut byte = [0u8; 1];\n\n reader.read_exact(&mut byte)?;\n\n Flags::from_byte(byte[0])\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 37, "score": 163666.33784441586 }, { "content": "pub fn get_vectors<'a>() -> Vec<TestVector<'a>> {\n\n return vec![\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1],\n\n hash_u: \"0x06b1187c11ca4fb4383b2e0d0dbbde3ad3617338b5029187ec65a5eaed5e4d0b\",\n\n hash_v: \"0x3ce70f536652f0dea496393a1e55c4e08b9d55508e16d11e5db40d4810cbc982\",\n\n },\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1, 0],\n\n hash_u: \"0x2fc3bc454c337f71d4f04f86304262fcbfc9ecd808716b92fc42cbe6827f7f1a\",\n\n hash_v: \"0x46d0d25bf1a654eedc6a9b1e5af398925113959feac31b7a2c036ff9b9ec0638\",\n\n },\n\n TestVector {\n\n personalization: Personalization::NoteCommitment,\n\n input_bits: vec![1, 1, 1, 1, 1, 1, 1],\n\n hash_u: 
\"0x4f8ce0e0a9e674b3ab9606a7d7aefba386e81583d81918127814cde41d209d97\",\n\n hash_v: \"0x312b5ab93b14c9b9af334fe1fe3c50fffb53fbd074fa40ca600febde7c97e346\",\n\n },\n", "file_path": "zcash_primitives/src/sapling/pedersen_hash/test_vectors.rs", "rank": 38, "score": 162382.48538597298 }, { "content": "fn blake2b_personal(personalization: &[u8], input: &[u8]) -> [u8; 32] {\n\n let hash_result = Blake2Params::new()\n\n .hash_length(32)\n\n .personal(personalization)\n\n .to_state()\n\n .update(input)\n\n .finalize();\n\n let mut result = [0u8; 32];\n\n result.copy_from_slice(hash_result.as_bytes());\n\n result\n\n}\n\n\n", "file_path": "zcash_history/src/version.rs", "rank": 39, "score": 162374.3324794431 }, { "content": "pub fn write_bridge_v1<H: HashSer, W: Write>(\n\n mut writer: W,\n\n bridge: &MerkleBridge<H>,\n\n) -> io::Result<()> {\n\n Optional::write(\n\n &mut writer,\n\n bridge.prior_position().map(<u64>::from),\n\n |w, n| w.write_u64::<LittleEndian>(n),\n\n )?;\n\n Vector::write(\n\n &mut writer,\n\n &bridge.auth_fragments().iter().collect::<Vec<_>>(),\n\n |w, (i, a)| {\n\n w.write_u64::<LittleEndian>(**i as u64)?;\n\n write_auth_fragment_v1(w, a)\n\n },\n\n )?;\n\n write_nonempty_frontier_v1(&mut writer, bridge.frontier())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 40, "score": 159931.15804449754 }, { "content": "pub fn write_frontier_v1<H: HashSer, W: Write>(\n\n writer: W,\n\n frontier: &Frontier<H, 32>,\n\n) -> io::Result<()> {\n\n Optional::write(writer, frontier.value(), write_nonempty_frontier_v1)\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 41, "score": 159931.15804449754 }, { "content": "pub fn write_checkpoint_v1<H: HashSer, W: Write>(\n\n mut writer: W,\n\n checkpoint: &Checkpoint<H>,\n\n) -> io::Result<()> {\n\n match checkpoint {\n\n Checkpoint::Empty => {\n\n writer.write_u8(EMPTY_CHECKPOINT)?;\n\n }\n\n Checkpoint::AtIndex(i, b) => {\n\n 
writer.write_u8(BRIDGE_CHECKPOINT)?;\n\n writer.write_u64::<LittleEndian>(*i as u64)?;\n\n write_bridge_v1(&mut writer, b)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 42, "score": 159931.15804449754 }, { "content": "pub fn read_value_commitment<R: Read>(mut reader: R) -> io::Result<ValueCommitment> {\n\n let mut bytes = [0u8; 32];\n\n reader.read_exact(&mut bytes)?;\n\n let cv = ValueCommitment::from_bytes(&bytes);\n\n\n\n if cv.is_none().into() {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"invalid Pallas point for value commitment\".to_owned(),\n\n ))\n\n } else {\n\n Ok(cv.unwrap())\n\n }\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 43, "score": 159410.1870939623 }, { "content": "pub fn read_cmx<R: Read>(mut reader: R) -> io::Result<ExtractedNoteCommitment> {\n\n let mut bytes = [0u8; 32];\n\n reader.read_exact(&mut bytes)?;\n\n let cmx = ExtractedNoteCommitment::from_bytes(&bytes);\n\n Option::from(cmx).ok_or_else(|| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"invalid Pallas base for field cmx\".to_owned(),\n\n )\n\n })\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 44, "score": 159410.1870939623 }, { "content": "/// Consensus rules (§4.4) & (§4.5):\n\n/// - Canonical encoding is enforced by the API of SaplingVerificationContext::check_spend()\n\n/// and SaplingVerificationContext::check_output() due to the need to parse this into a\n\n/// bellman::groth16::Proof.\n\n/// - Proof validity is enforced in SaplingVerificationContext::check_spend()\n\n/// and SaplingVerificationContext::check_output()\n\npub fn read_zkproof<R: Read>(mut reader: R) -> io::Result<GrothProofBytes> {\n\n let mut zkproof = [0u8; GROTH_PROOF_SIZE];\n\n reader.read_exact(&mut zkproof)?;\n\n Ok(zkproof)\n\n}\n\n\n\nimpl SpendDescription<Authorized> {\n\n pub fn read_nullifier<R: Read>(mut reader: 
R) -> io::Result<Nullifier> {\n\n let mut nullifier = Nullifier([0u8; 32]);\n\n reader.read_exact(&mut nullifier.0)?;\n\n Ok(nullifier)\n\n }\n\n\n\n /// Consensus rules (§4.4):\n\n /// - Canonical encoding is enforced here.\n\n /// - \"Not small order\" is enforced in SaplingVerificationContext::check_spend()\n\n pub fn read_rk<R: Read>(mut reader: R) -> io::Result<PublicKey> {\n\n PublicKey::read(&mut reader)\n\n }\n\n\n", "file_path": "zcash_primitives/src/transaction/components/sapling.rs", "rank": 45, "score": 159410.1870939623 }, { "content": "pub fn read_action_without_auth<R: Read>(mut reader: R) -> io::Result<Action<()>> {\n\n let cv_net = read_value_commitment(&mut reader)?;\n\n let nf_old = read_nullifier(&mut reader)?;\n\n let rk = read_verification_key(&mut reader)?;\n\n let cmx = read_cmx(&mut reader)?;\n\n let encrypted_note = read_note_ciphertext(&mut reader)?;\n\n\n\n Ok(Action::from_parts(\n\n nf_old,\n\n rk,\n\n cmx,\n\n encrypted_note,\n\n cv_net,\n\n (),\n\n ))\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 46, "score": 159410.1870939623 }, { "content": "pub fn write_nullifier<W: Write>(mut writer: W, nf: &Nullifier) -> io::Result<()> {\n\n writer.write_all(&nf.to_bytes())\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 47, "score": 158611.6983560287 }, { "content": "pub fn write_auth_fragment_v1<H: HashSer, W: Write>(\n\n mut writer: W,\n\n fragment: &AuthFragment<H>,\n\n) -> io::Result<()> {\n\n writer.write_u64::<LittleEndian>(<u64>::from(fragment.position()))?;\n\n writer.write_u64::<LittleEndian>(fragment.altitudes_observed() as u64)?;\n\n Vector::write(&mut writer, fragment.values(), |w, a| a.write(w))\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 48, "score": 157580.57201819605 }, { "content": "pub fn write_nonempty_frontier_v1<H: HashSer, W: Write>(\n\n mut writer: W,\n\n frontier: &NonEmptyFrontier<H>,\n\n) -> 
io::Result<()> {\n\n writer.write_u64::<LittleEndian>(<u64>::from(frontier.position()))?;\n\n match frontier.leaf() {\n\n Leaf::Left(a) => {\n\n a.write(&mut writer)?;\n\n Optional::write(&mut writer, None, |w, n: &H| n.write(w))?;\n\n }\n\n Leaf::Right(a, b) => {\n\n a.write(&mut writer)?;\n\n Optional::write(&mut writer, Some(b), |w, n| n.write(w))?;\n\n }\n\n }\n\n Vector::write(&mut writer, &frontier.ommers(), |w, e| e.write(w))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 49, "score": 157580.57201819605 }, { "content": "pub fn read_note_ciphertext<R: Read>(mut reader: R) -> io::Result<TransmittedNoteCiphertext> {\n\n let mut tnc = TransmittedNoteCiphertext {\n\n epk_bytes: [0u8; 32],\n\n enc_ciphertext: [0u8; 580],\n\n out_ciphertext: [0u8; 80],\n\n };\n\n\n\n reader.read_exact(&mut tnc.epk_bytes)?;\n\n reader.read_exact(&mut tnc.enc_ciphertext)?;\n\n reader.read_exact(&mut tnc.out_ciphertext)?;\n\n\n\n Ok(tnc)\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 50, "score": 157403.3332429118 }, { "content": "pub fn to_txid(\n\n txversion: TxVersion,\n\n consensus_branch_id: BranchId,\n\n digests: &TxDigests<Blake2bHash>,\n\n) -> TxId {\n\n let txid_digest = to_hash(\n\n txversion,\n\n consensus_branch_id,\n\n digests.header_digest,\n\n digests.transparent_digests.as_ref(),\n\n digests.sapling_digest,\n\n digests.orchard_digest,\n\n #[cfg(feature = \"zfuture\")]\n\n digests.tze_digests.as_ref(),\n\n );\n\n\n\n TxId(<[u8; 32]>::try_from(txid_digest.as_bytes()).unwrap())\n\n}\n\n\n\n/// Digester which constructs a digest of only the witness data.\n", "file_path": "zcash_primitives/src/transaction/txid.rs", "rank": 51, "score": 155961.42086907758 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_proof(\n\n phi: [u8; 32],\n\n rt: [u8; 32],\n\n h_sig: [u8; 32],\n\n\n\n // First input\n\n in_sk1: [u8; 32],\n\n in_value1: u64,\n\n in_rho1: [u8; 
32],\n\n in_r1: [u8; 32],\n\n in_auth1: &[u8; WITNESS_PATH_SIZE],\n\n\n\n // Second input\n\n in_sk2: [u8; 32],\n\n in_value2: u64,\n\n in_rho2: [u8; 32],\n\n in_r2: [u8; 32],\n\n in_auth2: &[u8; WITNESS_PATH_SIZE],\n\n\n\n // First output\n", "file_path": "zcash_proofs/src/sprout.rs", "rank": 52, "score": 155961.42086907758 }, { "content": "pub fn load_parameters(\n\n spend_path: &Path,\n\n output_path: &Path,\n\n sprout_path: Option<&Path>,\n\n) -> ZcashParameters {\n\n // Load from each of the paths\n\n let spend_fs = File::open(spend_path).expect(\"couldn't load Sapling spend parameters file\");\n\n let output_fs = File::open(output_path).expect(\"couldn't load Sapling output parameters file\");\n\n let sprout_fs =\n\n sprout_path.map(|p| File::open(p).expect(\"couldn't load Sprout groth16 parameters file\"));\n\n\n\n parse_parameters(\n\n BufReader::with_capacity(1024 * 1024, spend_fs),\n\n BufReader::with_capacity(1024 * 1024, output_fs),\n\n sprout_fs.map(|fs| BufReader::with_capacity(1024 * 1024, fs)),\n\n )\n\n}\n\n\n", "file_path": "zcash_proofs/src/lib.rs", "rank": 53, "score": 155961.42086907758 }, { "content": "// dummy example\n\npub fn main() {}\n", "file_path": "zcash_history/examples/lib/main.rs", "rank": 54, "score": 155961.42086907758 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn verify_proof(\n\n proof: &[u8; GROTH_PROOF_SIZE],\n\n rt: &[u8; 32],\n\n h_sig: &[u8; 32],\n\n mac1: &[u8; 32],\n\n mac2: &[u8; 32],\n\n nf1: &[u8; 32],\n\n nf2: &[u8; 32],\n\n cm1: &[u8; 32],\n\n cm2: &[u8; 32],\n\n vpub_old: u64,\n\n vpub_new: u64,\n\n verifying_key: &PreparedVerifyingKey<Bls12>,\n\n) -> bool {\n\n // Prepare the public input for the verifier\n\n let mut public_input = Vec::with_capacity((32 * 8) + (8 * 2));\n\n public_input.extend(rt);\n\n public_input.extend(h_sig);\n\n public_input.extend(nf1);\n\n public_input.extend(mac1);\n", "file_path": "zcash_proofs/src/sprout.rs", "rank": 55, "score": 155961.42086907758 }, { "content": "/// 
Checks whether `soln` is a valid solution for `(input, nonce)` with the\n\n/// parameters `(n, k)`.\n\npub fn is_valid_solution(\n\n n: u32,\n\n k: u32,\n\n input: &[u8],\n\n nonce: &[u8],\n\n soln: &[u8],\n\n) -> Result<(), Error> {\n\n let p = Params::new(n, k)?;\n\n let indices = indices_from_minimal(p, soln)?;\n\n\n\n // Recursive validation is faster\n\n is_valid_solution_recursive(p, input, nonce, &indices)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{\n\n expand_array, indices_from_minimal, is_valid_solution, is_valid_solution_iterative,\n\n is_valid_solution_recursive, Params,\n\n };\n", "file_path": "components/equihash/src/verify.rs", "rank": 56, "score": 155961.42086907758 }, { "content": "pub fn write_value_commitment<W: Write>(mut writer: W, cv: &ValueCommitment) -> io::Result<()> {\n\n writer.write_all(&cv.to_bytes())\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 57, "score": 154518.77135244454 }, { "content": "pub fn write_cmx<W: Write>(mut writer: W, cmx: &ExtractedNoteCommitment) -> io::Result<()> {\n\n writer.write_all(&cmx.to_bytes())\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 58, "score": 154518.77135244454 }, { "content": "/// A Sapling full viewing key fingerprint\n\nstruct FvkFingerprint([u8; 32]);\n\n\n\nimpl From<&FullViewingKey> for FvkFingerprint {\n\n fn from(fvk: &FullViewingKey) -> Self {\n\n let mut h = Blake2bParams::new()\n\n .hash_length(32)\n\n .personal(ZIP32_SAPLING_FVFP_PERSONALIZATION)\n\n .to_state();\n\n h.update(&fvk.to_bytes());\n\n let mut fvfp = [0u8; 32];\n\n fvfp.copy_from_slice(h.finalize().as_bytes());\n\n FvkFingerprint(fvfp)\n\n }\n\n}\n\n\n\n/// A Sapling full viewing key tag\n", "file_path": "zcash_primitives/src/zip32.rs", "rank": 59, "score": 153948.90306578856 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\nstruct FvkTag([u8; 4]);\n\n\n\nimpl FvkFingerprint {\n\n fn tag(&self) -> FvkTag {\n\n 
let mut tag = [0u8; 4];\n\n tag.copy_from_slice(&self.0[..4]);\n\n FvkTag(tag)\n\n }\n\n}\n\n\n\nimpl FvkTag {\n\n fn master() -> Self {\n\n FvkTag([0u8; 4])\n\n }\n\n}\n\n\n\n/// A child index for a derived key\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ChildIndex {\n\n NonHardened(u32),\n", "file_path": "zcash_primitives/src/zip32.rs", "rank": 60, "score": 153948.90306578856 }, { "content": "#[derive(Clone, Copy, Debug, PartialEq)]\n\nstruct ChainCode([u8; 32]);\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct DiversifierIndex(pub [u8; 11]);\n\n\n\nimpl Default for DiversifierIndex {\n\n fn default() -> Self {\n\n DiversifierIndex::new()\n\n }\n\n}\n\n\n\nimpl DiversifierIndex {\n\n pub fn new() -> Self {\n\n DiversifierIndex([0; 11])\n\n }\n\n\n\n pub fn increment(&mut self) -> Result<(), ()> {\n\n for k in 0..11 {\n\n self.0[k] = self.0[k].wrapping_add(1);\n\n if self.0[k] != 0 {\n", "file_path": "zcash_primitives/src/zip32.rs", "rank": 61, "score": 153948.90306578856 }, { "content": "pub fn read_verification_key<R: Read>(mut reader: R) -> io::Result<VerificationKey<SpendAuth>> {\n\n let mut bytes = [0u8; 32];\n\n reader.read_exact(&mut bytes)?;\n\n VerificationKey::try_from(bytes).map_err(|_| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"invalid verification key\".to_owned(),\n\n )\n\n })\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 62, "score": 152586.7083215549 }, { "content": "#[allow(clippy::redundant_closure)]\n\npub fn read_nonempty_frontier_v1<H: HashSer + Clone, R: Read>(\n\n mut reader: R,\n\n) -> io::Result<NonEmptyFrontier<H>> {\n\n let position = read_position(&mut reader)?;\n\n let left = H::read(&mut reader)?;\n\n let right = Optional::read(&mut reader, H::read)?;\n\n\n\n let leaf = right.map_or_else(\n\n || Leaf::Left(left.clone()),\n\n |r| Leaf::Right(left.clone(), r),\n\n );\n\n let ommers = Vector::read(&mut reader, |r| H::read(r))?;\n\n\n\n 
NonEmptyFrontier::from_parts(position, leaf, ommers).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\"Parsing resulted in an invalid Merkle frontier: {:?}\", err),\n\n )\n\n })\n\n}\n\n\n", "file_path": "zcash_primitives/src/merkle_tree/incremental.rs", "rank": 63, "score": 152210.3899444309 }, { "content": "/// Decodes a [`TransparentAddress`] from a Base58Check-encoded string.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use zcash_primitives::{\n\n/// constants::testnet::{B58_PUBKEY_ADDRESS_PREFIX, B58_SCRIPT_ADDRESS_PREFIX},\n\n/// };\n\n/// use zcash_client_backend::{\n\n/// encoding::decode_transparent_address,\n\n/// };\n\n/// use zcash_primitives::legacy::TransparentAddress;\n\n///\n\n/// assert_eq!(\n\n/// decode_transparent_address(\n\n/// &B58_PUBKEY_ADDRESS_PREFIX,\n\n/// &B58_SCRIPT_ADDRESS_PREFIX,\n\n/// \"tm9iMLAuYMzJ6jtFLcA7rzUmfreGuKvr7Ma\",\n\n/// ),\n\n/// Ok(Some(TransparentAddress::PublicKey([0; 20]))),\n\n/// );\n\n///\n\n/// assert_eq!(\n\n/// decode_transparent_address(\n\n/// &B58_PUBKEY_ADDRESS_PREFIX,\n\n/// &B58_SCRIPT_ADDRESS_PREFIX,\n\n/// \"t26YoyZ1iPgiMEWL4zGUm74eVWfhyDMXzY2\",\n\n/// ),\n\n/// Ok(Some(TransparentAddress::Script([0; 20]))),\n\n/// );\n\n/// ```\n\n/// [`TransparentAddress`]: zcash_primitives::legacy::TransparentAddress\n\npub fn decode_transparent_address(\n\n pubkey_version: &[u8],\n\n script_version: &[u8],\n\n s: &str,\n\n) -> Result<Option<TransparentAddress>, Bs58Error> {\n\n bs58::decode(s).with_check(None).into_vec().map(|decoded| {\n\n if decoded.starts_with(pubkey_version) {\n\n decoded[pubkey_version.len()..]\n\n .try_into()\n\n .ok()\n\n .map(TransparentAddress::PublicKey)\n\n } else if decoded.starts_with(script_version) {\n\n decoded[script_version.len()..]\n\n .try_into()\n\n .ok()\n\n .map(TransparentAddress::Script)\n\n } else {\n\n None\n\n }\n\n })\n", "file_path": "zcash_client_backend/src/encoding.rs", "rank": 64, "score": 151575.10867999462 }, { "content": 
"/// Writes a [`TransparentAddress`] as a Base58Check-encoded string.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use zcash_client_backend::{\n\n/// encoding::encode_transparent_address,\n\n/// };\n\n/// use zcash_primitives::{\n\n/// constants::testnet::{B58_PUBKEY_ADDRESS_PREFIX, B58_SCRIPT_ADDRESS_PREFIX},\n\n/// legacy::TransparentAddress,\n\n/// };\n\n///\n\n/// assert_eq!(\n\n/// encode_transparent_address(\n\n/// &B58_PUBKEY_ADDRESS_PREFIX,\n\n/// &B58_SCRIPT_ADDRESS_PREFIX,\n\n/// &TransparentAddress::PublicKey([0; 20]),\n\n/// ),\n\n/// \"tm9iMLAuYMzJ6jtFLcA7rzUmfreGuKvr7Ma\",\n\n/// );\n\n///\n\n/// assert_eq!(\n\n/// encode_transparent_address(\n\n/// &B58_PUBKEY_ADDRESS_PREFIX,\n\n/// &B58_SCRIPT_ADDRESS_PREFIX,\n\n/// &TransparentAddress::Script([0; 20]),\n\n/// ),\n\n/// \"t26YoyZ1iPgiMEWL4zGUm74eVWfhyDMXzY2\",\n\n/// );\n\n/// ```\n\n/// [`TransparentAddress`]: zcash_primitives::legacy::TransparentAddress\n\npub fn encode_transparent_address(\n\n pubkey_version: &[u8],\n\n script_version: &[u8],\n\n addr: &TransparentAddress,\n\n) -> String {\n\n let decoded = match addr {\n\n TransparentAddress::PublicKey(key_id) => {\n\n let mut decoded = vec![0; pubkey_version.len() + 20];\n\n decoded[..pubkey_version.len()].copy_from_slice(pubkey_version);\n\n decoded[pubkey_version.len()..].copy_from_slice(key_id);\n\n decoded\n\n }\n\n TransparentAddress::Script(script_id) => {\n\n let mut decoded = vec![0; script_version.len() + 20];\n\n decoded[..script_version.len()].copy_from_slice(script_version);\n\n decoded[script_version.len()..].copy_from_slice(script_id);\n\n decoded\n\n }\n\n };\n\n bs58::encode(decoded).with_check().into_string()\n\n}\n\n\n", "file_path": "zcash_client_backend/src/encoding.rs", "rank": 65, "score": 151574.5088297695 }, { "content": "/// Sapling PRF^ock.\n\n///\n\n/// Implemented per section 5.4.2 of the Zcash Protocol Specification.\n\npub fn prf_ock(\n\n ovk: &OutgoingViewingKey,\n\n cv: &jubjub::ExtendedPoint,\n\n cmu_bytes: 
&[u8; 32],\n\n ephemeral_key: &EphemeralKeyBytes,\n\n) -> OutgoingCipherKey {\n\n OutgoingCipherKey(\n\n Blake2bParams::new()\n\n .hash_length(32)\n\n .personal(PRF_OCK_PERSONALIZATION)\n\n .to_state()\n\n .update(&ovk.0)\n\n .update(&cv.to_bytes())\n\n .update(cmu_bytes)\n\n .update(ephemeral_key.as_ref())\n\n .finalize()\n\n .as_bytes()\n\n .try_into()\n\n .unwrap(),\n\n )\n\n}\n\n\n", "file_path": "zcash_primitives/src/sapling/note_encryption.rs", "rank": 66, "score": 151569.64033759717 }, { "content": "/// This is the primary trait which must be implemented by an extension type for that type to be\n\n/// eligible for inclusion in Zcash consensus rules.\n\npub trait Extension<C> {\n\n /// Extension-specific precondition type. The extension will need to implement\n\n /// [`FromPayload<Error = Self::Error>`] for this type in order for their extension to be\n\n /// eligible for integration into consensus rules.\n\n type Precondition;\n\n\n\n /// Extension-specific witness type. The extension will need to implement [`FromPayload<Error =\n\n /// Self::Error>`] for this type in order for their extension to be eligible for integration\n\n /// into consensus rules.\n\n type Witness;\n\n\n\n /// Extension-specific error type. This should encompass both parsing and verification errors.\n\n type Error;\n\n\n\n /// This is the primary method that an extension must implement. 
Implementations should return\n\n /// [`Ok(())`] if verification of the witness succeeds against the supplied precondition, and\n\n /// an error in any other case.\n\n fn verify_inner(\n\n &self,\n\n precondition: &Self::Precondition,\n", "file_path": "zcash_primitives/src/extensions/transparent.rs", "rank": 67, "score": 150225.366170907 }, { "content": "/// Consensus rules (§7.3) & (§7.4):\n\n/// - Canonical encoding is enforced here\n\npub fn read_base<R: Read>(mut reader: R, field: &str) -> io::Result<bls12_381::Scalar> {\n\n let mut f = [0u8; 32];\n\n reader.read_exact(&mut f)?;\n\n bls12_381::Scalar::from_repr(f).ok_or_else(|| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\"{} not in field\", field),\n\n )\n\n })\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/sapling.rs", "rank": 68, "score": 149588.89584761285 }, { "content": "/// Decodes an [`ExtendedSpendingKey`] from a Bech32-encoded string.\n\n///\n\n/// [`ExtendedSpendingKey`]: zcash_primitives::zip32::ExtendedSpendingKey\n\npub fn decode_extended_spending_key(\n\n hrp: &str,\n\n s: &str,\n\n) -> Result<Option<ExtendedSpendingKey>, Error> {\n\n bech32_decode(hrp, s, |data| ExtendedSpendingKey::read(&data[..]).ok())\n\n}\n\n\n", "file_path": "zcash_client_backend/src/encoding.rs", "rank": 69, "score": 149528.15181476384 }, { "content": "pub fn read_signature<R: Read, T: SigType>(mut reader: R) -> io::Result<Signature<T>> {\n\n let mut bytes = [0u8; 64];\n\n reader.read_exact(&mut bytes)?;\n\n Ok(Signature::from(bytes))\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/orchard.rs", "rank": 70, "score": 147656.8328167232 }, { "content": "/// Consensus rules (§4.4) & (§4.5):\n\n/// - Canonical encoding is enforced here.\n\n/// - \"Not small order\" is enforced in SaplingVerificationContext::(check_spend()/check_output())\n\n/// (located in zcash_proofs::sapling::verifier).\n\npub fn read_point<R: Read>(mut reader: R, field: &str) -> 
io::Result<jubjub::ExtendedPoint> {\n\n let mut bytes = [0u8; 32];\n\n reader.read_exact(&mut bytes)?;\n\n let point = jubjub::ExtendedPoint::from_bytes(&bytes);\n\n\n\n if point.is_none().into() {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\"invalid {}\", field),\n\n ))\n\n } else {\n\n Ok(point.unwrap())\n\n }\n\n}\n\n\n", "file_path": "zcash_primitives/src/transaction/components/sapling.rs", "rank": 71, "score": 147656.8328167232 }, { "content": "/// Implements a traversal of `limit` blocks of the block cache database.\n\n///\n\n/// Starting at `from_height`, the `with_row` callback is invoked\n\n/// with each block retrieved from the backing store. If the `limit`\n\n/// value provided is `None`, all blocks are traversed up to the\n\n/// maximum height.\n\npub fn with_blocks<F>(\n\n cache: &BlockDb,\n\n from_height: BlockHeight,\n\n limit: Option<u32>,\n\n mut with_row: F,\n\n) -> Result<(), SqliteClientError>\n\nwhere\n\n F: FnMut(CompactBlock) -> Result<(), SqliteClientError>,\n\n{\n\n // Fetch the CompactBlocks we need to scan\n\n let mut stmt_blocks = cache.0.prepare(\n\n \"SELECT height, data FROM compactblocks WHERE height > ? 
ORDER BY height ASC LIMIT ?\",\n\n )?;\n\n\n\n let rows = stmt_blocks.query_map(\n\n params![u32::from(from_height), limit.unwrap_or(u32::max_value()),],\n\n |row| {\n\n Ok(CompactBlockRow {\n\n height: BlockHeight::from_u32(row.get(0)?),\n\n data: row.get(1)?,\n", "file_path": "zcash_client_sqlite/src/chain.rs", "rank": 72, "score": 147647.57468691684 }, { "content": "pub fn try_sapling_note_decryption<\n\n P: consensus::Parameters,\n\n Output: ShieldedOutput<SaplingDomain<P>>,\n\n>(\n\n params: &P,\n\n height: BlockHeight,\n\n ivk: &SaplingIvk,\n\n output: &Output,\n\n) -> Option<(Note, PaymentAddress, MemoBytes)> {\n\n let domain = SaplingDomain {\n\n params: params.clone(),\n\n height,\n\n };\n\n try_note_decryption(&domain, ivk, output)\n\n}\n\n\n", "file_path": "zcash_primitives/src/sapling/note_encryption.rs", "rank": 73, "score": 147580.1684816697 }, { "content": "/// Decodes an [`ExtendedFullViewingKey`] from a Bech32-encoded string.\n\n///\n\n/// [`ExtendedFullViewingKey`]: zcash_primitives::zip32::ExtendedFullViewingKey\n\npub fn decode_extended_full_viewing_key(\n\n hrp: &str,\n\n s: &str,\n\n) -> Result<Option<ExtendedFullViewingKey>, Error> {\n\n bech32_decode(hrp, s, |data| ExtendedFullViewingKey::read(&data[..]).ok())\n\n}\n\n\n", "file_path": "zcash_client_backend/src/encoding.rs", "rank": 74, "score": 147580.1684816697 }, { "content": "pub fn try_sapling_compact_note_decryption<\n\n P: consensus::Parameters,\n\n Output: ShieldedOutput<SaplingDomain<P>>,\n\n>(\n\n params: &P,\n\n height: BlockHeight,\n\n ivk: &SaplingIvk,\n\n output: &Output,\n\n) -> Option<(Note, PaymentAddress)> {\n\n let domain = SaplingDomain {\n\n params: params.clone(),\n\n height,\n\n };\n\n\n\n try_compact_note_decryption(&domain, ivk, output)\n\n}\n\n\n", "file_path": "zcash_primitives/src/sapling/note_encryption.rs", "rank": 75, "score": 145719.41001433728 }, { "content": "/// Returns the verified balance for the account at the specified height,\n\n/// This may be used to 
obtain a balance that ignores notes that have been\n\n/// received so recently that they are not yet deemed spendable.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::consensus::{BlockHeight, Network};\n\n/// use zcash_client_backend::wallet::AccountId;\n\n/// use zcash_client_sqlite::{\n\n/// WalletDb,\n\n/// wallet::get_balance_at,\n\n/// };\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db = WalletDb::for_path(data_file, Network::TestNetwork).unwrap();\n\n/// let addr = get_balance_at(&db, AccountId(0), BlockHeight::from_u32(0));\n\n/// ```\n\npub fn get_balance_at<P>(\n\n wdb: &WalletDb<P>,\n\n account: AccountId,\n\n anchor_height: BlockHeight,\n\n) -> Result<Amount, SqliteClientError> {\n\n let balance = wdb.conn.query_row(\n\n \"SELECT SUM(value) FROM received_notes\n\n INNER JOIN transactions ON transactions.id_tx = received_notes.tx\n\n WHERE account = ? AND spent IS NULL AND transactions.block <= ?\",\n\n &[account.0, u32::from(anchor_height)],\n\n |row| row.get(0).or(Ok(0)),\n\n )?;\n\n\n\n match Amount::from_i64(balance) {\n\n Ok(amount) if !amount.is_negative() => Ok(amount),\n\n _ => Err(SqliteClientError::CorruptedData(\n\n \"Sum of values in received_notes is out of range\".to_string(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 76, "score": 145512.89280511957 }, { "content": "/// Returns the incremental witnesses for the block at the specified height,\n\n/// if any.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::consensus::{Network, H0};\n\n/// use zcash_client_sqlite::{\n\n/// WalletDb,\n\n/// wallet::get_witnesses,\n\n/// };\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db = WalletDb::for_path(data_file, Network::TestNetwork).unwrap();\n\n/// let witnesses = get_witnesses(&db, H0);\n\n/// ```\n\npub fn get_witnesses<P>(\n\n wdb: 
&WalletDb<P>,\n\n block_height: BlockHeight,\n\n) -> Result<Vec<(NoteId, IncrementalWitness<Node>)>, SqliteClientError> {\n\n let mut stmt_fetch_witnesses = wdb\n\n .conn\n\n .prepare(\"SELECT note, witness FROM sapling_witnesses WHERE block = ?\")?;\n\n let witnesses = stmt_fetch_witnesses\n\n .query_map(&[u32::from(block_height)], |row| {\n\n let id_note = NoteId::ReceivedNoteId(row.get(0)?);\n\n let wdb: Vec<u8> = row.get(1)?;\n\n Ok(IncrementalWitness::read(&wdb[..]).map(|witness| (id_note, witness)))\n\n })\n\n .map_err(SqliteClientError::from)?;\n\n\n\n // unwrap database error & IO error from IncrementalWitness::read\n\n let res: Vec<_> = witnesses.collect::<Result<Result<_, _>, _>>()??;\n\n Ok(res)\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 77, "score": 145512.88939762372 }, { "content": "/// Removes old incremental witnesses up to the given block height.\n\npub fn prune_witnesses<P>(\n\n stmts: &mut DataConnStmtCache<'_, P>,\n\n below_height: BlockHeight,\n\n) -> Result<(), SqliteClientError> {\n\n stmts\n\n .stmt_prune_witnesses\n\n .execute(&[u32::from(below_height)])?;\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 78, "score": 145500.62364625026 }, { "content": "/// Retrieve the nullifiers for notes that the wallet is tracking\n\n/// that have not yet been confirmed as a consequence of the spending\n\n/// transaction being included in a block.\n\npub fn get_nullifiers<P>(\n\n wdb: &WalletDb<P>,\n\n) -> Result<Vec<(AccountId, Nullifier)>, SqliteClientError> {\n\n // Get the nullifiers for the notes we are tracking\n\n let mut stmt_fetch_nullifiers = wdb.conn.prepare(\n\n \"SELECT rn.id_note, rn.account, rn.nf, tx.block as block\n\n FROM received_notes rn\n\n LEFT OUTER JOIN transactions tx\n\n ON tx.id_tx = rn.spent\n\n WHERE block IS NULL\",\n\n )?;\n\n let nullifiers = stmt_fetch_nullifiers.query_map(NO_PARAMS, |row| {\n\n let account = AccountId(row.get(1)?);\n\n let nf_bytes: Vec<u8> = 
row.get(2)?;\n\n Ok((account, Nullifier::from_slice(&nf_bytes).unwrap()))\n\n })?;\n\n\n\n let res: Vec<_> = nullifiers.collect::<Result<_, _>>()?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 79, "score": 145500.62364625026 }, { "content": "/// Returns the block height at which the specified transaction was mined,\n\n/// if any.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::consensus::Network;\n\n/// use zcash_primitives::transaction::TxId;\n\n/// use zcash_client_sqlite::{\n\n/// WalletDb,\n\n/// wallet::get_tx_height,\n\n/// };\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db = WalletDb::for_path(data_file, Network::TestNetwork).unwrap();\n\n/// let height = get_tx_height(&db, TxId::from_bytes([0u8; 32]));\n\n/// ```\n\npub fn get_tx_height<P>(\n\n wdb: &WalletDb<P>,\n\n txid: TxId,\n\n) -> Result<Option<BlockHeight>, rusqlite::Error> {\n\n wdb.conn\n\n .query_row(\n\n \"SELECT block FROM transactions WHERE txid = ?\",\n\n &[txid.as_ref().to_vec()],\n\n |row| row.get(0).map(u32::into),\n\n )\n\n .optional()\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 80, "score": 143475.6511791534 }, { "content": "/// Returns the minimum and maximum heights for blocks stored in the wallet database.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::consensus::Network;\n\n/// use zcash_client_sqlite::{\n\n/// WalletDb,\n\n/// wallet::block_height_extrema,\n\n/// };\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db = WalletDb::for_path(data_file, Network::TestNetwork).unwrap();\n\n/// let bounds = block_height_extrema(&db);\n\n/// ```\n\npub fn block_height_extrema<P>(\n\n wdb: &WalletDb<P>,\n\n) -> Result<Option<(BlockHeight, BlockHeight)>, rusqlite::Error> {\n\n wdb.conn\n\n .query_row(\n\n \"SELECT MIN(height), MAX(height) FROM blocks\",\n\n 
NO_PARAMS,\n\n |row| {\n\n let min_height: u32 = row.get(0)?;\n\n let max_height: u32 = row.get(1)?;\n\n Ok(Some((\n\n BlockHeight::from(min_height),\n\n BlockHeight::from(max_height),\n\n )))\n\n },\n\n )\n\n //.optional() doesn't work here because a failed aggregate function\n\n //produces a runtime error, not an empty set of rows.\n\n .or(Ok(None))\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 81, "score": 143471.34657926136 }, { "content": "/// Returns the commitment tree for the block at the specified height,\n\n/// if any.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::consensus::{Network, H0};\n\n/// use zcash_client_sqlite::{\n\n/// WalletDb,\n\n/// wallet::get_commitment_tree,\n\n/// };\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db = WalletDb::for_path(data_file, Network::TestNetwork).unwrap();\n\n/// let tree = get_commitment_tree(&db, H0);\n\n/// ```\n\npub fn get_commitment_tree<P>(\n\n wdb: &WalletDb<P>,\n\n block_height: BlockHeight,\n\n) -> Result<Option<CommitmentTree<Node>>, SqliteClientError> {\n\n wdb.conn\n\n .query_row_and_then(\n\n \"SELECT sapling_tree FROM blocks WHERE height = ?\",\n\n &[u32::from(block_height)],\n\n |row| {\n\n let row_data: Vec<u8> = row.get(0)?;\n\n CommitmentTree::read(&row_data[..]).map_err(|e| {\n\n rusqlite::Error::FromSqlConversionFailure(\n\n row_data.len(),\n\n rusqlite::types::Type::Blob,\n\n Box::new(e),\n\n )\n\n })\n\n },\n\n )\n\n .optional()\n\n .map_err(SqliteClientError::from)\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 82, "score": 143471.34657926136 }, { "content": "/// Marks notes that have not been mined in transactions\n\n/// as expired, up to the given block height.\n\npub fn update_expired_notes<P>(\n\n stmts: &mut DataConnStmtCache<'_, P>,\n\n height: BlockHeight,\n\n) -> Result<(), SqliteClientError> {\n\n 
stmts.stmt_update_expired.execute(&[u32::from(height)])?;\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 83, "score": 143459.13512341696 }, { "content": "/// Perform a fixed-base scalar multiplication with\n\n/// `by` being in little-endian bit order.\n\npub fn fixed_base_multiplication<CS>(\n\n mut cs: CS,\n\n base: FixedGenerator,\n\n by: &[Boolean],\n\n) -> Result<EdwardsPoint, SynthesisError>\n\nwhere\n\n CS: ConstraintSystem<bls12_381::Scalar>,\n\n{\n\n // Represents the result of the multiplication\n\n let mut result = None;\n\n\n\n for (i, (chunk, window)) in by.chunks(3).zip(base.iter()).enumerate() {\n\n let chunk_a = chunk\n\n .get(0)\n\n .cloned()\n\n .unwrap_or_else(|| Boolean::constant(false));\n\n let chunk_b = chunk\n\n .get(1)\n\n .cloned()\n\n .unwrap_or_else(|| Boolean::constant(false));\n", "file_path": "zcash_proofs/src/circuit/ecc.rs", "rank": 84, "score": 143459.13512341696 }, { "content": "/// Initialises the data database with the given block.\n\n///\n\n/// This enables a newly-created database to be immediately-usable, without needing to\n\n/// synchronise historic blocks.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use tempfile::NamedTempFile;\n\n/// use zcash_primitives::{\n\n/// block::BlockHash,\n\n/// consensus::{BlockHeight, Network},\n\n/// };\n\n/// use zcash_client_sqlite::{\n\n/// WalletDb,\n\n/// wallet::init::init_blocks_table,\n\n/// };\n\n///\n\n/// // The block height.\n\n/// let height = BlockHeight::from_u32(500_000);\n\n/// // The hash of the block header.\n\n/// let hash = BlockHash([0; 32]);\n\n/// // The nTime field from the block header.\n\n/// let time = 12_3456_7890;\n\n/// // The serialized Sapling commitment tree as of this block.\n\n/// // Pre-compute and hard-code, or obtain from a service.\n\n/// let sapling_tree = &[];\n\n///\n\n/// let data_file = NamedTempFile::new().unwrap();\n\n/// let db = WalletDb::for_path(data_file.path(), Network::TestNetwork).unwrap();\n\n/// 
init_blocks_table(&db, height, hash, time, sapling_tree);\n\n/// ```\n\npub fn init_blocks_table<P>(\n\n wdb: &WalletDb<P>,\n\n height: BlockHeight,\n\n hash: BlockHash,\n\n time: u32,\n\n sapling_tree: &[u8],\n\n) -> Result<(), SqliteClientError> {\n\n let mut empty_check = wdb.conn.prepare(\"SELECT * FROM blocks LIMIT 1\")?;\n\n if empty_check.exists(NO_PARAMS)? {\n\n return Err(SqliteClientError::TableNotEmpty);\n\n }\n\n\n\n wdb.conn.execute(\n\n \"INSERT INTO blocks (height, hash, time, sapling_tree)\n\n VALUES (?, ?, ?, ?)\",\n\n &[\n\n u32::from(height).to_sql()?,\n\n hash.0.to_sql()?,\n\n time.to_sql()?,\n\n sapling_tree.to_sql()?,\n", "file_path": "zcash_client_sqlite/src/wallet/init.rs", "rank": 85, "score": 141529.12195776636 }, { "content": "pub fn select_spendable_notes<P>(\n\n wdb: &WalletDb<P>,\n\n account: AccountId,\n\n target_value: Amount,\n\n anchor_height: BlockHeight,\n\n) -> Result<Vec<SpendableNote>, SqliteClientError> {\n\n // The goal of this SQL statement is to select the oldest notes until the required\n\n // value has been reached, and then fetch the witnesses at the desired height for the\n\n // selected notes. 
This is achieved in several steps:\n\n //\n\n // 1) Use a window function to create a view of all notes, ordered from oldest to\n\n // newest, with an additional column containing a running sum:\n\n // - Unspent notes accumulate the values of all unspent notes in that note's\n\n // account, up to itself.\n\n // - Spent notes accumulate the values of all notes in the transaction they were\n\n // spent in, up to itself.\n\n //\n\n // 2) Select all unspent notes in the desired account, along with their running sum.\n\n //\n\n // 3) Select all notes for which the running sum was less than the required value, as\n", "file_path": "zcash_client_sqlite/src/wallet/transact.rs", "rank": 86, "score": 141511.1517903228 }, { "content": "pub fn get_spendable_notes<P>(\n\n wdb: &WalletDb<P>,\n\n account: AccountId,\n\n anchor_height: BlockHeight,\n\n) -> Result<Vec<SpendableNote>, SqliteClientError> {\n\n let mut stmt_select_notes = wdb.conn.prepare(\n\n \"SELECT diversifier, value, rcm, witness\n\n FROM received_notes\n\n INNER JOIN transactions ON transactions.id_tx = received_notes.tx\n\n INNER JOIN sapling_witnesses ON sapling_witnesses.note = received_notes.id_note\n\n WHERE account = :account\n\n AND spent IS NULL\n\n AND transactions.block <= :anchor_height\n\n AND sapling_witnesses.block = :anchor_height\",\n\n )?;\n\n\n\n // Select notes\n\n let notes = stmt_select_notes.query_and_then_named::<_, SqliteClientError, _>(\n\n named_params![\n\n \":account\": &i64::from(account.0),\n\n \":anchor_height\": &u32::from(anchor_height),\n\n ],\n\n to_spendable_note,\n\n )?;\n\n\n\n notes.collect::<Result<_, _>>()\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet/transact.rs", "rank": 87, "score": 141511.1517903228 }, { "content": "fn draft(into: &mut Vec<(u32, Entry<V1>)>, vec: &[NodeData], peak_pos: usize, h: u32) {\n\n let node_data = vec[peak_pos - 1].clone();\n\n let peak = match h {\n\n 0 => Entry::new_leaf(node_data),\n\n _ => Entry::new(\n\n node_data,\n\n 
EntryLink::Stored((peak_pos - (1 << h) - 1) as u32),\n\n EntryLink::Stored((peak_pos - 2) as u32),\n\n ),\n\n };\n\n\n\n println!(\"Entry #{}: {}\", into.len(), peak);\n\n\n\n into.push(((peak_pos - 1) as u32, peak));\n\n}\n\n\n", "file_path": "zcash_history/examples/long.rs", "rank": 88, "score": 140948.5085012788 }, { "content": "/// Inserts information about a scanned block into the database.\n\npub fn insert_block<'a, P>(\n\n stmts: &mut DataConnStmtCache<'a, P>,\n\n block_height: BlockHeight,\n\n block_hash: BlockHash,\n\n block_time: u32,\n\n commitment_tree: &CommitmentTree<Node>,\n\n) -> Result<(), SqliteClientError> {\n\n let mut encoded_tree = Vec::new();\n\n commitment_tree.write(&mut encoded_tree).unwrap();\n\n\n\n stmts.stmt_insert_block.execute(params![\n\n u32::from(block_height),\n\n &block_hash.0[..],\n\n block_time,\n\n encoded_tree\n\n ])?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 89, "score": 140475.9662601018 }, { "content": "/// Marks a given nullifier as having been revealed in the construction\n\n/// of the specified transaction.\n\n///\n\n/// Marking a note spent in this fashion does NOT imply that the\n\n/// spending transaction has been mined.\n\npub fn mark_spent<'a, P>(\n\n stmts: &mut DataConnStmtCache<'a, P>,\n\n tx_ref: i64,\n\n nf: &Nullifier,\n\n) -> Result<(), SqliteClientError> {\n\n stmts\n\n .stmt_mark_recived_note_spent\n\n .execute(&[tx_ref.to_sql()?, nf.0.to_sql()?])?;\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 90, "score": 140475.9662601018 }, { "content": "/// Records the incremental witness for the specified note,\n\n/// as of the given block height.\n\npub fn insert_witness<'a, P>(\n\n stmts: &mut DataConnStmtCache<'a, P>,\n\n note_id: i64,\n\n witness: &IncrementalWitness<Node>,\n\n height: BlockHeight,\n\n) -> Result<(), SqliteClientError> {\n\n let mut encoded = Vec::new();\n\n witness.write(&mut encoded).unwrap();\n\n\n\n stmts\n\n 
.stmt_insert_witness\n\n .execute(params![note_id, u32::from(height), encoded])?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 91, "score": 140475.9662601018 }, { "content": "/// Inserts full transaction data into the database.\n\npub fn put_tx_data<'a, P>(\n\n stmts: &mut DataConnStmtCache<'a, P>,\n\n tx: &Transaction,\n\n created_at: Option<time::OffsetDateTime>,\n\n) -> Result<i64, SqliteClientError> {\n\n let txid = tx.txid().as_ref().to_vec();\n\n\n\n let mut raw_tx = vec![];\n\n tx.write(&mut raw_tx)?;\n\n\n\n if stmts\n\n .stmt_update_tx_data\n\n .execute(params![u32::from(tx.expiry_height()), raw_tx, txid,])?\n\n == 0\n\n {\n\n // It isn't there, so insert our transaction into the database.\n\n stmts.stmt_insert_tx_data.execute(params![\n\n txid,\n\n created_at,\n\n u32::from(tx.expiry_height()),\n", "file_path": "zcash_client_sqlite/src/wallet.rs", "rank": 92, "score": 138434.4777372685 }, { "content": "fn hasher(personal: &[u8; 16]) -> State {\n\n Params::new().hash_length(32).personal(personal).to_state()\n\n}\n\n\n\n/// Sequentially append the serialized value of each transparent input\n\n/// to a hash personalized by ZCASH_PREVOUTS_HASH_PERSONALIZATION.\n\n/// In the case that no inputs are provided, this produces a default\n\n/// hash from just the personalization string.\n\npub(crate) fn transparent_prevout_hash<TransparentAuth: transparent::Authorization>(\n\n vin: &[TxIn<TransparentAuth>],\n\n) -> Blake2bHash {\n\n let mut h = hasher(ZCASH_PREVOUTS_HASH_PERSONALIZATION);\n\n for t_in in vin {\n\n t_in.prevout.write(&mut h).unwrap();\n\n }\n\n h.finalize()\n\n}\n\n\n\n/// Hash of the little-endian u32 interpretation of the\n\n/// `sequence` values for each TxIn record passed in vin.\n", "file_path": "zcash_primitives/src/transaction/txid.rs", "rank": 93, "score": 137556.43673063305 }, { "content": "fn personalization(branch_id: u32) -> [u8; 16] {\n\n let mut result = [0u8; 16];\n\n 
result[..12].copy_from_slice(b\"ZcashHistory\");\n\n LittleEndian::write_u32(&mut result[12..], branch_id);\n\n result\n\n}\n\n\n", "file_path": "zcash_history/src/version.rs", "rank": 94, "score": 137556.43673063305 }, { "content": "pub fn prf_rho<Scalar, CS>(\n\n cs: CS,\n\n phi: &[Boolean],\n\n h_sig: &[Boolean],\n\n nonce: bool,\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(cs, false, nonce, true, false, phi, h_sig)\n\n}\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 95, "score": 136486.49440417433 }, { "content": "pub fn note_comm<Scalar, CS>(\n\n cs: CS,\n\n a_pk: &[Boolean],\n\n value: &[Boolean],\n\n rho: &[Boolean],\n\n r: &[Boolean],\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n assert_eq!(a_pk.len(), 256);\n\n assert_eq!(value.len(), 64);\n\n assert_eq!(rho.len(), 256);\n\n assert_eq!(r.len(), 256);\n\n\n\n let mut image = vec![\n\n Boolean::constant(true),\n\n Boolean::constant(false),\n\n Boolean::constant(true),\n", "file_path": "zcash_proofs/src/circuit/sprout/commitment.rs", "rank": 96, "score": 136486.49440417433 }, { "content": "#[cfg(feature = \"directories\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"directories\")))]\n\npub fn default_params_folder() -> Option<PathBuf> {\n\n BaseDirs::new().map(|base_dirs| {\n\n if cfg!(any(windows, target_os = \"macos\")) {\n\n base_dirs.data_dir().join(\"ZcashParams\")\n\n } else {\n\n base_dirs.home_dir().join(\".zcash-params\")\n\n }\n\n })\n\n}\n\n\n\n/// Download the Zcash Sapling parameters, storing them in the default location.\n\n///\n\n/// This mirrors the behaviour of the `fetch-params.sh` script from `zcashd`.\n", "file_path": "zcash_proofs/src/lib.rs", "rank": 97, "score": 136486.49440417433 }, { "content": "pub fn prf_pk<Scalar, CS>(\n\n cs: CS,\n\n a_sk: &[Boolean],\n\n h_sig: &[Boolean],\n\n nonce: bool,\n\n) -> 
Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(cs, false, nonce, false, false, a_sk, h_sig)\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 98, "score": 136486.49440417433 }, { "content": "pub fn prf_nf<Scalar, CS>(\n\n cs: CS,\n\n a_sk: &[Boolean],\n\n rho: &[Boolean],\n\n) -> Result<Vec<Boolean>, SynthesisError>\n\nwhere\n\n Scalar: PrimeField,\n\n CS: ConstraintSystem<Scalar>,\n\n{\n\n prf(cs, true, true, true, false, a_sk, rho)\n\n}\n\n\n", "file_path": "zcash_proofs/src/circuit/sprout/prfs.rs", "rank": 99, "score": 136486.49440417433 } ]
Rust
src/peer.rs
ckampfe/manix
fd1e5cf309b125dca6a17bf3fe67c8eff0a8d17d
use crate::peer_protocol; use crate::{signals, Begin, Index, InfoHash, Length, PeerId}; use futures_util::sink::SinkExt; use futures_util::StreamExt; use std::convert::TryInto; use tokio::time::{Interval, MissedTickBehavior}; use tracing::{debug, error, info, instrument}; #[derive(Debug)] pub(crate) struct PeerOptions { pub(crate) connection: tokio_util::codec::Framed<tokio::net::TcpStream, peer_protocol::MessageCodec>, pub(crate) peer_id: PeerId, pub(crate) remote_peer_id: PeerId, pub(crate) info_hash: InfoHash, pub(crate) peer_to_torrent_tx: tokio::sync::mpsc::Sender<signals::PeerToTorrent>, pub(crate) torrent_to_peer_rx: tokio::sync::mpsc::Receiver<signals::TorrentToPeer>, pub(crate) global_permit: tokio::sync::OwnedSemaphorePermit, pub(crate) torrent_permit: tokio::sync::OwnedSemaphorePermit, pub(crate) piece_length: usize, pub(crate) chunk_length: usize, pub(crate) choke_state: ChokeState, pub(crate) interest_state: InterestState, } #[derive(Debug)] pub(crate) struct Peer { connection: tokio_util::codec::Framed<tokio::net::TcpStream, peer_protocol::MessageCodec>, peer_id: PeerId, remote_peer_id: PeerId, info_hash: InfoHash, peer_to_torrent_tx: tokio::sync::mpsc::Sender<signals::PeerToTorrent>, torrent_to_peer_rx: tokio::sync::mpsc::Receiver<signals::TorrentToPeer>, global_permit: tokio::sync::OwnedSemaphorePermit, torrent_permit: tokio::sync::OwnedSemaphorePermit, choke_state: ChokeState, interest_state: InterestState, piece_length: usize, chunk_length: usize, } impl Peer { #[instrument] pub(crate) fn new(options: PeerOptions) -> Self { Self { connection: options.connection, peer_id: options.peer_id, remote_peer_id: options.remote_peer_id, info_hash: options.info_hash, peer_to_torrent_tx: options.peer_to_torrent_tx, torrent_to_peer_rx: options.torrent_to_peer_rx, global_permit: options.global_permit, torrent_permit: options.torrent_permit, choke_state: ChokeState::Choked, interest_state: InterestState::NotInterested, piece_length: options.piece_length, 
chunk_length: options.chunk_length, } } #[instrument(skip(self))] pub(crate) async fn event_loop(mut self) -> Result<(), std::io::Error> { info!( "registering remote peer id {:?} with owning torrent", self.remote_peer_id.to_string() ); let current_bitfield = self.request_current_bifield().await?.await.map_err(|_e| { std::io::Error::new( std::io::ErrorKind::BrokenPipe, "TODO handle this oneshot channel error", ) })?; info!("got bitfield from owning torrent"); self.send_bitfield(current_bitfield).await?; info!("Sent bitfield to peer"); self.send_unchoke().await?; info!("Sent unchoke to peer"); self.send_interested().await?; info!("Sent interested"); let Timers { mut keepalive } = self.start_timers().await; info!("Started peer timers"); loop { tokio::select! { result = Peer::receive_message(&mut self.connection) => { match result { Some(Ok(message)) => { match message { peer_protocol::Message::Keepalive => { info!("Receieved keepalive from peer") }, peer_protocol::Message::Choke => { info!("Received choke from peer"); self.send_choke().await? 
}, peer_protocol::Message::Unchoke => { info!("Received unchoke from peer"); self.send_unchoke().await?; }, peer_protocol::Message::Interested => { info!("Received interested from peer"); }, peer_protocol::Message::NotInterested => { info!("Received not interested from peer"); }, peer_protocol::Message::Have { index: _ } => todo!(), peer_protocol::Message::Bitfield { bitfield } => { info!("Received bitfield from peer"); self.report_bitfield(bitfield).await?; }, peer_protocol::Message::Request { index: _, begin: _, length: _ } => todo!(), peer_protocol::Message::Piece { index, begin, chunk: _ } => { info!("Received piece ({}, {})", index, begin) }, peer_protocol::Message::Cancel { index: _, begin: _, length: _ } => todo!(), } } Some(Err(e)) => { error!("{:#?}", e.to_string()); self.deregister_with_owning_torrent().await?; return Err(e); } None => { info!("remote peer hung up"); self.deregister_with_owning_torrent().await?; return Ok(()) }, } } result = Peer::receive_message_from_owning_torrent(&mut self.torrent_to_peer_rx) => { match result { Some(message) => match message { signals::TorrentToPeer::Choke => { self.choke_state = ChokeState::Choked; self.send_choke().await? }, signals::TorrentToPeer::NotChoked => { self.choke_state = ChokeState::NotChoked; self.send_unchoke().await? }, signals::TorrentToPeer::Interested => { self.interest_state = InterestState::Interested; self.send_interested().await? }, signals::TorrentToPeer::NotInterested => { self.interest_state = InterestState::NotInterested; self.send_not_interested().await? 
}, signals::TorrentToPeer::GetPiece(index) => { let offsets = peer_protocol::chunk_offsets_lengths(self.piece_length, self.chunk_length); for (chunk_offset, chunk_length) in offsets { self.send_request( index.try_into().unwrap(), chunk_offset.try_into().unwrap(), chunk_length.try_into().unwrap() ).await?; } } }, None => { debug!("Torrent to peer channel closed, disconnecting"); return Ok(()); }, } } _ = keepalive.tick() => { info!("sent keepalive"); self.send_keepalive().await?; } } } } #[instrument(skip(self))] async fn start_timers(&self) -> Timers { let mut keepalive = tokio::time::interval(std::time::Duration::from_secs(30)); keepalive.set_missed_tick_behavior(MissedTickBehavior::Burst); Timers { keepalive } } #[instrument(skip(self))] async fn send_keepalive(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Keepalive).await } #[instrument(skip(self))] async fn send_choke(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Choke).await } #[instrument(skip(self))] async fn send_unchoke(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Unchoke).await } #[instrument(skip(self))] async fn send_interested(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Interested).await } #[instrument(skip(self))] async fn send_not_interested(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::NotInterested) .await } #[instrument(skip(self))] async fn send_have(&mut self, index: Index) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Have { index }) .await } #[instrument(skip(self))] async fn send_bitfield( &mut self, bitfield: peer_protocol::Bitfield, ) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Bitfield { bitfield }) .await } #[instrument(skip(self))] async fn send_request( &mut self, index: Index, begin: Begin, length: Length, ) -> Result<(), 
std::io::Error> { self.send_message(peer_protocol::Message::Request { index, begin, length, }) .await } #[instrument(skip(self, chunk))] async fn send_piece( &mut self, index: Index, begin: Begin, chunk: Vec<u8>, ) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Piece { index, begin, chunk, }) .await } #[instrument(skip(self))] async fn send_cancel( &mut self, index: Index, begin: Begin, length: Length, ) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Cancel { index, begin, length, }) .await } async fn send_message( &mut self, message: peer_protocol::Message, ) -> Result<(), std::io::Error> { self.connection.send(message).await?; self.connection.flush().await } async fn receive_message( connection: &mut tokio_util::codec::Framed< tokio::net::TcpStream, peer_protocol::MessageCodec, >, ) -> Option<Result<peer_protocol::Message, std::io::Error>> { connection.next().await } #[instrument(skip(self))] async fn deregister_with_owning_torrent(&mut self) -> Result<(), std::io::Error> { self.send_to_owned_torrent(signals::PeerToTorrent::Deregister { remote_peer_id: self.remote_peer_id, }) .await } #[instrument(skip(self))] async fn request_current_bifield( &self, ) -> Result<tokio::sync::oneshot::Receiver<peer_protocol::Bitfield>, std::io::Error> { let (tx, rx) = tokio::sync::oneshot::channel(); self.send_to_owned_torrent(signals::PeerToTorrent::RequestBitfield { remote_peer_id: self.remote_peer_id, responder: tx, }) .await?; Ok(rx) } #[instrument(skip(self))] async fn report_bitfield( &self, bitfield: peer_protocol::Bitfield, ) -> Result<(), std::io::Error> { self.send_to_owned_torrent(signals::PeerToTorrent::Bitfield { remote_peer_id: self.remote_peer_id, bitfield, }) .await?; Ok(()) } #[instrument(skip(self))] async fn send_to_owned_torrent( &self, message: signals::PeerToTorrent, ) -> Result<(), std::io::Error> { self.peer_to_torrent_tx .send(message) .await .map_err(|e| 
std::io::Error::new(std::io::ErrorKind::BrokenPipe, e.to_string())) } #[instrument(skip(rx))] async fn receive_message_from_owning_torrent( rx: &mut tokio::sync::mpsc::Receiver<signals::TorrentToPeer>, ) -> Option<signals::TorrentToPeer> { rx.recv().await } } struct Timers { keepalive: Interval, } #[derive(Debug)] pub(crate) enum ChokeState { Choked, NotChoked, } #[derive(Debug)] pub(crate) enum InterestState { Interested, NotInterested, }
use crate::peer_protocol; use crate::{signals, Begin, Index, InfoHash, Length, PeerId}; use futures_util::sink::SinkExt; use futures_util::StreamExt; use std::convert::TryInto; use tokio::time::{Interval, MissedTickBehavior}; use tracing::{debug, error, info, instrument}; #[derive(Debug)] pub(crate) struct PeerOptions { pub(crate) connection: tokio_util::codec::Framed<tokio::net::TcpStream, peer_protocol::MessageCodec>, pub(crate) peer_id: PeerId, pub(crate) remote_peer_id: PeerId, pub(crate) info_hash: InfoHash, pub(crate) peer_to_torrent_tx: tokio::sync::mpsc::Sender<signals::PeerToTorrent>, pub(crate) torrent_to_peer_rx: tokio::sync::mpsc::Receiver<signals::TorrentToPeer>, pub(crate) global_permit: tokio::sync::OwnedSemaphorePermit, pub(crate) torrent_permit: tokio::sync::OwnedSemaphorePermit, pub(crate) piece_length: usize, pub(crate) chunk_length: usize, pub(crate) choke_state: ChokeState, pub(crate) interest_state: InterestState, } #[derive(Debug)] pub(crate) struct Peer { connection: tokio_util::codec::Framed<tokio::net::TcpStream, peer_protocol::MessageCodec>, peer_id: PeerId, remote_peer_id: PeerId, info_hash: InfoHash, peer_to_torrent_tx: tokio::sync::mpsc::Sender<signals::PeerToTorrent>, torrent_to_peer_rx: tokio::sync::mpsc::Receiver<signals::TorrentToPeer>, global_permit: tokio::sync::OwnedSemaphorePermit, torrent_permit: tokio::sync::OwnedSemaphorePermit, choke_state: ChokeState, interest_state: InterestState, piece_length: usize, chunk_length: usize, } impl Peer { #[instrument] pub(crate) fn new(options: PeerOptions) -> Self { Self { connection: options.connection, peer_id: options.peer_id, remote_peer_id: options.remote_peer_id, info_hash: options.info_hash, peer_to_torrent_tx: options.peer_to_torrent_tx, torrent_to_peer_rx: options.torrent_to_peer_rx, global_permit: options.global_permit, torrent_permit: options.torrent_permit, choke_state: ChokeState::Choked, interest_state: InterestState::NotInterested, piece_length: options.piece_length, 
chunk_length: options.chunk_length, } } #[instrument(skip(self))] pub(crate) async fn event_loop(mut self) -> Result<(), std::io::Error> { info!( "registering remote peer id {:?} with owning torrent", self.remote_peer_id.to_string() ); let current_bitfield = self.request_current_bifield().await?.await.map_err(|_e| { std::io::Error::new( std::io::ErrorKind::BrokenPipe, "TODO handle this oneshot channel error", ) })?; info!("got bitfield from owning torrent"); self.send_bitfield(current_bitfield).await?; info!("Sent bitfield to peer"); self.send_unchoke().await?; info!("Sent unchoke to peer"); self.send_interested().await?; info!("Sent interested"); let Timers { mut keepalive } = self.start_timers().await; info!("Started peer timers"); loop { tokio::select! { result = Peer::receive_message(&mut self.connection) => { match result { Some(Ok(message)) => { match message { peer_protocol::Message::Keepalive => { info!("Receieved keepalive from peer") }, peer_protocol::Message::Choke => { info!("Received choke from peer"); self.send_choke().await? 
}, peer_protocol::Message::Unchoke => { info!("Received unchoke from peer"); self.send_unchoke().await?; }, peer_protocol::Message::Interested => { info!("Received interested from peer"); }, peer_protocol::Message::NotInterested => { info!("Received not interested from peer"); }, peer_protocol::Message::Have { index: _ } => todo!(), peer_protocol::Message::Bitfield { bitfield } => { info!("Received bitfield from peer"); self.report_bitfield(bitfield).await?; }, peer_protocol::Message::Request { index: _, begin: _, length: _ } => todo!(), peer_protocol::Message::Piece { index, begin, chunk: _ } => { info!("Received piece ({}, {})", index, begin) }, peer_protocol::Message::Cancel { index: _, begin: _, length: _ } => todo!(), } } Some(Err(e)) => { error!("{:#?}", e.to_string()); self.deregister_with_owning_torrent().await?; return Err(e); } None => { info!("remote peer hung up"); self.deregister_with_owning_torrent().await?; return Ok(()) }, } } result = Peer::receive_message_from_owning_torrent(&mut self.torrent_to_peer_rx) => { match result { Some(message) => match message { signals::TorrentToPeer::Choke => { self.choke_state = ChokeState::Choked; self.send_choke().await? }, signals::TorrentToPeer::NotChoked => { self.choke_state = ChokeState::NotChoked; self.send_unchoke().await? }, signals::TorrentToPeer::Interested => { self.interest_state = InterestState::Interested; self.send_interested().await? }, signals::TorrentToPeer::NotInterested => { self.interest_state = InterestState::NotInterested; self.send_not_interested().await? 
}, signals::TorrentToPeer::GetPiece(index) => { let offsets = peer_protocol::chunk_offsets_lengths(self.piece_length, self.chunk_length); for (chunk_offset, chunk_length) in offsets { self.send_request( index.try_into().unwrap(), chunk_offset.try_into().unwrap(), chunk_length.try_into().unwrap() ).await?; } } }, None => { debug!("Torrent to peer channel closed, disconnecting"); return Ok(()); }, } } _ = keepalive.tick() => { info!("sent keepalive"); self.send_keepalive().await?; } } } } #[instrument(skip(self))] async fn start_timers(&self) -> Timers { let mut keepalive = tokio::time::interval(std::time::Duration::from_secs(30)); keepalive.set_missed_tick_behavior(MissedTickBehavior::Burst); Timers { keepalive } } #[instrument(skip(self))] async fn send_keepalive(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Keepalive).await } #[instrument(skip(self))] async fn send_choke(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Choke).await } #[instrument(skip(self))] async fn send_unchoke(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Unchoke).await } #[instrument(skip(self))] async fn send_interested(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Interested).await } #[instrument(skip(self))] async fn send_not_interested(&mut self) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::NotInterested) .await } #[instrument(skip(self))] async fn send_have(&mut self, index: Index) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Have { index }) .await } #[instrument(skip(self))] async fn send_bitfield( &mut self, bitfield: peer_protoc
#[instrument(skip(self))] async fn send_request( &mut self, index: Index, begin: Begin, length: Length, ) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Request { index, begin, length, }) .await } #[instrument(skip(self, chunk))] async fn send_piece( &mut self, index: Index, begin: Begin, chunk: Vec<u8>, ) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Piece { index, begin, chunk, }) .await } #[instrument(skip(self))] async fn send_cancel( &mut self, index: Index, begin: Begin, length: Length, ) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Cancel { index, begin, length, }) .await } async fn send_message( &mut self, message: peer_protocol::Message, ) -> Result<(), std::io::Error> { self.connection.send(message).await?; self.connection.flush().await } async fn receive_message( connection: &mut tokio_util::codec::Framed< tokio::net::TcpStream, peer_protocol::MessageCodec, >, ) -> Option<Result<peer_protocol::Message, std::io::Error>> { connection.next().await } #[instrument(skip(self))] async fn deregister_with_owning_torrent(&mut self) -> Result<(), std::io::Error> { self.send_to_owned_torrent(signals::PeerToTorrent::Deregister { remote_peer_id: self.remote_peer_id, }) .await } #[instrument(skip(self))] async fn request_current_bifield( &self, ) -> Result<tokio::sync::oneshot::Receiver<peer_protocol::Bitfield>, std::io::Error> { let (tx, rx) = tokio::sync::oneshot::channel(); self.send_to_owned_torrent(signals::PeerToTorrent::RequestBitfield { remote_peer_id: self.remote_peer_id, responder: tx, }) .await?; Ok(rx) } #[instrument(skip(self))] async fn report_bitfield( &self, bitfield: peer_protocol::Bitfield, ) -> Result<(), std::io::Error> { self.send_to_owned_torrent(signals::PeerToTorrent::Bitfield { remote_peer_id: self.remote_peer_id, bitfield, }) .await?; Ok(()) } #[instrument(skip(self))] async fn send_to_owned_torrent( &self, message: signals::PeerToTorrent, ) -> Result<(), 
std::io::Error> { self.peer_to_torrent_tx .send(message) .await .map_err(|e| std::io::Error::new(std::io::ErrorKind::BrokenPipe, e.to_string())) } #[instrument(skip(rx))] async fn receive_message_from_owning_torrent( rx: &mut tokio::sync::mpsc::Receiver<signals::TorrentToPeer>, ) -> Option<signals::TorrentToPeer> { rx.recv().await } } struct Timers { keepalive: Interval, } #[derive(Debug)] pub(crate) enum ChokeState { Choked, NotChoked, } #[derive(Debug)] pub(crate) enum InterestState { Interested, NotInterested, }
ol::Bitfield, ) -> Result<(), std::io::Error> { self.send_message(peer_protocol::Message::Bitfield { bitfield }) .await }
function_block-function_prefixed
[ { "content": "fn info_hash(bencode: &nom_bencode::Bencode) -> InfoHash {\n\n match bencode {\n\n nom_bencode::Bencode::Dictionary(d) => {\n\n let info = d.get(&b\"info\".to_vec()).unwrap();\n\n let encoded = info.encode();\n\n InfoHash(hash(&encoded))\n\n }\n\n _ => panic!(\".torrent bencode must be a dictionary\"),\n\n }\n\n}\n\n\n", "file_path": "src/torrent.rs", "rank": 0, "score": 108398.9714831816 }, { "content": "struct Timers {\n\n announce_timer: Interval,\n\n assessment_timer: Interval,\n\n debug_timer: Interval,\n\n}\n\n\n\npub(crate) fn generate_peer_id() -> PeerId {\n\n let rand_string: String = rand::thread_rng()\n\n .sample_iter(&rand::distributions::Alphanumeric)\n\n .take(16)\n\n .map(char::from)\n\n .collect();\n\n\n\n let mut id = \"MNX-\".to_string();\n\n id.push_str(&rand_string);\n\n\n\n let id_bytes: [u8; 20] = id.as_bytes().try_into().unwrap();\n\n\n\n PeerId(id_bytes)\n\n}\n\n\n", "file_path": "src/torrent.rs", "rank": 1, "score": 86396.49799658012 }, { "content": "#[derive(Debug)]\n\nstruct PeerState {\n\n torrent_to_peer_tx: tokio::sync::mpsc::Sender<signals::TorrentToPeer>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Torrent {\n\n peer_id: PeerId,\n\n meta_info: MetaInfo,\n\n torrent_data: PathBuf,\n\n peers: HashMap<PeerId, PeerState>,\n\n port: Port,\n\n uploaded: usize,\n\n downloaded: usize,\n\n pieces_bitfield: peer_protocol::Bitfield,\n\n pieces_to_peers: Vec<HashSet<PeerId>>,\n\n info_hash: InfoHash,\n\n listener: Option<Listener<String>>,\n\n global_max_peer_connections: Arc<tokio::sync::Semaphore>,\n\n torrent_max_peer_connections: Arc<tokio::sync::Semaphore>,\n\n peer_to_torrent_tx: tokio::sync::mpsc::Sender<signals::PeerToTorrent>,\n", "file_path": "src/torrent.rs", "rank": 3, "score": 78229.0395812049 }, { "content": "pub fn async_client(options: Options) -> AsyncClient {\n\n AsyncClient::new(options)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 51308.66879182928 }, { "content": "fn hash(buf: &[u8]) -> [u8; 20] 
{\n\n let mut hasher = sha1::Sha1::new();\n\n hasher.update(buf);\n\n hasher\n\n .finalize()\n\n .try_into()\n\n .expect(\"result must be 20 bytes\")\n\n}\n", "file_path": "src/torrent.rs", "rank": 5, "score": 48107.242592273185 }, { "content": "type Length = u32;\n", "file_path": "src/lib.rs", "rank": 6, "score": 35426.20567394204 }, { "content": "type Begin = u32;\n", "file_path": "src/lib.rs", "rank": 7, "score": 35426.20567394204 }, { "content": "type Index = u32;\n", "file_path": "src/lib.rs", "rank": 8, "score": 35426.20567394204 }, { "content": " downloaded: 0,\n\n pieces_bitfield,\n\n pieces_to_peers,\n\n info_hash,\n\n global_max_peer_connections,\n\n listener: None,\n\n torrent_max_peer_connections,\n\n peer_to_torrent_tx,\n\n peer_to_torrent_rx,\n\n event_loop_interrupt_tx: None,\n\n chunk_length,\n\n })\n\n }\n\n\n\n #[instrument(skip(self))]\n\n pub(crate) async fn start(&mut self) -> Result<(), std::io::Error> {\n\n self.validate_torrent_data().await?;\n\n\n\n match self.get_status().await {\n\n Status::Leeching => self.announce(AnnounceEvent::Started).await?,\n", "file_path": "src/torrent.rs", "rank": 9, "score": 25417.175560493462 }, { "content": "pub fn blocking_client(options: Options) -> BlockingClient {\n\n BlockingClient::new(options)\n\n}\n\n\n\npub struct Options {\n\n global_max_peer_connections: usize,\n\n}\n\n\n\nimpl Default for Options {\n\n fn default() -> Self {\n\n Self {\n\n global_max_peer_connections: 500,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]\n\npub struct PeerId([u8; 20]);\n\n\n\nimpl PeerId {\n", "file_path": "src/lib.rs", "rank": 10, "score": 25416.715055283486 }, { "content": " signals::PeerToTorrent::Bitfield { remote_peer_id, bitfield } => {\n\n info!(\"received bitfield from {}\", remote_peer_id);\n\n for index in bitfield.iter_ones() {\n\n self.peer_have(index.try_into().unwrap(), remote_peer_id).await;\n\n }\n\n self.pieces_bitfield |= bitfield;\n\n }\n\n },\n\n None => 
error!(\"peer_to_torrent_rx was closed when torrent.rs tried to receive a message from peer\"),\n\n }\n\n }\n\n _ = announce_timer.tick() => {\n\n match self.get_status().await {\n\n Status::Leeching => {\n\n self.announce(AnnounceEvent::Empty).await?;\n\n },\n\n Status::Seeding => {\n\n // don't need to continue announcing\n\n },\n\n }\n", "file_path": "src/torrent.rs", "rank": 11, "score": 25416.130049586845 }, { "content": " Some(message) => match message {\n\n signals::PeerToTorrent::Register { remote_peer_id, torrent_to_peer_tx } => {\n\n let peer_state = PeerState {\n\n torrent_to_peer_tx,\n\n };\n\n\n\n self.peers.insert(remote_peer_id, peer_state);\n\n\n\n info!(\"registered remote peer {}\", remote_peer_id);\n\n },\n\n signals::PeerToTorrent::Deregister { remote_peer_id } => {\n\n info!(\"deregistered remote peer {}\", remote_peer_id);\n\n self.peer_not_have(remote_peer_id).await;\n\n self.peers.remove(&remote_peer_id);\n\n },\n\n signals::PeerToTorrent::RequestBitfield { remote_peer_id: _, responder } => {\n\n responder.send(\n\n self.pieces_bitfield.clone()\n\n ).map_err(|_e| std::io::Error::new(std::io::ErrorKind::Other, \"TODO actually handle this torrent to peer send error\"))?;\n\n }\n", "file_path": "src/torrent.rs", "rank": 12, "score": 25414.894950384412 }, { "content": " peer_to_torrent_rx: tokio::sync::mpsc::Receiver<signals::PeerToTorrent>,\n\n event_loop_interrupt_tx: Option<tokio::sync::oneshot::Sender<()>>,\n\n chunk_length: usize,\n\n}\n\n\n\n// PUBLIC\n\nimpl Torrent {\n\n pub(crate) fn new(\n\n options: TorrentOptions,\n\n dot_torrent_bencode: nom_bencode::Bencode,\n\n torrent_data: PathBuf,\n\n global_max_peer_connections: Arc<tokio::sync::Semaphore>,\n\n ) -> Result<Self, std::io::Error> {\n\n let peer_id = generate_peer_id();\n\n let info_hash = info_hash(&dot_torrent_bencode);\n\n let meta_info = MetaInfo::try_from(dot_torrent_bencode)?;\n\n info!(\"metainfo reports {} pieces\", meta_info.info.pieces.len());\n\n info!(\"metainfo 
reports length of {}\", meta_info.info.piece_length);\n\n let pieces_bitfield = peer_protocol::Bitfield::new(meta_info.info.pieces.len());\n\n\n", "file_path": "src/torrent.rs", "rank": 13, "score": 25413.683511713945 }, { "content": "\n\n loop {\n\n tokio::select! {\n\n peer_result = self.listener.as_ref().unwrap().accept() => {\n\n match peer_result {\n\n Ok(peer) => {\n\n info!(\"accepted peer, attempting handshake...\");\n\n tokio::spawn(async move {\n\n let peer = peer.handshake_remote_peer().await?;\n\n peer.event_loop().await\n\n });\n\n },\n\n Err(e) => {\n\n error!(\"{:?}\", e);\n\n panic!();\n\n }\n\n }\n\n }\n\n message = self.peer_to_torrent_rx.recv() => {\n\n match message {\n", "file_path": "src/torrent.rs", "rank": 14, "score": 25413.05344743781 }, { "content": " Status::Seeding => self.announce(AnnounceEvent::Completed).await?,\n\n };\n\n\n\n let listener_options = listener::ListenerOptions {\n\n address: \"127.0.0.1:6881\".to_string(),\n\n peer_id: self.peer_id,\n\n info_hash: self.info_hash,\n\n global_max_peer_connections: self.global_max_peer_connections.clone(),\n\n torrent_max_peer_connections: self.torrent_max_peer_connections.clone(),\n\n peer_to_torrent_tx: self.peer_to_torrent_tx.clone(),\n\n piece_length: self.meta_info.info.piece_length,\n\n chunk_length: self.chunk_length,\n\n };\n\n\n\n let listener = listener::Listener::new(listener_options);\n\n self.listener = Some(listener);\n\n self.listener.as_mut().unwrap().listen().await?;\n\n\n\n let (interrupt_tx, interrupt_rx) = tokio::sync::oneshot::channel();\n\n self.event_loop_interrupt_tx = Some(interrupt_tx);\n", "file_path": "src/torrent.rs", "rank": 15, "score": 25411.147977583532 }, { "content": " let torrent_max_peer_connections =\n\n Arc::new(tokio::sync::Semaphore::new(options.max_peer_connections));\n\n\n\n let (peer_to_torrent_tx, peer_to_torrent_rx) = tokio::sync::mpsc::channel(32);\n\n\n\n let peers = HashMap::new();\n\n\n\n // let pieces_to_peers = 
Vec::with_capacity(meta_info.info.pieces.len());\n\n let pieces_to_peers = vec![HashSet::new(); meta_info.info.pieces.len()];\n\n\n\n let chunks_per_piece = 4usize;\n\n let chunk_length = meta_info.info.piece_length / chunks_per_piece;\n\n\n\n Ok(Self {\n\n peer_id,\n\n meta_info,\n\n torrent_data,\n\n peers,\n\n port: options.port,\n\n uploaded: 0,\n", "file_path": "src/torrent.rs", "rank": 16, "score": 25411.079687013967 }, { "content": " Ok(())\n\n }\n\n}\n\n\n\n// PRIVATE\n\nimpl Torrent {\n\n #[instrument(skip(self, interrupt_rx))]\n\n async fn event_loop(\n\n &mut self,\n\n mut interrupt_rx: tokio::sync::oneshot::Receiver<()>,\n\n ) -> Result<(), std::io::Error> {\n\n let Timers {\n\n announce_timer,\n\n assessment_timer,\n\n debug_timer,\n\n } = self.start_timers().await;\n\n\n\n tokio::pin!(announce_timer);\n\n tokio::pin!(assessment_timer);\n\n tokio::pin!(debug_timer);\n", "file_path": "src/torrent.rs", "rank": 17, "score": 25406.345567286153 }, { "content": " pub fn get_piece_length(&self) -> usize {\n\n self.meta_info.info.piece_length\n\n }\n\n\n\n pub fn get_pieces(&self) -> &[[u8; 20]] {\n\n &self.meta_info.info.pieces\n\n }\n\n\n\n #[instrument(skip(self))]\n\n pub async fn announce(\n\n &self,\n\n event: AnnounceEvent,\n\n ) -> Result<nom_bencode::Bencode, std::io::Error> {\n\n let uploaded = self.get_uploaded().to_string();\n\n let downloaded = self.get_downloaded().to_string();\n\n let port = self.get_port().to_string();\n\n let peer_id = self.get_peer_id();\n\n let peer_id = std::str::from_utf8(peer_id.as_ref()).unwrap();\n\n let info_hash = self.get_info_hash_machine();\n\n let left = self.get_left().to_string();\n", "file_path": "src/torrent.rs", "rank": 18, "score": 25405.601403803917 }, { "content": " }\n\n }\n\n\n\n async fn get_status(&self) -> Status {\n\n let have_pieces_len = self.pieces_bitfield.count_ones();\n\n if have_pieces_len == self.pieces_bitfield.len() {\n\n Status::Seeding\n\n } else {\n\n Status::Leeching\n\n }\n\n }\n\n\n\n 
async fn peer_have(&mut self, index: crate::Index, peer_id: PeerId) {\n\n self.pieces_to_peers\n\n .get_mut(index as usize)\n\n .map(|peers| peers.insert(peer_id));\n\n }\n\n\n\n async fn peer_not_have(&mut self, peer_id: PeerId) {\n\n for peers in self.pieces_to_peers.iter_mut() {\n\n peers.remove(&peer_id);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/torrent.rs", "rank": 19, "score": 25404.448691218604 }, { "content": " }\n\n _ = assessment_timer.tick() => {\n\n // TODO this should be somehow worked into a semaphore,\n\n // and we can acquire a specific number of permits that will\n\n // implement rate limiting. Right now it is a magic constant.\n\n // ex: PER_TORRENT_DOWNLOAD_ALLOWANCE = N_DL_PERMITS * PIECE_SIZE;\n\n // ex: GLOBAL_DOWNLOAD_ALLOWANCE = N_GLOBAL_DL_PERMITS * PIECE_SIZE\n\n let number_of_pieces_to_download = 5;\n\n\n\n match self.get_status().await {\n\n Status::Leeching => {\n\n // find some indexes we don't yet have\n\n let unhad_piece_indexes: Vec<usize> = self.random_unhad_piece_indexes(number_of_pieces_to_download).await;\n\n // find some peers who have them\n\n let peers_with_pieces = self.random_peers_with_pieces(unhad_piece_indexes).await;\n\n // ask those peers to download those indexes\n\n for (peer_id, index) in peers_with_pieces {\n\n let peer = self.peers.get(&peer_id);\n\n if let Some(peer) = peer {\n\n peer.torrent_to_peer_tx.send(signals::TorrentToPeer::GetPiece(index)).await.expect(\"Actually handle failing to send a message to a peer\")\n", "file_path": "src/torrent.rs", "rank": 20, "score": 25404.322644978554 }, { "content": " }\n\n }\n\n\n\n async fn random_unhad_piece_indexes(&self, n: usize) -> Vec<usize> {\n\n let mut rng = rand::thread_rng();\n\n self.pieces_bitfield\n\n .iter_zeros()\n\n .choose_multiple(&mut rng, n)\n\n }\n\n\n\n async fn random_peers_with_pieces(&self, piece_indexes: Vec<usize>) -> Vec<(PeerId, usize)> {\n\n let mut peers_with_pieces = vec![];\n\n let mut rng = thread_rng();\n\n\n\n for i in piece_indexes 
{\n\n if let Some(peer_id) = self.pieces_to_peers[i].iter().cloned().choose(&mut rng) {\n\n peers_with_pieces.push((peer_id, i));\n\n }\n\n }\n\n\n", "file_path": "src/torrent.rs", "rank": 21, "score": 25404.137857744245 }, { "content": " self.event_loop(interrupt_rx).await?;\n\n Ok(())\n\n }\n\n\n\n #[instrument(skip(self))]\n\n pub(crate) async fn pause(&mut self) -> Result<(), std::io::Error> {\n\n // stop the event loop\n\n if let Some(tx) = self.event_loop_interrupt_tx.take() {\n\n tx.send(()).unwrap();\n\n }\n\n\n\n // drop the listener as well\n\n let _ = self.listener.take();\n\n\n\n self.announce(AnnounceEvent::Stopped).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn get_info_hash_human(&self) -> String {\n", "file_path": "src/torrent.rs", "rank": 22, "score": 25402.128814757005 }, { "content": " let mut buf = vec![0u8; buf_len];\n\n let piece_hashes = &self.meta_info.info.pieces;\n\n let piece_hashes_len = piece_hashes.len();\n\n let mut remaining_length = self.meta_info.info.length;\n\n\n\n assert!(\n\n piece_hashes_len > 0,\n\n \"There must be more than 0 pieces in a torrent\"\n\n );\n\n\n\n // regular case loop for all but the last piece\n\n for (i, known_piece_hash) in piece_hashes[..piece_hashes_len - 1].iter().enumerate() {\n\n torrent_data_file.read_exact(&mut buf).await?;\n\n remaining_length -= buf_len;\n\n let bytes_hash = hash(&buf);\n\n if &bytes_hash == known_piece_hash {\n\n if let Some(mut bit) = self.pieces_bitfield.get_mut(i) {\n\n *bit = true;\n\n }\n\n }\n", "file_path": "src/torrent.rs", "rank": 23, "score": 25402.049452562023 }, { "content": "use crate::listener::{self, Listener};\n\nuse crate::metainfo::MetaInfo;\n\nuse crate::InfoHash;\n\nuse crate::{peer_protocol, signals};\n\nuse crate::{PeerId, Port};\n\nuse rand::prelude::IteratorRandom;\n\nuse rand::{thread_rng, Rng};\n\nuse sha1::Digest;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::fmt::Display;\n\nuse std::path::PathBuf;\n\nuse 
std::sync::Arc;\n\nuse tokio::io::AsyncReadExt;\n\nuse tokio::time::Interval;\n\nuse tracing::{debug, error, info, instrument};\n\n\n\npub struct TorrentOptions {\n\n port: Port,\n\n max_peer_connections: usize,\n", "file_path": "src/torrent.rs", "rank": 24, "score": 25401.293612789585 }, { "content": " }\n\n\n\n // special case for the last piece, which may be (probably is) shorter than a full piece\n\n buf.resize(remaining_length, 0u8);\n\n torrent_data_file.read_exact(&mut buf).await?;\n\n remaining_length -= buf.len();\n\n let bytes_hash = hash(&buf);\n\n if &bytes_hash == piece_hashes.last().unwrap() {\n\n if let Some(mut bit) = self.pieces_bitfield.last_mut() {\n\n *bit = true;\n\n }\n\n }\n\n\n\n if remaining_length > 0 {\n\n return Err(std::io::Error::new(\n\n std::io::ErrorKind::UnexpectedEof,\n\n \"Torrent ended early\",\n\n ));\n\n }\n\n\n", "file_path": "src/torrent.rs", "rank": 25, "score": 25398.81705391103 }, { "content": " .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;\n\n\n\n let response_bytes = response\n\n .bytes()\n\n .await\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?\n\n .to_vec();\n\n\n\n let decoded = nom_bencode::decode(&response_bytes).map_err(|e| {\n\n std::io::Error::new(std::io::ErrorKind::InvalidData, format!(\"{:?}\", e))\n\n })?;\n\n\n\n Ok(decoded)\n\n }\n\n\n\n #[instrument(skip(self))]\n\n pub async fn validate_torrent_data(&mut self) -> Result<(), std::io::Error> {\n\n let mut torrent_data_file = tokio::fs::OpenOptions::new()\n\n .write(true)\n\n .read(true)\n", "file_path": "src/torrent.rs", "rank": 26, "score": 25398.421675656606 }, { "content": " .create(true)\n\n .truncate(false)\n\n .open(&self.torrent_data)\n\n .await?;\n\n\n\n let file_metadata = torrent_data_file.metadata().await?;\n\n\n\n if file_metadata.len() == 0 {\n\n torrent_data_file\n\n .set_len(\n\n self.meta_info\n\n .info\n\n .length\n\n .try_into()\n\n .expect(\"File size must be a u64\"),\n\n 
)\n\n .await?;\n\n }\n\n\n\n let buf_len = self.meta_info.info.piece_length;\n", "file_path": "src/torrent.rs", "rank": 27, "score": 25398.215147104922 }, { "content": "}\n\n\n\nimpl Default for TorrentOptions {\n\n fn default() -> Self {\n\n Self {\n\n port: 6881,\n\n max_peer_connections: 25,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/torrent.rs", "rank": 28, "score": 25395.60661447334 }, { "content": " let info_hash = self.get_info_hash_machine();\n\n info_hash.human_readable()\n\n }\n\n\n\n pub fn get_info_hash_machine(&self) -> InfoHash {\n\n self.info_hash\n\n }\n\n\n\n pub fn get_announce_url(&self) -> &str {\n\n &self.meta_info.announce\n\n }\n\n\n\n pub fn get_peer_id(&self) -> PeerId {\n\n self.peer_id\n\n }\n\n\n\n pub fn get_ip(&self) -> &str {\n\n \"localhost\"\n\n }\n\n\n", "file_path": "src/torrent.rs", "rank": 29, "score": 25394.418779093274 }, { "content": " }\n\n }\n\n }\n\n Status::Seeding => {\n\n // chill and wait for people to ask us for pieces\n\n // if we somehow lose pieces, set status to leeching,\n\n // and begin downloading again\n\n }\n\n }\n\n }\n\n _ = debug_timer.tick() => {\n\n let now = std::time::Instant::now();\n\n debug!(\"{}\", self.pieces_bitfield.count_ones());\n\n debug!(\"DEBUG {:?}\", now);\n\n }\n\n _ = (&mut interrupt_rx) => {\n\n debug!(\"RECEIVED INTERRUPT\");\n\n return Ok(());\n\n }\n\n }\n", "file_path": "src/torrent.rs", "rank": 30, "score": 25394.25130004472 }, { "content": " pub fn get_port(&self) -> Port {\n\n self.port\n\n }\n\n\n\n pub fn get_uploaded(&self) -> usize {\n\n self.uploaded\n\n }\n\n\n\n pub fn get_downloaded(&self) -> usize {\n\n self.downloaded\n\n }\n\n\n\n pub fn get_left(&self) -> usize {\n\n self.meta_info.info.length\n\n }\n\n\n\n pub fn get_name(&self) -> &str {\n\n &self.meta_info.info.name\n\n }\n\n\n", "file_path": "src/torrent.rs", "rank": 31, "score": 25393.376170109357 }, { "content": " // This is a hack to manually percent-encode the info_hash,\n\n // as reqwest 
and its internal machinery require that URL params be &str's,\n\n // and provide no way to pass raw bytes.\n\n // So, we reify the URL to a string, and append the info_hash query param manually.\n\n let mut url = url.to_string();\n\n url.push_str(\"&info_hash=\");\n\n url.push_str(\n\n &percent_encoding::percent_encode(\n\n info_hash.as_ref(),\n\n percent_encoding::NON_ALPHANUMERIC,\n\n )\n\n .to_string(),\n\n );\n\n\n\n // This provides some validation for the above hack, that we correctly made a valid URL\n\n let url = reqwest::Url::parse(&url)\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e.to_string()))?;\n\n\n\n let response = reqwest::get(url)\n\n .await\n", "file_path": "src/torrent.rs", "rank": 32, "score": 25392.631881096888 }, { "content": "\n\n let mut params = vec![\n\n (\"peer_id\", peer_id),\n\n (\"ip\", self.get_ip()),\n\n (\"port\", &port),\n\n (\"uploaded\", &uploaded),\n\n (\"downloaded\", &downloaded),\n\n (\"left\", &left),\n\n ];\n\n\n\n let event_string = event.to_string();\n\n\n\n if event_string != \"empty\" {\n\n params.push((\"event\", &event_string));\n\n }\n\n\n\n let url = reqwest::Url::parse_with_params(self.get_announce_url(), params)\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e.to_string()))?;\n\n\n\n // NOTE:\n", "file_path": "src/torrent.rs", "rank": 33, "score": 25391.6672312545 }, { "content": " peers_with_pieces\n\n }\n\n\n\n async fn start_timers(&self) -> Timers {\n\n let mut announce_timer = tokio::time::interval_at(\n\n tokio::time::Instant::now() + std::time::Duration::from_secs(60),\n\n std::time::Duration::from_secs(60),\n\n );\n\n announce_timer.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);\n\n\n\n let mut assessment_timer = tokio::time::interval(std::time::Duration::from_secs(1));\n\n assessment_timer.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);\n\n\n\n let mut debug_timer = 
tokio::time::interval(std::time::Duration::from_secs(5));\n\n debug_timer.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Burst);\n\n\n\n Timers {\n\n announce_timer,\n\n assessment_timer,\n\n debug_timer,\n", "file_path": "src/torrent.rs", "rank": 34, "score": 25391.22209511222 }, { "content": " pub async fn start_torrent(&mut self, info_hash: &str) -> Result<(), std::io::Error> {\n\n let torrent = self.get_torrent_mut(info_hash)?;\n\n torrent.start().await\n\n }\n\n\n\n pub async fn pause_torrent(&mut self, info_hash: &str) -> Result<(), std::io::Error> {\n\n let torrent = self.get_torrent_mut(info_hash)?;\n\n torrent.pause().await\n\n }\n\n\n\n pub async fn delete_torrent(&mut self, info_hash: &str) -> Option<Torrent> {\n\n self.torrents.remove(info_hash)\n\n }\n\n\n\n pub async fn delete_data(&mut self, _info_hash: &str) {\n\n todo!()\n\n }\n\n\n\n pub async fn list_torrents(&self) -> Vec<&Torrent> {\n\n self.torrents.values().collect()\n", "file_path": "src/async_client.rs", "rank": 35, "score": 24792.961194674375 }, { "content": " }\n\n\n\n fn get_torrent_mut(&mut self, info_hash: &str) -> Result<&mut Torrent, std::io::Error> {\n\n let torrent = self.torrents.get_mut(info_hash);\n\n if let Some(torrent) = torrent {\n\n Ok(torrent)\n\n } else {\n\n Err(std::io::Error::new(\n\n std::io::ErrorKind::NotFound,\n\n format!(\"Could not find torrent for info hash {}\", info_hash),\n\n ))\n\n }\n\n }\n\n}\n", "file_path": "src/async_client.rs", "rank": 36, "score": 24790.78405698405 }, { "content": " dot_torrent_bencode,\n\n torrent_data,\n\n self.global_max_peer_connections.clone(),\n\n )?;\n\n if let std::collections::hash_map::Entry::Vacant(e) =\n\n self.torrents.entry(torrent.get_info_hash_human())\n\n {\n\n // if not, add it and return a reference to its Torrent\n\n Ok(e.insert(torrent))\n\n } else {\n\n Err(std::io::Error::new(\n\n std::io::ErrorKind::AlreadyExists,\n\n format!(\n\n \"(info_hash {}) already exists\",\n\n 
torrent.get_info_hash_human()\n\n ),\n\n ))\n\n }\n\n }\n\n\n", "file_path": "src/async_client.rs", "rank": 37, "score": 24787.267941899183 }, { "content": "use crate::torrent;\n\nuse crate::torrent::Torrent;\n\nuse crate::Options;\n\nuse std::collections::HashMap;\n\nuse std::io::Read;\n\nuse std::path::PathBuf;\n\nuse std::sync::Arc;\n\n\n\npub struct AsyncClient {\n\n torrents: HashMap<String, Torrent>,\n\n global_max_peer_connections: Arc<tokio::sync::Semaphore>,\n\n}\n\n\n\nimpl AsyncClient {\n\n pub fn new(options: Options) -> Self {\n\n Self {\n\n torrents: HashMap::new(),\n\n global_max_peer_connections: Arc::new(tokio::sync::Semaphore::new(\n\n options.global_max_peer_connections,\n\n )),\n", "file_path": "src/async_client.rs", "rank": 38, "score": 24786.450751711374 }, { "content": " }\n\n }\n\n\n\n pub async fn add_torrent<R: Read>(\n\n &mut self,\n\n mut dot_torrent_read: R,\n\n torrent_data: PathBuf,\n\n ) -> Result<&Torrent, std::io::Error> {\n\n // read the .torrent\n\n let mut buf = vec![];\n\n dot_torrent_read.read_to_end(&mut buf)?;\n\n\n\n // decode .torrent file\n\n let dot_torrent_bencode = nom_bencode::decode(&buf).map_err(|e| {\n\n std::io::Error::new(std::io::ErrorKind::InvalidData, format!(\"{:?}\", e))\n\n })?;\n\n // validate it\n\n let options = torrent::TorrentOptions::default();\n\n let torrent = Torrent::new(\n\n options,\n", "file_path": "src/async_client.rs", "rank": 39, "score": 24782.528957221497 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nenum Status {\n\n Leeching,\n\n Seeding,\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum AnnounceEvent {\n\n Started,\n\n Stopped,\n\n Completed,\n\n Empty,\n\n}\n\n\n\nimpl Display for AnnounceEvent {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n AnnounceEvent::Started => write!(f, \"started\"),\n\n AnnounceEvent::Stopped => write!(f, \"stopped\"),\n\n AnnounceEvent::Completed => write!(f, \"completed\"),\n\n AnnounceEvent::Empty => 
write!(f, \"empty\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/torrent.rs", "rank": 40, "score": 23684.33463149316 }, { "content": " info_hash: options.info_hash,\n\n peer_to_torrent_tx: options.peer_to_torrent_tx,\n\n global_permit: options.global_permit,\n\n torrent_permit: options.torrent_permit,\n\n piece_length: options.piece_length,\n\n chunk_length: options.chunk_length,\n\n }\n\n }\n\n\n\n #[instrument(skip(self))]\n\n pub(crate) async fn handshake_remote_peer(mut self) -> Result<Peer, std::io::Error> {\n\n self.send_handshake().await?;\n\n info!(\"HANDSHAKE SENT\");\n\n\n\n let handshake = self.receive_handshake().await;\n\n info!(\"HANDSHAKE RECEIVED\");\n\n\n\n if let Some(message) = handshake\n\n // if the info hashes match, we can proceed\n\n // if not, sever the connection and drop the semaphore permit\n", "file_path": "src/handshake_peer.rs", "rank": 58, "score": 21168.52899898617 }, { "content": " torrent_to_peer_rx,\n\n global_permit: self.global_permit,\n\n torrent_permit: self.torrent_permit,\n\n piece_length: self.piece_length,\n\n chunk_length: self.chunk_length,\n\n choke_state: peer::ChokeState::Choked,\n\n interest_state: peer::InterestState::NotInterested,\n\n };\n\n\n\n let peer = Peer::new(peer_options);\n\n\n\n info!(\"transitioning from HandshakePeer to Peer\");\n\n\n\n Ok(peer)\n\n } else {\n\n info!(\"HANDSHAKE WAS BAD1\");\n\n Err(std::io::Error::new(\n\n std::io::ErrorKind::InvalidData,\n\n \"remote info_hash did not match local info hash\",\n\n ))\n", "file_path": "src/handshake_peer.rs", "rank": 59, "score": 21168.049081084147 }, { "content": "\n\n #[instrument(skip(self))]\n\n async fn register_with_owning_torrent(\n\n &mut self,\n\n remote_peer_id: PeerId,\n\n ) -> Result<tokio::sync::mpsc::Receiver<signals::TorrentToPeer>, std::io::Error> {\n\n let (torrent_to_peer_tx, torrent_to_peer_rx) = tokio::sync::mpsc::channel(32);\n\n self.send_to_owned_torrent(signals::PeerToTorrent::Register {\n\n remote_peer_id,\n\n 
torrent_to_peer_tx,\n\n })\n\n .await?;\n\n\n\n Ok(torrent_to_peer_rx)\n\n }\n\n\n\n #[instrument(skip(self))]\n\n async fn send_to_owned_torrent(\n\n &self,\n\n message: signals::PeerToTorrent,\n\n ) -> Result<(), std::io::Error> {\n\n self.peer_to_torrent_tx\n\n .send(message)\n\n .await\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::BrokenPipe, e.to_string()))\n\n }\n\n}\n", "file_path": "src/handshake_peer.rs", "rank": 60, "score": 21166.37173576566 }, { "content": " }\n\n}\n\n\n\nimpl TryFrom<Vec<u8>> for Message {\n\n type Error = String;\n\n\n\n #[allow(clippy::many_single_char_names)]\n\n fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {\n\n match &value[..] {\n\n [] => Ok(Message::Keepalive),\n\n [CHOKE] => Ok(Message::Choke),\n\n [UNCHOKE] => Ok(Message::Unchoke),\n\n [INTERESTED] => Ok(Message::Interested),\n\n [NOT_INTERESTED] => Ok(Message::NotInterested),\n\n [HAVE, a, b, c, d] => Ok(Message::Have {\n\n index: decode_number([*a, *b, *c, *d]),\n\n }),\n\n [BITFIELD, bitfield @ ..] => Ok(Message::Bitfield {\n\n bitfield: Bitfield(\n\n BitVec::<bitvec::order::Msb0, u8>::from_slice(bitfield)\n", "file_path": "src/peer_protocol.rs", "rank": 61, "score": 21166.167106051274 }, { "content": " fn len(&self) -> usize {\n\n // type length + value length\n\n // only keepalive has no type length or value length\n\n match self {\n\n Message::Keepalive => 0,\n\n Message::Choke => 1,\n\n Message::Unchoke => 1,\n\n Message::Interested => 1,\n\n Message::NotInterested => 1,\n\n Message::Have { .. } => 1 + 4,\n\n Message::Bitfield { bitfield } => 1 + bitfield.as_raw_slice().len(),\n\n Message::Request { .. } => 1 + 4 + 4 + 4,\n\n Message::Piece { chunk, .. } => 1 + 4 + 4 + chunk.len(),\n\n Message::Cancel { .. 
} => 1 + 4 + 4 + 4,\n\n }\n\n }\n\n\n\n /// Length Type Value aka \"LTV\"\n\n /// 4 length bytes, 1 type byte, value bytes\n\n fn write_to_bytes(&self, buf: &mut BytesMut) {\n", "file_path": "src/peer_protocol.rs", "rank": 62, "score": 21163.32962417095 }, { "content": " .await?;\n\n\n\n self.connection.flush().await?;\n\n\n\n Ok(())\n\n }\n\n\n\n async fn receive_handshake(\n\n &mut self,\n\n ) -> Option<Result<peer_protocol::Handshake, std::io::Error>> {\n\n self.connection.next().await\n\n }\n\n\n\n fn get_peer_id_machine_readable(&self) -> PeerId {\n\n self.peer_id\n\n }\n\n\n\n fn get_info_hash(&self) -> InfoHash {\n\n self.info_hash\n\n }\n", "file_path": "src/handshake_peer.rs", "rank": 63, "score": 21163.144032508302 }, { "content": " {\n\n let peer_protocol::Handshake {\n\n peer_id: remote_peer_id,\n\n info_hash: remote_info_hash,\n\n ..\n\n } = message?;\n\n if self.info_hash == remote_info_hash {\n\n info!(\"HANDSHAKE WAS GOOD\");\n\n\n\n let torrent_to_peer_rx = self.register_with_owning_torrent(remote_peer_id).await?;\n\n\n\n let connection =\n\n peer_protocol::set_codec(self.connection, peer_protocol::MessageCodec);\n\n\n\n let peer_options = PeerOptions {\n\n connection,\n\n peer_id: self.peer_id,\n\n remote_peer_id,\n\n info_hash: self.info_hash,\n\n peer_to_torrent_tx: self.peer_to_torrent_tx,\n", "file_path": "src/handshake_peer.rs", "rank": 64, "score": 21162.82960498887 }, { "content": " }\n\n } else {\n\n todo!()\n\n }\n\n\n\n // otherwise if the message is NOT a handshake, it is invalid,\n\n // so drop the permit and the connection\n\n }\n\n\n\n #[instrument(skip(self))]\n\n async fn send_handshake(&mut self) -> Result<(), std::io::Error> {\n\n let peer_id = self.get_peer_id_machine_readable();\n\n let info_hash = self.get_info_hash();\n\n\n\n self.connection\n\n .send(peer_protocol::Handshake {\n\n protocol_extension_bytes: peer_protocol::PROTOCOL_EXTENSION_HEADER,\n\n peer_id,\n\n info_hash,\n\n })\n", "file_path": "src/handshake_peer.rs", 
"rank": 65, "score": 21161.71090245466 }, { "content": "use crate::{Begin, Index, InfoHash, Length, PeerId};\n\nuse bitvec::order::Msb0;\n\nuse bitvec::prelude::{bitvec, BitVec};\n\nuse bytes::{Buf, BufMut, BytesMut};\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::ops::BitOrAssign;\n\nuse std::ops::{Deref, DerefMut};\n\nuse tokio::io::{AsyncRead, AsyncWrite};\n\nuse tokio_util::codec::{Decoder, Encoder, Framed};\n\n\n\nconst MAX_MESSAGE_LENGTH: usize = 8 * 1024 * 1024;\n\nconst HANDSHAKE_LENGTH_LENGTH: usize = 1;\n\nconst BITTORRENT_PROTOCOL: &[u8] = b\"BitTorrent protocol\";\n\npub(crate) const PROTOCOL_EXTENSION_HEADER: [u8; 8] = [0, 0, 0, 0, 0, 0, 0, 0];\n\nconst INFO_HASH_LENGTH: usize = 20;\n\nconst PEER_ID_LENGTH: usize = 20;\n\n\n\npub(crate) const HANDSHAKE_LENGTH: usize = HANDSHAKE_LENGTH_LENGTH\n\n + BITTORRENT_PROTOCOL.len()\n\n + PROTOCOL_EXTENSION_HEADER.len()\n", "file_path": "src/peer_protocol.rs", "rank": 66, "score": 21160.227296309713 }, { "content": "// Torrents are composed of \"pieces\".\n\n// Pieces have a 20-byte SHA-1 hash.\n\n// Pieces are composed of an undocumented subunit called \"chunks\".\n\n// When a peer wants a piece, the peer has to ask for:\n\n// - The piece index\n\n// - The chunk offset within the piece (starting at byte 0)\n\n// - The length of the chunk\n\npub(crate) fn chunk_offsets_lengths(\n\n piece_length: usize,\n\n chunk_length: usize,\n\n) -> Vec<(usize, usize)> {\n\n let mut remaining_length = piece_length;\n\n let mut i = 0;\n\n let mut offsets = vec![];\n\n\n\n let chunks_per_piece = piece_length / chunk_length;\n\n\n\n while i < chunks_per_piece {\n\n let offset = chunk_length * i;\n\n offsets.push((offset, chunk_length));\n", "file_path": "src/peer_protocol.rs", "rank": 67, "score": 21159.5586526337 }, { "content": "\n\npub(crate) struct HandshakePeer {\n\n connection: tokio_util::codec::Framed<tokio::net::TcpStream, peer_protocol::HandshakeCodec>,\n\n peer_id: PeerId,\n\n info_hash: InfoHash,\n\n 
peer_to_torrent_tx: tokio::sync::mpsc::Sender<signals::PeerToTorrent>,\n\n global_permit: tokio::sync::OwnedSemaphorePermit,\n\n torrent_permit: tokio::sync::OwnedSemaphorePermit,\n\n piece_length: usize,\n\n chunk_length: usize,\n\n}\n\n\n\nimpl HandshakePeer {\n\n pub(crate) fn new(options: HandshakePeerOptions) -> Self {\n\n let connection =\n\n tokio_util::codec::Framed::new(options.socket, peer_protocol::HandshakeCodec);\n\n\n\n Self {\n\n connection,\n\n peer_id: options.peer_id,\n", "file_path": "src/handshake_peer.rs", "rank": 68, "score": 21159.48136664689 }, { "content": " Message::Cancel {\n\n index,\n\n begin,\n\n length\n\n }\n\n )\n\n }\n\n\n\n #[test]\n\n fn decode_handshake() {\n\n let peer_id = PeerId([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,\n\n ]);\n\n let info_hash = InfoHash([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,\n\n ]);\n\n\n\n let mut m = vec![];\n\n let mut codec = HandshakeCodec;\n\n\n", "file_path": "src/peer_protocol.rs", "rank": 69, "score": 21159.072044085966 }, { "content": " return Ok(None);\n\n }\n\n // Use advance to modify src such that it no longer contains\n\n // this frame.\n\n let data = src[4..4 + length].to_vec();\n\n src.advance(4 + length);\n\n\n\n let message = Message::try_from(data)\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?;\n\n\n\n Ok(Some(message))\n\n }\n\n}\n\n\n\nimpl tokio_util::codec::Encoder<Message> for MessageCodec {\n\n type Error = std::io::Error;\n\n\n\n fn encode(&mut self, item: Message, dst: &mut bytes::BytesMut) -> Result<(), Self::Error> {\n\n item.write_to_bytes(dst);\n\n Ok(())\n", "file_path": "src/peer_protocol.rs", "rank": 70, "score": 21158.350395326346 }, { "content": "use futures_util::{SinkExt, StreamExt};\n\nuse tokio::net::TcpStream;\n\nuse tracing::{info, instrument};\n\n\n\nuse crate::peer::Peer;\n\nuse crate::peer::PeerOptions;\n\nuse crate::signals;\n\nuse crate::{peer, peer_protocol, 
InfoHash, PeerId};\n\n\n\n#[derive(Debug)]\n\npub(crate) struct HandshakePeerOptions {\n\n pub(crate) socket: TcpStream,\n\n pub(crate) peer_id: PeerId,\n\n pub(crate) info_hash: InfoHash,\n\n pub(crate) peer_to_torrent_tx: tokio::sync::mpsc::Sender<signals::PeerToTorrent>,\n\n pub(crate) global_permit: tokio::sync::OwnedSemaphorePermit,\n\n pub(crate) torrent_permit: tokio::sync::OwnedSemaphorePermit,\n\n pub(crate) piece_length: usize,\n\n pub(crate) chunk_length: usize,\n\n}\n", "file_path": "src/handshake_peer.rs", "rank": 71, "score": 21157.922401944703 }, { "content": " .map_err(|e| e.to_string())?,\n\n ),\n\n }),\n\n [REQUEST, a, b, c, d, e, f, g, h, i, j, k, l] => {\n\n let index = decode_number([*a, *b, *c, *d]);\n\n let begin = decode_number([*e, *f, *g, *h]);\n\n let length = decode_number([*i, *j, *k, *l]);\n\n\n\n Ok(Message::Request {\n\n index,\n\n begin,\n\n length,\n\n })\n\n }\n\n [PIECE, a, b, c, d, e, f, g, h, chunk @ ..] => {\n\n let index = decode_number([*a, *b, *c, *d]);\n\n let begin = decode_number([*e, *f, *g, *h]);\n\n let chunk = chunk.to_owned();\n\n\n\n Ok(Message::Piece {\n", "file_path": "src/peer_protocol.rs", "rank": 72, "score": 21157.813159765505 }, { "content": " index,\n\n begin,\n\n chunk,\n\n })\n\n }\n\n [CANCEL, a, b, c, d, e, f, g, h, i, j, k, l] => {\n\n let index = decode_number([*a, *b, *c, *d]);\n\n let begin = decode_number([*e, *f, *g, *h]);\n\n let length = decode_number([*i, *j, *k, *l]);\n\n\n\n Ok(Message::Cancel {\n\n index,\n\n begin,\n\n length,\n\n })\n\n }\n\n _ => Err(\"Could not decode bencoded peer message\".to_string()),\n\n }\n\n }\n\n}\n", "file_path": "src/peer_protocol.rs", "rank": 73, "score": 21156.620052705093 }, { "content": " /// tag byte = 0\n\n Choke,\n\n /// tag byte = 1\n\n Unchoke,\n\n /// tag byte = 2\n\n Interested,\n\n /// tag byte = 3\n\n NotInterested,\n\n /// tag byte = 4\n\n Have { index: Index },\n\n /// tag byte = 5\n\n Bitfield { bitfield: Bitfield },\n\n /// tag byte = 6\n\n 
Request {\n\n index: Index,\n\n begin: Begin,\n\n length: Length,\n\n },\n\n /// tag byte = 7\n\n Piece {\n", "file_path": "src/peer_protocol.rs", "rank": 74, "score": 21156.59673851108 }, { "content": " + INFO_HASH_LENGTH\n\n + PEER_ID_LENGTH;\n\n\n\npub(crate) const CHOKE: u8 = 0;\n\npub(crate) const UNCHOKE: u8 = 1;\n\npub(crate) const INTERESTED: u8 = 2;\n\npub(crate) const NOT_INTERESTED: u8 = 3;\n\npub(crate) const HAVE: u8 = 4;\n\npub(crate) const BITFIELD: u8 = 5;\n\npub(crate) const REQUEST: u8 = 6;\n\npub(crate) const PIECE: u8 = 7;\n\npub(crate) const CANCEL: u8 = 8;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub(crate) struct Handshake {\n\n pub(crate) protocol_extension_bytes: [u8; 8],\n\n pub(crate) peer_id: PeerId,\n\n pub(crate) info_hash: InfoHash,\n\n}\n\n\n", "file_path": "src/peer_protocol.rs", "rank": 75, "score": 21155.69338475572 }, { "content": "impl Handshake {\n\n fn write_to_bytes(&self, buf: &mut BytesMut) {\n\n buf.reserve(HANDSHAKE_LENGTH);\n\n buf.put_u8(19);\n\n buf.extend_from_slice(BITTORRENT_PROTOCOL);\n\n buf.extend_from_slice(&self.protocol_extension_bytes);\n\n buf.extend_from_slice(self.info_hash.as_ref());\n\n buf.extend_from_slice(self.peer_id.as_ref());\n\n }\n\n}\n\n\n\nimpl TryFrom<&[u8]> for Handshake {\n\n type Error = String;\n\n\n\n #[allow(clippy::many_single_char_names)]\n\n fn try_from(value: &[u8]) -> Result<Self, Self::Error> {\n\n match value {\n\n // '19' + \"BitTorrent protocol\"\n\n [19, b'B', b'i', b't', b'T', b'o', b'r', b'r', b'e', b'n', b't', b' ', b'p', b'r', b'o', b't', b'o', b'c', b'o', b'l', rest @ ..] 
=>\n\n {\n", "file_path": "src/peer_protocol.rs", "rank": 76, "score": 21155.687851382707 }, { "content": " let (protocol_extension_bytes, rest) = rest.split_at(8);\n\n let (info_hash, peer_id) = rest.split_at(20);\n\n\n\n let protocol_extension_bytes: [u8; 8] = protocol_extension_bytes\n\n .try_into()\n\n .expect(\"Protocol extension bytes must be length 20\");\n\n let peer_id: PeerId =\n\n PeerId(peer_id[..20].try_into().expect(\"Peer ID must be length 20\"));\n\n let info_hash: InfoHash =\n\n InfoHash(info_hash.try_into().expect(\"Info hash must be length 20\"));\n\n\n\n Ok(Handshake {\n\n protocol_extension_bytes,\n\n peer_id,\n\n info_hash,\n\n })\n\n }\n\n _ => Err(\"Could not decode bencoded peer message\".to_string()),\n\n }\n\n }\n", "file_path": "src/peer_protocol.rs", "rank": 77, "score": 21155.18854448055 }, { "content": "\n\n#[derive(Debug)]\n\npub(crate) struct MessageCodec;\n\n\n\n// ideally, this should be split into two codecs:\n\n// one for the handshake and one for the regular frames\n\nimpl tokio_util::codec::Decoder for MessageCodec {\n\n type Item = Message;\n\n\n\n type Error = std::io::Error;\n\n\n\n fn decode(&mut self, src: &mut bytes::BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n if src.len() < 4 {\n\n // Not enough data to read length marker.\n\n return Ok(None);\n\n }\n\n\n\n // Read length marker.\n\n let mut length_bytes = [0u8; 4];\n\n length_bytes.copy_from_slice(&src[..4]);\n", "file_path": "src/peer_protocol.rs", "rank": 78, "score": 21154.536378178967 }, { "content": " let handshake = Handshake::try_from(&data[..])\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))?;\n\n\n\n Ok(Some(handshake))\n\n }\n\n}\n\n\n\nimpl tokio_util::codec::Encoder<Handshake> for HandshakeCodec {\n\n type Error = std::io::Error;\n\n\n\n fn encode(&mut self, item: Handshake, dst: &mut bytes::BytesMut) -> Result<(), Self::Error> {\n\n item.write_to_bytes(dst);\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, 
PartialEq)]\n\npub(crate) enum Message {\n\n /// no tag byte\n\n Keepalive,\n", "file_path": "src/peer_protocol.rs", "rank": 79, "score": 21152.979235330386 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct HandshakeCodec;\n\n\n\nimpl tokio_util::codec::Decoder for HandshakeCodec {\n\n type Item = Handshake;\n\n\n\n type Error = std::io::Error;\n\n\n\n fn decode(&mut self, src: &mut bytes::BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n if src.len() < HANDSHAKE_LENGTH {\n\n src.reserve(HANDSHAKE_LENGTH);\n\n return Ok(None);\n\n }\n\n\n\n let data = src[0..HANDSHAKE_LENGTH].to_vec();\n\n\n\n src.advance(HANDSHAKE_LENGTH);\n\n\n", "file_path": "src/peer_protocol.rs", "rank": 80, "score": 21152.95133189364 }, { "content": " 1 + expected_index.len() as u32\n\n + expected_begin.len() as u32\n\n + expected_length.len() as u32,\n\n ));\n\n expected.push(REQUEST);\n\n expected.extend_from_slice(&expected_index);\n\n expected.extend_from_slice(&expected_begin);\n\n expected.extend_from_slice(&expected_length);\n\n\n\n assert_eq!(encoded, expected);\n\n }\n\n\n\n #[test]\n\n fn encode_piece() {\n\n let index = 31;\n\n let begin = 2u32.pow(14);\n\n let chunk = vec![1; 2usize.pow(14)];\n\n let cloned_chunk = chunk.clone();\n\n\n\n let message = Message::Piece {\n", "file_path": "src/peer_protocol.rs", "rank": 81, "score": 21152.420010917172 }, { "content": " index: Index,\n\n begin: Begin,\n\n chunk: Vec<u8>,\n\n },\n\n /// tag byte = 8\n\n Cancel {\n\n index: Index,\n\n begin: Begin,\n\n length: Length,\n\n },\n\n}\n\n\n\nimpl Message {\n\n /// in the bittorrent protocol,\n\n /// messages are of the form \"LTV\", or Length Type Value,\n\n /// where Length is a 4 byte big endian u32,\n\n /// Type is a single byte, and Value is a sequence of bytes\n\n /// of length Length - 1 (for the type tag byte).\n\n /// Keepalive alone has no Tag byte or Value, only a four-byte length of 0,\n\n /// i.e., \"0u8 0u8 0u8 0u8\"\n", "file_path": "src/peer_protocol.rs", 
"rank": 82, "score": 21152.29692713835 }, { "content": " let chunk = vec![1; 2usize.pow(14)];\n\n let cloned_chunk = chunk.clone();\n\n\n\n let expected_index = encode_number(index);\n\n let expected_begin = encode_number(begin);\n\n\n\n let mut buf = vec![];\n\n\n\n buf.extend_from_slice(&encode_number(1 + 4 + 4 + chunk.len() as u32));\n\n buf.push(PIECE);\n\n buf.extend_from_slice(&expected_index);\n\n buf.extend_from_slice(&expected_begin);\n\n buf.extend_from_slice(&cloned_chunk);\n\n\n\n assert_eq!(\n\n decode(&buf),\n\n Message::Piece {\n\n index,\n\n begin,\n\n chunk\n", "file_path": "src/peer_protocol.rs", "rank": 83, "score": 21151.75792031055 }, { "content": " let peer_id = PeerId(rand::random());\n\n let info_hash = InfoHash(rand::random());\n\n\n\n let mut expected = vec![];\n\n\n\n expected.push(19);\n\n expected.extend_from_slice(&BITTORRENT_PROTOCOL);\n\n expected.extend_from_slice(&PROTOCOL_EXTENSION_HEADER);\n\n expected.extend_from_slice(info_hash.as_ref());\n\n expected.extend_from_slice(peer_id.as_ref());\n\n\n\n let message = Handshake {\n\n protocol_extension_bytes: PROTOCOL_EXTENSION_HEADER,\n\n peer_id,\n\n info_hash,\n\n };\n\n\n\n let mut codec = HandshakeCodec;\n\n let mut buf = BytesMut::new();\n\n codec.encode(message, &mut buf).unwrap();\n", "file_path": "src/peer_protocol.rs", "rank": 84, "score": 21151.278344127688 }, { "content": " Message::Have { index } => {\n\n buf.put_u8(HAVE);\n\n buf.extend_from_slice(&index.to_be_bytes());\n\n }\n\n Message::Bitfield { bitfield } => {\n\n let bitfield_as_bytes = bitfield.as_raw_slice();\n\n buf.put_u8(BITFIELD);\n\n buf.extend_from_slice(bitfield_as_bytes);\n\n }\n\n Message::Request {\n\n index,\n\n begin,\n\n length,\n\n } => {\n\n buf.put_u8(REQUEST);\n\n buf.extend_from_slice(&encode_number(*index));\n\n buf.extend_from_slice(&encode_number(*begin));\n\n buf.extend_from_slice(&encode_number(*length));\n\n }\n\n Message::Piece {\n", "file_path": "src/peer_protocol.rs", "rank": 85, "score": 
21150.78850243822 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub(crate) struct Bitfield(BitVec<Msb0, u8>);\n\n\n\nimpl Bitfield {\n\n pub(crate) fn new(length: usize) -> Self {\n\n Self(bitvec![Msb0, u8; 0; length])\n\n }\n\n}\n\n\n\nimpl BitOrAssign for Bitfield {\n\n fn bitor_assign(&mut self, rhs: Self) {\n\n self.0.bitor_assign(rhs.0);\n\n }\n\n}\n\n\n\nimpl Deref for Bitfield {\n\n type Target = BitVec<Msb0, u8>;\n", "file_path": "src/peer_protocol.rs", "rank": 86, "score": 21150.405668148524 }, { "content": " let len_len = 4;\n\n let value_len = self.len();\n\n let len_slice = u32::to_be_bytes(value_len as u32);\n\n buf.reserve(len_len + value_len);\n\n buf.extend_from_slice(&len_slice);\n\n\n\n match self {\n\n Message::Keepalive => (),\n\n Message::Choke => {\n\n buf.put_u8(CHOKE);\n\n }\n\n Message::Unchoke => {\n\n buf.put_u8(UNCHOKE);\n\n }\n\n Message::Interested => {\n\n buf.put_u8(INTERESTED);\n\n }\n\n Message::NotInterested => {\n\n buf.put_u8(NOT_INTERESTED);\n\n }\n", "file_path": "src/peer_protocol.rs", "rank": 87, "score": 21150.335775917338 }, { "content": " m.push(19);\n\n m.extend_from_slice(BITTORRENT_PROTOCOL);\n\n m.extend_from_slice(&PROTOCOL_EXTENSION_HEADER);\n\n m.extend_from_slice(peer_id.as_ref());\n\n m.extend_from_slice(info_hash.as_ref());\n\n\n\n let mut buf = BytesMut::from(m.as_slice());\n\n\n\n assert_eq!(\n\n codec.decode(&mut buf).unwrap().unwrap(),\n\n Handshake {\n\n protocol_extension_bytes: PROTOCOL_EXTENSION_HEADER,\n\n peer_id,\n\n info_hash\n\n }\n\n );\n\n }\n\n }\n\n\n\n const THIRTY_TWO_K: usize = 2usize.pow(15);\n", "file_path": "src/peer_protocol.rs", "rank": 88, "score": 21149.64496753626 }, { "content": " fn encode_request() {\n\n let index = 11;\n\n let begin = 0;\n\n let length = 2u32.pow(14);\n\n\n\n let message = Message::Request {\n\n index,\n\n begin,\n\n length,\n\n };\n\n\n\n let encoded = encode(message);\n\n\n\n let expected_index = encode_number(index);\n\n let 
expected_begin = encode_number(begin);\n\n let expected_length = encode_number(length);\n\n\n\n let mut expected = vec![];\n\n\n\n expected.extend_from_slice(&encode_number(\n", "file_path": "src/peer_protocol.rs", "rank": 89, "score": 21149.619378164505 }, { "content": " #[test]\n\n fn decode_have() {\n\n let buf = vec![0, 0, 0, 5, HAVE, 0, 0, 0, 44];\n\n assert_eq!(decode(&buf), Message::Have { index: 44 });\n\n }\n\n\n\n #[test]\n\n fn decode_bitfield() {\n\n let bitfield = Bitfield(bitvec![Msb0, u8; 0, 0, 1, 0, 1, 0, 0, 0]);\n\n let buf = vec![0, 0, 0, 2, BITFIELD, 40];\n\n assert_eq!(decode(&buf), Message::Bitfield { bitfield });\n\n }\n\n\n\n #[test]\n\n fn decode_request() {\n\n let index = 11;\n\n let begin = 0;\n\n let length = 2u32.pow(14);\n\n\n\n let mut buf = vec![0, 0, 0, 13];\n", "file_path": "src/peer_protocol.rs", "rank": 90, "score": 21149.55350009232 }, { "content": " index,\n\n begin,\n\n chunk,\n\n } => {\n\n buf.put_u8(PIECE);\n\n buf.extend_from_slice(&encode_number(*index));\n\n buf.extend_from_slice(&encode_number(*begin));\n\n buf.extend_from_slice(chunk);\n\n }\n\n Message::Cancel {\n\n index,\n\n begin,\n\n length,\n\n } => {\n\n buf.put_u8(CANCEL);\n\n buf.extend_from_slice(&encode_number(*index));\n\n buf.extend_from_slice(&encode_number(*begin));\n\n buf.extend_from_slice(&encode_number(*length));\n\n }\n\n }\n", "file_path": "src/peer_protocol.rs", "rank": 91, "score": 21149.526660018597 }, { "content": " index,\n\n begin,\n\n chunk,\n\n };\n\n\n\n let encoded = encode(message);\n\n\n\n let expected_index = encode_number(index);\n\n let expected_begin = encode_number(begin);\n\n\n\n let mut expected = vec![];\n\n\n\n expected.extend_from_slice(&encode_number(\n\n 1 + expected_index.len() as u32\n\n + expected_begin.len() as u32\n\n + cloned_chunk.len() as u32,\n\n ));\n\n expected.push(PIECE);\n\n expected.extend_from_slice(&expected_index);\n\n expected.extend_from_slice(&expected_begin);\n", "file_path": "src/peer_protocol.rs", 
"rank": 92, "score": 21149.302977774238 }, { "content": " expected.extend_from_slice(&cloned_chunk);\n\n\n\n assert_eq!(encoded, expected);\n\n }\n\n\n\n #[test]\n\n fn encode_cancel() {\n\n let index = 11;\n\n let begin = 0;\n\n let length = 2u32.pow(14);\n\n\n\n let message = Message::Cancel {\n\n index,\n\n begin,\n\n length,\n\n };\n\n\n\n let encoded = encode(message);\n\n\n\n let expected_index = encode_number(index);\n", "file_path": "src/peer_protocol.rs", "rank": 93, "score": 21149.290751259163 }, { "content": " }\n\n\n\n #[test]\n\n fn encode_choke() {\n\n let encoded = encode(Message::Choke);\n\n assert_eq!(encoded, vec![0, 0, 0, 1, CHOKE]);\n\n }\n\n\n\n #[test]\n\n fn encode_unchoke() {\n\n let encoded = encode(Message::Unchoke);\n\n assert_eq!(encoded, vec![0, 0, 0, 1, UNCHOKE]);\n\n }\n\n\n\n #[test]\n\n fn encode_interested() {\n\n let encoded = encode(Message::Interested);\n\n assert_eq!(encoded, vec![0, 0, 0, 1, INTERESTED]);\n\n }\n\n\n", "file_path": "src/peer_protocol.rs", "rank": 94, "score": 21147.840532172046 }, { "content": " const SIXTY_FOUR_K: usize = 2usize.pow(16);\n\n const ONE_HUNDRED_TWENTY_EIGHT_K: usize = 2usize.pow(17);\n\n\n\n #[test]\n\n fn offsets() {\n\n // piece of length SIXTY_FOUR_K\n\n let offsets = chunk_offsets_lengths(SIXTY_FOUR_K, THIRTY_TWO_K);\n\n assert_eq!(\n\n offsets,\n\n vec![(0, THIRTY_TWO_K), (THIRTY_TWO_K, THIRTY_TWO_K)]\n\n );\n\n\n\n // regular piece of length ONE_HUNDRED_TWENTY_EIGHT_K\n\n let offsets = chunk_offsets_lengths(ONE_HUNDRED_TWENTY_EIGHT_K, THIRTY_TWO_K);\n\n assert_eq!(\n\n offsets,\n\n vec![\n\n (0, THIRTY_TWO_K),\n\n (THIRTY_TWO_K, THIRTY_TWO_K),\n\n (THIRTY_TWO_K * 2, THIRTY_TWO_K),\n", "file_path": "src/peer_protocol.rs", "rank": 95, "score": 21147.788496654972 }, { "content": "\n\n buf.push(REQUEST);\n\n buf.extend_from_slice(&encode_number(index));\n\n buf.extend_from_slice(&encode_number(begin));\n\n buf.extend_from_slice(&encode_number(length));\n\n\n\n assert_eq!(\n\n 
decode(&buf),\n\n Message::Request {\n\n index,\n\n begin,\n\n length\n\n }\n\n )\n\n }\n\n\n\n #[test]\n\n fn decode_piece() {\n\n let index = 31;\n\n let begin = 2u32.pow(14);\n", "file_path": "src/peer_protocol.rs", "rank": 96, "score": 21147.630653124146 }, { "content": " i += 1;\n\n remaining_length -= chunk_length;\n\n }\n\n\n\n if remaining_length > 0 {\n\n let offset = chunk_length * i;\n\n offsets.push((offset, remaining_length));\n\n }\n\n\n\n offsets\n\n}\n\n\n\npub(crate) fn set_codec<T: AsyncRead + AsyncWrite, C1, E, C2: Encoder<E> + Decoder>(\n\n framed: Framed<T, C1>,\n\n codec: C2,\n\n) -> Framed<T, C2> {\n\n let parts1 = framed.into_parts();\n\n let mut parts2 = Framed::new(parts1.io, codec).into_parts();\n\n parts2.read_buf = parts1.read_buf;\n\n parts2.write_buf = parts1.write_buf;\n", "file_path": "src/peer_protocol.rs", "rank": 97, "score": 21147.169040857323 }, { "content": " let encoded = buf.to_vec();\n\n\n\n assert_eq!(encoded, expected);\n\n }\n\n }\n\n\n\n mod decoding {\n\n use super::helpers::decode;\n\n use super::*;\n\n\n\n #[test]\n\n fn decode_keepalive() {\n\n let buf = vec![0, 0, 0, 0];\n\n assert_eq!(decode(&buf), Message::Keepalive);\n\n }\n\n\n\n #[test]\n\n fn decode_choke() {\n\n let buf = vec![0, 0, 0, 1, CHOKE];\n\n assert_eq!(decode(&buf), Message::Choke);\n", "file_path": "src/peer_protocol.rs", "rank": 98, "score": 21146.46556777552 }, { "content": " #[test]\n\n fn encode_not_interested() {\n\n let encoded = encode(Message::NotInterested);\n\n assert_eq!(encoded, vec![0, 0, 0, 1, NOT_INTERESTED]);\n\n }\n\n\n\n #[test]\n\n fn encode_have() {\n\n let message = Message::Have { index: 63 };\n\n let encoded = encode(message);\n\n\n\n let mut expected = vec![];\n\n\n\n let expected_index = encode_number(63);\n\n\n\n expected.extend_from_slice(&encode_number(1 + expected_index.len() as u32));\n\n expected.push(HAVE);\n\n expected.extend_from_slice(&expected_index);\n\n\n\n assert_eq!(encoded, expected);\n", "file_path": 
"src/peer_protocol.rs", "rank": 99, "score": 21145.968238168127 } ]
Rust
src/coords.rs
mabruzzo/nmea0183
fd1a2d8a62cbcb6f3a9aeecabae88f7b81e850eb
use core::convert::TryFrom; #[derive(Debug, PartialEq, Clone)] pub enum Hemisphere { North, South, East, West, } #[derive(Debug, PartialEq, Clone)] pub struct Latitude { pub degrees: u8, pub minutes: u8, pub seconds: f32, pub hemisphere: Hemisphere, } impl TryFrom<f32> for Latitude { type Error = &'static str; fn try_from(value: f32) -> Result<Self, Self::Error> { TryFrom::try_from(value as f64) } } impl TryFrom<f64> for Latitude { type Error = &'static str; fn try_from(from: f64) -> Result<Self, Self::Error> { if from >= 90f64 || from <= -90f64 { Err("Latitude is not in range -90 to 90 degrees!") } else { let (value, hemisphere) = if from >= 0f64 { (from, Hemisphere::North) } else { (-from, Hemisphere::South) }; let degrees = value as u8; let min_sec = (value - degrees as f64) * 60f64; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok({ Latitude { degrees, minutes, seconds, hemisphere, } }) } } } impl Latitude { pub(crate) fn parse( coord: Option<&str>, hemi: Option<&str>, ) -> Result<Option<Self>, &'static str> { match (coord, hemi) { (Some(lat), Some(lat_hemi)) if lat.len() == 0 && lat_hemi.len() == 0 => Ok(None), (Some(lat), Some(lat_hemi)) => { if lat.len() < 4 { return Err("Latitude field is too short!"); } let hemisphere = match lat_hemi { "N" => Hemisphere::North, "S" => Hemisphere::South, _ => return Err("Latitude hemisphere field has wrong format!"), }; let degrees = lat[..2] .parse::<u8>() .map_err(|_| "Wrong latitude field format")?; let min_sec = lat[2..] 
.parse::<f64>() .map_err(|_| "Wrong latitude field format")?; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok(Some(Latitude { degrees, minutes, seconds, hemisphere, })) } (None, Some(_)) => Err("Could not parse latitude from hemisphere only"), (Some(_), None) => Err("Could not parse latitude from coordinate only"), (None, None) => Ok(None), } } pub fn as_f64(&self) -> f64 { let result = self.degrees as f64 + (self.minutes as f64) / 60f64 + (self.seconds as f64) / 3600f64; match self.hemisphere { Hemisphere::North => result, Hemisphere::South => -result, Hemisphere::East => panic!("Wrong East hemisphere for latitude!"), Hemisphere::West => panic!("Wrong West hemisphere for latitude!"), } } pub fn is_north(&self) -> bool { self.hemisphere == Hemisphere::North } pub fn is_south(&self) -> bool { self.hemisphere == Hemisphere::South } } #[derive(Debug, PartialEq, Clone)] pub struct Longitude { pub degrees: u8, pub minutes: u8, pub seconds: f32, pub hemisphere: Hemisphere, } impl TryFrom<f32> for Longitude { type Error = &'static str; fn try_from(value: f32) -> Result<Self, Self::Error> { TryFrom::try_from(value as f64) } } impl TryFrom<f64> for Longitude { type Error = &'static str; fn try_from(from: f64) -> Result<Self, Self::Error> { if from >= 180f64 || from <= -180f64 { Err("Latitude is not in range -180 to 180 degrees!") } else { let (value, hemisphere) = if from >= 0f64 { (from, Hemisphere::East) } else { (-from, Hemisphere::West) }; let degrees = value as u8; let min_sec = (value - degrees as f64) * 60f64; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok({ Longitude { degrees, minutes, seconds, hemisphere, } }) } } } impl Longitude { pub(crate) fn parse( coord: Option<&str>, hemi: Option<&str>, ) -> Result<Option<Self>, &'static str> { match (coord, hemi) { (Some(lon), Some(lon_hemi)) if lon.len() == 0 && lon_hemi.len() == 0 => Ok(None), (Some(lon), Some(lon_hemi)) => { if 
lon.len() < 5 { return Err("Longitude field is too short!"); } let hemisphere = match lon_hemi { "E" => Hemisphere::East, "W" => Hemisphere::West, _ => return Err("Longitude hemisphere field has wrong format!"), }; let degrees = lon[..3] .parse::<u8>() .map_err(|_| "Wrong longitude field format")?; let min_sec = lon[3..] .parse::<f64>() .map_err(|_| "Wrong longitude field format")?; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok(Some(Longitude { degrees, minutes, seconds, hemisphere, })) } (None, Some(_)) => Err("Could not parse longitude from hemisphere only"), (Some(_), None) => Err("Could not parse longitude from coordinate only"), (None, None) => Ok(None), } } pub fn as_f64(&self) -> f64 { let result = self.degrees as f64 + (self.minutes as f64) / 60f64 + (self.seconds as f64) / 3600f64; match self.hemisphere { Hemisphere::West => -result, Hemisphere::East => result, Hemisphere::North => panic!("Wrong North hemisphere for latitude!"), Hemisphere::South => panic!("Wrong South hemisphere for latitude!"), } } pub fn is_west(&self) -> bool { self.hemisphere == Hemisphere::West } pub fn is_east(&self) -> bool { self.hemisphere == Hemisphere::East } } #[derive(Debug, PartialEq, Clone)] pub struct Altitude { pub meters: f32, } impl Altitude { pub(crate) fn parse(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some("") => Ok(None), Some(alt) => Ok(Some(Altitude { meters: alt .parse::<f32>() .map_err(|_| "Wrong altitude field format")?, })), _ => Ok(None), } } } #[derive(Debug, PartialEq, Clone)] pub struct Speed { knots: f32, } impl Speed { pub fn from_knots(speed: f32) -> Speed { Speed { knots: speed } } pub fn from_mps(speed: f32) -> Speed { Speed { knots: speed * 1.94384f32, } } pub fn from_mph(speed: f32) -> Speed { Speed { knots: speed * 0.868976f32, } } pub fn from_kph(speed: f32) -> Speed { Speed { knots: speed * 0.539957f32, } } pub fn as_knots(&self) -> f32 { self.knots } pub fn 
as_kph(&self) -> f32 { self.knots * 1.852 } pub fn as_mph(&self) -> f32 { self.knots * 1.15078 } pub fn as_mps(&self) -> f32 { self.knots * 0.514444 } pub(crate) fn parse(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some(speed) if speed.len() == 0 => Ok(None), Some(speed) => speed .parse::<f32>() .map_err(|_| "Wrong speed field format") .and_then(|knots| Ok(Some(Speed { knots }))), _ => Ok(None), } } } #[derive(Debug, PartialEq, Clone)] pub struct Course { pub degrees: f32, } impl From<f32> for Course { fn from(value: f32) -> Self { Course { degrees: value } } } impl Course { pub(crate) fn parse(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some(course) if course.len() == 0 => Ok(None), Some(course) => course .parse::<f32>() .map_err(|_| "Wrong course field format") .and_then(|degrees| Ok(Some(Course { degrees }))), _ => Ok(None), } } } #[derive(Debug, PartialEq, Clone)] pub struct MagneticCourse { degrees: f32, } impl From<f32> for MagneticCourse { fn from(value: f32) -> Self { MagneticCourse { degrees: value } } } impl MagneticCourse { pub(crate) fn parse_from_str(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some(course) if course.len() == 0 => Ok(None), Some(course) => course .parse::<f32>() .map_err(|_| "Wrong course field format") .and_then(|degrees| Ok(Some(MagneticCourse { degrees }))), _ => Ok(None), } } pub(crate) fn parse_from_mvar_mdir( true_course: &Option<Course>, mvar: Option<&str>, mdir: Option<&str>, ) -> Result<Option<Self>, &'static str> { if let (Some(course), Some(variation), Some(direction)) = (true_course, mvar, mdir) { if variation.len() == 0 && direction.len() == 0 { Ok(None) } else { let magnetic = variation .parse::<f32>() .map_err(|_| "Wrong magnetic variation field format!")?; match direction { "E" => Ok(Some(MagneticCourse { degrees: course.degrees - magnetic, })), "W" => Ok(Some(MagneticCourse { degrees: course.degrees + magnetic, })), _ => 
Err("Wrong direction field for magnetic variation"), } } } else { Ok(None) } } }
use core::convert::TryFrom; #[derive(Debug, PartialEq, Clone)] pub enum Hemisphere { North, South, East, West, } #[derive(Debug, PartialEq, Clone)] pub struct Latitude { pub degrees: u8, pub minutes: u8, pub seconds: f32, pub hemisphere: Hemisphere, } impl TryFrom<f32> for Latitude { type Error = &'static str; fn try_from(value: f32) -> Result<Self, Self::Error> { TryFrom::try_from(value as f64) } } impl TryFrom<f64> for Latitude { type Error = &'static str; fn try_from(from: f64) -> Result<Self, Self::Error> { if from >= 90f64 || from <= -90f64 { Err("Latitude is not in range -90 to 90 degrees!") } else { let (value, hemisphere) = if from >= 0f64 { (from, Hemisphere::North) } else { (-from, Hemisphere::South) }; let degrees = value as u8; let min_sec = (value - degrees as f64) * 60f64; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok({ Latitude { degrees, minutes, seconds, hemisphere, } }) } } } impl Latitude { pub(crate) fn parse( coord: Option<&str>, hemi: Option<&str>, ) -> Result<Option<Self>, &'static str> { match (coord, hemi) { (Some(lat), Some(lat_hemi)) if lat.len() == 0 && lat_hemi.len() == 0 => Ok(None), (Some(lat), Some(lat_hemi)) => { if lat.len() < 4 { return Err("Latitude field is too short!"); } let hemisphere = match lat_hemi { "N" => Hemisphere::North, "S" => Hemisphere::South, _ => return Err("Latitude hemisphere field has wrong format!"), }; let degrees = lat[..2] .parse::<u8>() .map_err(|_| "Wrong latitude field format")?; let min_sec = lat[2..] 
.parse::<f64>() .map_err(|_| "Wrong latitude field format")?; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok(Some(Latitude { degrees, minutes, seconds, hemisphere, })) } (None, Some(_)) => Err("Could not parse latitude from hemisphere only"), (Some(_), None) => Err("Could not parse latitude from coordinate only"), (None, None) => Ok(None), } } pub fn as_f64(&self) -> f64 { let result = self.degrees as f64 + (self.minutes as f64) / 60f64 + (self.seconds as f64) / 3600f64; match self.hemisphere { Hemisphere::North => result, Hemisphere::South => -result, Hemisphere::East => panic!("Wrong East hemisphere for latitude!"), Hemisphere::West => panic!("Wrong West hemisphere for latitude!"), } } pub fn is_north(&self) -> bool { self.hemisphere == Hemisphere::North } pub fn is_south(&self) -> bool { self.hemisphere == Hemisphere::South } } #[derive(Debug, PartialEq, Clone)] pub struct Longitude { pub degrees: u8, pub minutes: u8, pub seconds: f32, pub hemisphere: Hemisphere, } impl TryFrom<f32> for Longitude { type Error = &'static str; fn try_from(value: f32) -> Result<Self, Self::Error> { TryFrom::try_from(value as f64) } } impl TryFrom<f64> for Longitude { type Error = &'static str; fn try_from(from: f64) -> Result<Self, Self::Error> { if from >= 180f64 || from <= -180f64 { Err("Latitude is not in range -180 to 180 degrees!") } else { let (value, hemisphere) = if from >= 0f64 { (from, Hemisphere::East) } else { (-from, Hemisphere::West) }; let degrees = value as u8; let min_sec = (value - degrees as f64) * 60f64; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok({ Longitude { degrees, minutes, seconds, hemisphere, } }) } } } impl Longitude { pub(crate) fn parse( coord: Option<&str>, hemi: Option<&str>, ) -> Result<Option<Self>, &'static str> { match (coord, hemi) { (Some(lon), Some(lon_hemi)) if lon.len() == 0 && lon_hemi.len() == 0 => Ok(None), (Some(lon), Some(lon_hemi)) => { if 
lon.len() < 5 { return Err("Longitude field is too short!"); } let hemisphere = match lon_hemi { "E" => Hemisphere::East, "W" => Hemisphere::West, _ => return Err("Longitude hemisphere field has wrong format!"), }; let degrees = lon[..3] .parse::<u8>() .map_err(|_| "Wrong longitude field format")?; let min_sec = lon[3..] .parse::<f64>() .map_err(|_| "Wrong longitude field format")?; let minutes = min_sec as u8; let seconds = ((min_sec - minutes as f64) * 60f64) as f32; Ok(Some(Longitude { degrees, minutes, seconds, hemisphere, })) } (None, Some(_)) => Err("Could not parse longitude from hemisphere only"), (Some(_), None) => Err("Could not parse longitude from coordinate only"), (None, None) => Ok(None), } } pub fn as_f64(&self) -> f64 { let result = self.degrees as f64 + (self.minutes as f64) / 60f64 + (self.seconds as f64) / 3600f64; match self.hemisphere { Hemisphere::West => -result, Hemisphere::East => result, Hemisphere::North => panic!("Wrong North hemisphere for latitude!"), Hemisphere::South => panic!("Wrong South hemisphere for latitude!"), } } pub fn is_west(&self) -> bool { self.hemisphere == Hemisphere::West } pub fn is_east(&self) -> bool { self.hemisphere == Hemisphere::East } } #[derive(Debug, PartialEq, Clone)] pub struct Altitude { pub meters: f32, } impl Altitude { pub(crate) fn parse(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some("") => Ok(None), Some(alt) => Ok(Some(Altitude { meters: alt .parse::<f32>() .map_err(|_| "Wrong altitude field format")?, })), _ => Ok(None), } } } #[derive(Debug, PartialEq, Clone)] pub struct Speed { knots: f32, } impl Speed { pub fn from_knots(speed: f32) -> Speed { Speed { knots: speed } } pub fn from_mps(speed: f32) -> Speed { Speed { knots: speed * 1.94384f32, } } pub fn from_mph(speed: f32) -> Speed { Speed { knots: speed * 0.868976f32, } } pub fn from_kph(speed: f32) -> Speed { Speed { knots: speed * 0.539957f32, } } pub fn as_knots(&self) -> f32 { self.knots } pub fn 
as_kph(&self) -> f32 { self.knots * 1.852 } pub fn as_mph(&self) -> f32 { self.knots * 1.15078 } pub fn as_mps(&self) -> f32 { self.knots * 0.514444 } pub(crate) fn parse(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some(speed) if speed.len() == 0 => Ok(None), Some(speed) => speed .
} #[derive(Debug, PartialEq, Clone)] pub struct Course { pub degrees: f32, } impl From<f32> for Course { fn from(value: f32) -> Self { Course { degrees: value } } } impl Course { pub(crate) fn parse(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some(course) if course.len() == 0 => Ok(None), Some(course) => course .parse::<f32>() .map_err(|_| "Wrong course field format") .and_then(|degrees| Ok(Some(Course { degrees }))), _ => Ok(None), } } } #[derive(Debug, PartialEq, Clone)] pub struct MagneticCourse { degrees: f32, } impl From<f32> for MagneticCourse { fn from(value: f32) -> Self { MagneticCourse { degrees: value } } } impl MagneticCourse { pub(crate) fn parse_from_str(input: Option<&str>) -> Result<Option<Self>, &'static str> { match input { Some(course) if course.len() == 0 => Ok(None), Some(course) => course .parse::<f32>() .map_err(|_| "Wrong course field format") .and_then(|degrees| Ok(Some(MagneticCourse { degrees }))), _ => Ok(None), } } pub(crate) fn parse_from_mvar_mdir( true_course: &Option<Course>, mvar: Option<&str>, mdir: Option<&str>, ) -> Result<Option<Self>, &'static str> { if let (Some(course), Some(variation), Some(direction)) = (true_course, mvar, mdir) { if variation.len() == 0 && direction.len() == 0 { Ok(None) } else { let magnetic = variation .parse::<f32>() .map_err(|_| "Wrong magnetic variation field format!")?; match direction { "E" => Ok(Some(MagneticCourse { degrees: course.degrees - magnetic, })), "W" => Ok(Some(MagneticCourse { degrees: course.degrees + magnetic, })), _ => Err("Wrong direction field for magnetic variation"), } } } else { Ok(None) } } }
parse::<f32>() .map_err(|_| "Wrong speed field format") .and_then(|knots| Ok(Some(Speed { knots }))), _ => Ok(None), } }
function_block-function_prefix_line
[ { "content": "fn from_ascii(bytes: &[u8]) -> Result<&str, &'static str> {\n\n if bytes.iter().all(|b| *b < 128) {\n\n Ok(unsafe { core::str::from_utf8_unchecked(bytes) })\n\n } else {\n\n Err(\"Not an ascii!\")\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 141692.18150351383 }, { "content": "fn parse_hex_halfbyte(symbol: u8) -> Result<u8, &'static str> {\n\n if symbol >= b'0' && symbol <= b'9' {\n\n return Ok(symbol - b'0');\n\n }\n\n if symbol >= b'A' && symbol <= b'F' {\n\n return Ok(symbol - b'A' + 10);\n\n }\n\n Err(\"Invalid HEX character.\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 134229.64223995915 }, { "content": "#[test]\n\nfn test_parse_f32() {\n\n assert_eq!(parse_f32(Some(\"\")), Ok(None));\n\n assert_eq!(parse_f32(Some(\"123.0\")), Ok(Some(123.0f32)));\n\n assert_eq!(parse_f32(Some(\"a123.0\")), Err(\"Wrong float field format\"));\n\n assert_eq!(parse_f32(None), Ok(None));\n\n}\n", "file_path": "src/common.rs", "rank": 2, "score": 52613.126052755746 }, { "content": "#[test]\n\nfn test_parse_u8() {\n\n assert_eq!(parse_u8(Some(\"\")), Ok(None));\n\n assert_eq!(parse_u8(Some(\"123\")), Ok(Some(123u8)));\n\n assert_eq!(\n\n parse_u8(Some(\"a123\")),\n\n Err(\"Wrong unsigned int field format\")\n\n );\n\n assert_eq!(\n\n parse_u8(Some(\"-123\")),\n\n Err(\"Wrong unsigned int field format\")\n\n );\n\n assert_eq!(\n\n parse_u8(Some(\"256\")),\n\n Err(\"Wrong unsigned int field format\")\n\n );\n\n assert_eq!(parse_u8(None), Ok(None));\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 3, "score": 52512.99151649643 }, { "content": "#[derive(Debug)]\n\nenum ParserState {\n\n WaitStart,\n\n ReadUntilChkSum,\n\n ChkSumUpper,\n\n ChkSumLower,\n\n WaitCR,\n\n WaitLF,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 40890.31819738078 }, { "content": "struct ParserIterator<'a> {\n\n parser: &'a mut Parser,\n\n input: Iter<'a, u8>,\n\n}\n\n\n\nimpl ParserIterator<'_> {\n\n fn new<'a>(p: &'a mut Parser, inp: &'a 
[u8]) -> ParserIterator<'a> {\n\n ParserIterator {\n\n parser: p,\n\n input: inp.iter(),\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for ParserIterator<'_> {\n\n type Item = Result<ParseResult, &'static str>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n while let Some(b) = self.input.next() {\n\n let symbol = *b;\n", "file_path": "src/lib.rs", "rank": 5, "score": 38367.8187695785 }, { "content": "#[test]\n\nfn test_parse_status() {\n\n assert_eq!(Status::from_str(\"A\"), Ok(Status::Valid));\n\n assert_eq!(Status::from_str(\"V\"), Ok(Status::NotValid));\n\n assert_eq!(Status::from_str(\"\"), Err(\"Invalid status field!\"));\n\n}\n\n\n", "file_path": "src/modes.rs", "rank": 6, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_correct_vtg() {\n\n let mut p = Parser::new();\n\n let sentence = b\"$GPVTG,089.0,T,,,15.2,N,,,A*12\\r\\n\";\n\n let mut parsed = false;\n\n for b in sentence.iter() {\n\n let r = p.parse_from_byte(*b);\n\n if r.is_some() {\n\n assert_eq!(\n\n r.unwrap(),\n\n Ok(ParseResult::VTG(Some(VTG {\n\n source: Source::GPS,\n\n course: Some(From::from(89.0)),\n\n magnetic: None,\n\n speed: coords::Speed::from_knots(15.2),\n\n mode: Mode::Autonomous\n\n })))\n\n );\n\n parsed = true;\n\n break;\n\n }\n\n }\n\n assert!(parsed);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 7, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_too_long_sentence() {\n\n let line = \"$01234567890123456789012345678901234567890123456789012345678901234567890123456789\";\n\n let mut caught_error = false;\n\n for result in Parser::new().parse_from_bytes(line.as_bytes()) {\n\n match result {\n\n Ok(_) => continue,\n\n Err(\"NMEA sentence is too long!\") => {\n\n caught_error = true;\n\n break;\n\n },\n\n Err(_) => panic!(\"Unexpected error caught in test!\")\n\n }\n\n }\n\n assert!(caught_error);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 8, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_correct_rmc() {\n\n let mut p = 
Parser::new();\n\n let sentence = b\"$GPRMC,125504.049,A,5542.2389,N,03741.6063,E,0.06,25.82,200906,,,A*56\\r\\n\";\n\n let mut parsed = false;\n\n for b in sentence.iter() {\n\n let r = p.parse_from_byte(*b);\n\n if r.is_some() {\n\n assert_eq!(\n\n r.unwrap(),\n\n Ok(ParseResult::RMC(Some(RMC {\n\n source: Source::GPS,\n\n datetime: datetime::DateTime {\n\n date: datetime::Date {\n\n day: 20,\n\n month: 9,\n\n year: 2006\n\n },\n\n time: datetime::Time {\n\n hours: 12,\n\n minutes: 55,\n", "file_path": "tests/parsing.rs", "rank": 9, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_stream_slice() {\n\n let mut p = Parser::new();\n\n let sentence = b\"0,T,,,15.2,N,,,A*12\\r\\n$GPVTG,089.0,T,,,15.2,N,,,A*12\\r\\n$GPVTG,089.0,T,,,15.2,N,,,A*12\\r\\n$GPVTG,089.0,T,\";\n\n let mut parse_count = 0;\n\n for b in sentence.iter() {\n\n let r = p.parse_from_byte(*b);\n\n if r.is_some() {\n\n assert_eq!(\n\n r.unwrap(),\n\n Ok(ParseResult::VTG(Some(VTG {\n\n source: Source::GPS,\n\n course: Some(From::from(89.0)),\n\n magnetic: None,\n\n speed: coords::Speed::from_knots(15.2),\n\n mode: Mode::Autonomous\n\n })))\n\n );\n\n parse_count += 1;\n\n }\n\n }\n\n assert_eq!(parse_count, 2);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 10, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_parser_iterator() {\n\n let mut p = Parser::new();\n\n let b = b\"$GPRMC,125504.049,A,5542.2389,N,03741.6063,E,0.06,25.82,200906,,,A*56\\r\\n\";\n\n {\n\n let mut iter = p.parse_from_bytes(&b[..]);\n\n assert_eq!(\n\n iter.next().unwrap(),\n\n Ok(ParseResult::RMC(Some(RMC {\n\n source: Source::GPS,\n\n datetime: datetime::DateTime {\n\n date: datetime::Date {\n\n day: 20,\n\n month: 9,\n\n year: 2006\n\n },\n\n time: datetime::Time {\n\n hours: 12,\n\n minutes: 55,\n\n seconds: 4.049\n\n }\n", "file_path": "tests/parsing.rs", "rank": 11, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_parse_u16() {\n\n assert_eq!(parse_u16(Some(\"\")), 
Ok(None));\n\n assert_eq!(parse_u16(Some(\"123\")), Ok(Some(123u16)));\n\n assert_eq!(\n\n parse_u16(Some(\"a123\")),\n\n Err(\"Wrong unsigned int field format\")\n\n );\n\n assert_eq!(\n\n parse_u16(Some(\"-123\")),\n\n Err(\"Wrong unsigned int field format\")\n\n );\n\n assert_eq!(\n\n parse_u16(Some(\"70000\")),\n\n Err(\"Wrong unsigned int field format\")\n\n );\n\n assert_eq!(parse_u16(None), Ok(None));\n\n}\n\n\n", "file_path": "src/common.rs", "rank": 12, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_parse_gpsquality() {\n\n assert_eq!(GPSQuality::parse(Some(\"0\")), Ok(Some(GPSQuality::NoFix)));\n\n assert_eq!(GPSQuality::parse(Some(\"1\")), Ok(Some(GPSQuality::GPS)));\n\n assert_eq!(GPSQuality::parse(Some(\"2\")), Ok(Some(GPSQuality::DGPS)));\n\n assert_eq!(GPSQuality::parse(Some(\"3\")), Ok(Some(GPSQuality::PPS)));\n\n assert_eq!(GPSQuality::parse(Some(\"4\")), Ok(Some(GPSQuality::RTK)));\n\n assert_eq!(GPSQuality::parse(Some(\"5\")), Ok(Some(GPSQuality::FRTK)));\n\n assert_eq!(\n\n GPSQuality::parse(Some(\"6\")),\n\n Ok(Some(GPSQuality::Estimated))\n\n );\n\n assert_eq!(GPSQuality::parse(Some(\"7\")), Ok(Some(GPSQuality::Manual)));\n\n assert_eq!(\n\n GPSQuality::parse(Some(\"8\")),\n\n Ok(Some(GPSQuality::Simulated))\n\n );\n\n assert_eq!(GPSQuality::parse(Some(\"\")), Ok(None));\n\n assert_eq!(GPSQuality::parse(None), Ok(None));\n\n assert!(GPSQuality::parse(Some(\"9\")).is_err());\n\n}\n", "file_path": "src/gga.rs", "rank": 13, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_correct_rmc2() {\n\n let mut p = Parser::new();\n\n let sentence = b\"$GPRMC,113650.0,A,5548.607,S,03739.387,W,000.01,255.6,210403,08.7,E*66\\r\\n\";\n\n let mut parsed = false;\n\n for b in sentence.iter() {\n\n let r = p.parse_from_byte(*b);\n\n if r.is_some() {\n\n assert_eq!(\n\n r.unwrap(),\n\n Ok(ParseResult::RMC(Some(RMC {\n\n source: Source::GPS,\n\n datetime: datetime::DateTime {\n\n date: datetime::Date {\n\n day: 21,\n\n month: 4,\n\n 
year: 2003\n\n },\n\n time: datetime::Time {\n\n hours: 11,\n\n minutes: 36,\n", "file_path": "tests/parsing.rs", "rank": 14, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_correct_gll() {\n\n let mut p = Parser::new();\n\n let b = b\"$GPGLL,4916.45,N,12311.12,W,225444,A*31\\r\\n\";\n\n {\n\n let mut iter = p.parse_from_bytes(&b[..]);\n\n assert_eq!(\n\n iter.next().unwrap(),\n\n Ok(ParseResult::GLL(Some(GLL {\n\n source: Source::GPS,\n\n time: datetime::Time {\n\n hours: 22,\n\n minutes: 54,\n\n seconds: 44.0\n\n },\n\n latitude: TryFrom::try_from(49.2741666667).unwrap(),\n\n longitude: TryFrom::try_from(-123.18533333334).unwrap(),\n\n mode: Mode::Autonomous\n\n })))\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 15, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_source_bitor() {\n\n let s = Source::GLONASS | Source::GPS | Source::Beidou;\n\n assert!(s.mask == (Source::GLONASS as u32 | Source::GPS as u32 | Source::Beidou as u32));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_from_date_and_time() {\n\n assert!(DateTime::from_date_and_time(\n\n Some(Date {\n\n day: 1,\n\n month: 10,\n\n year: 2010\n\n }),\n\n Some(Time {\n\n hours: 1,\n\n minutes: 2,\n\n seconds: 50.0f32\n\n })\n\n )\n\n .is_ok());\n\n assert!(DateTime::from_date_and_time(\n\n Some(Date {\n\n day: 1,\n\n month: 10,\n\n year: 2010\n\n }),\n", "file_path": "src/datetime.rs", "rank": 17, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_parse_mode() {\n\n assert_eq!(Mode::from_some_str(Some(\"A\")), Ok(Mode::Autonomous));\n\n assert_eq!(Mode::from_some_str(Some(\"D\")), Ok(Mode::Differential));\n\n assert_eq!(Mode::from_some_str(Some(\"E\")), Ok(Mode::Estimated));\n\n assert_eq!(Mode::from_some_str(Some(\"M\")), Ok(Mode::Manual));\n\n assert_eq!(Mode::from_some_str(Some(\"S\")), Ok(Mode::Simulator));\n\n assert_eq!(Mode::from_some_str(Some(\"N\")), 
Ok(Mode::NotValid));\n\n assert!(Mode::from_some_str(None).is_err());\n\n assert!(Mode::from_some_str(Some(\"\")).is_err());\n\n assert!(Mode::from_some_str(Some(\"abc\")).is_err());\n\n}\n\n\n", "file_path": "src/modes.rs", "rank": 18, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_parse_time() {\n\n let time = Time::parse_from_hhmmss(Some(\"124201.340\"))\n\n .unwrap()\n\n .unwrap();\n\n assert_eq!(time.hours, 12);\n\n assert_eq!(time.minutes, 42);\n\n assert_eq!(time.seconds, 1.34);\n\n assert!(Time::parse_from_hhmmss(Some(\"304201.340\")).is_err());\n\n assert!(Time::parse_from_hhmmss(Some(\"109001.340\")).is_err());\n\n}\n\n\n", "file_path": "src/datetime.rs", "rank": 19, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_sentence_bitor() {\n\n let s = Sentence::RMC | Sentence::VTG | Sentence::GGA;\n\n assert!(s.mask == (Sentence::RMC as u32 | Sentence::VTG as u32 | Sentence::GGA as u32));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_correct_gga() {\n\n let mut p = Parser::new();\n\n let sentence = b\"$GPGGA,145659.00,5956.695396,N,03022.454999,E,2,07,0.6,9.0,M,18.0,M,,*62\\r\\n\";\n\n let mut parsed = false;\n\n for b in sentence.iter() {\n\n let r = p.parse_from_byte(*b);\n\n if r.is_some() {\n\n assert_eq!(\n\n r.unwrap(),\n\n Ok(ParseResult::GGA(Some(GGA {\n\n source: Source::GPS,\n\n time: datetime::Time {\n\n hours: 14,\n\n minutes: 56,\n\n seconds: 59.0\n\n },\n\n latitude: TryFrom::try_from(59.944923266667).unwrap(),\n\n longitude: TryFrom::try_from(30.3742499833).unwrap(),\n\n gps_quality: GPSQuality::DGPS,\n\n sat_in_use: 7,\n", "file_path": "tests/parsing.rs", "rank": 21, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_parse_date() {\n\n let date = Date::parse_from_ddmmyy(Some(\"010210\")).unwrap().unwrap();\n\n assert_eq!(date.day, 1);\n\n assert_eq!(date.month, 2);\n\n assert_eq!(date.year, 2010);\n\n let date = 
Date::parse_from_ddmmyy(Some(\"010270\")).unwrap().unwrap();\n\n assert_eq!(date.day, 1);\n\n assert_eq!(date.month, 2);\n\n assert_eq!(date.year, 1970);\n\n assert!(Date::parse_from_ddmmyy(Some(\"011470\")).is_err());\n\n assert!(Date::parse_from_ddmmyy(Some(\"451070\")).is_err());\n\n}\n\n\n", "file_path": "src/datetime.rs", "rank": 22, "score": 30680.18173056583 }, { "content": "#[test]\n\nfn test_correct_but_unsupported_source() {\n\n let mut p = Parser::new();\n\n let sentence = b\"$LCVTG,089.0,T,,,15.2,N,,*67\\r\\n\";\n\n let mut parsed = false;\n\n for b in sentence.iter() {\n\n let r = p.parse_from_byte(*b);\n\n if r.is_some() {\n\n assert_eq!(r.unwrap(), Err(\"Source is not supported!\"));\n\n parsed = true;\n\n break;\n\n }\n\n }\n\n assert!(parsed);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 23, "score": 29598.04433238181 }, { "content": "#[test]\n\nfn test_create_filtered_parser() {\n\n let _parser = Parser::new()\n\n .source_filter(Source::GPS | Source::GLONASS)\n\n .sentence_filter(Sentence::RMC | Sentence::GLL);\n\n let _parser = Parser::new()\n\n .source_only(Source::GPS)\n\n .sentence_only(Sentence::RMC);\n\n}\n", "file_path": "src/lib.rs", "rank": 24, "score": 29598.04433238181 }, { "content": "#[test]\n\nfn test_parse_mode_or_status() {\n\n assert_eq!(\n\n Mode::from_some_str_or_status(Some(\"A\"), &Status::Valid),\n\n Ok(Mode::Autonomous)\n\n );\n\n assert_eq!(\n\n Mode::from_some_str_or_status(Some(\"D\"), &Status::Valid),\n\n Ok(Mode::Differential)\n\n );\n\n assert_eq!(\n\n Mode::from_some_str_or_status(Some(\"E\"), &Status::NotValid),\n\n Ok(Mode::Estimated)\n\n );\n\n assert_eq!(\n\n Mode::from_some_str_or_status(Some(\"M\"), &Status::NotValid),\n\n Ok(Mode::Manual)\n\n );\n\n assert_eq!(\n\n Mode::from_some_str_or_status(Some(\"S\"), &Status::NotValid),\n\n Ok(Mode::Simulator)\n", "file_path": "src/modes.rs", "rank": 25, "score": 29598.04433238181 }, { "content": "#[test]\n\nfn test_correct_but_unsupported_nmea_block() {\n\n let 
mut p = Parser::new();\n\n let sentence = b\"$GPZZZ,,,,,,,,,*61\\r\\n\";\n\n let mut parsed = false;\n\n for b in sentence.iter() {\n\n let r = p.parse_from_byte(*b);\n\n if r.is_some() {\n\n assert_eq!(r.unwrap(), Err(\"Unsupported sentence type.\"));\n\n parsed = true;\n\n break;\n\n }\n\n }\n\n assert!(parsed);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 26, "score": 28623.522808050348 }, { "content": "pub(crate) fn parse_u8(input: Option<&str>) -> Result<Option<u8>, &'static str> {\n\n match input {\n\n Some(s) if s.len() == 0 => Ok(None),\n\n Some(s) => s\n\n .parse::<u8>()\n\n .map_err(|_| \"Wrong unsigned int field format\")\n\n .and_then(|u| Ok(Some(u))),\n\n None => Ok(None),\n\n }\n\n}\n\n\n\npub(crate) fn parse_u16(input: Option<&str>) -> Result<Option<u16>, &'static str> {\n\n match input {\n\n Some(s) if s.len() == 0 => Ok(None),\n\n Some(s) => s\n\n .parse::<u16>()\n\n .map_err(|_| \"Wrong unsigned int field format\")\n\n .and_then(|u| Ok(Some(u))),\n\n None => Ok(None),\n\n }\n", "file_path": "src/common.rs", "rank": 46, "score": 24.908531940218758 }, { "content": "}\n\n\n\npub(crate) fn parse_f32(input: Option<&str>) -> Result<Option<f32>, &'static str> {\n\n match input {\n\n Some(s) if s.len() == 0 => Ok(None),\n\n Some(s) => s\n\n .parse::<f32>()\n\n .map_err(|_| \"Wrong float field format\")\n\n .and_then(|u| Ok(Some(u))),\n\n None => Ok(None),\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/common.rs", "rank": 47, "score": 24.21856622572265 }, { "content": " pub(crate) fn parse_from_hhmmss(input: Option<&str>) -> Result<Option<Time>, &'static str> {\n\n match input {\n\n Some(time) if time.len() == 0 => Ok(None),\n\n Some(time) if time.len() < 6 => Err(\"Date input string is too short!\"),\n\n Some(time) => Ok(Some(Time {\n\n hours: (&time[..2])\n\n .parse()\n\n .map_err(|_| \"Hours string is not a number!\")\n\n .and_then(|h| {\n\n if h < 24 {\n\n Ok(h)\n\n } else {\n\n Err(\"Hours is not in range 0-23\")\n\n }\n\n })?,\n\n minutes: 
(&time[2..4])\n\n .parse()\n\n .map_err(|_| \"Minutes string is not a number!\")\n\n .and_then(|m| {\n\n if m < 59 {\n", "file_path": "src/datetime.rs", "rank": 48, "score": 20.801783106344317 }, { "content": "//! NMEA date and time structures.\n\n/// NMEA date\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Date {\n\n /// NMEA day\n\n pub day: u8,\n\n /// NMEA month\n\n pub month: u8,\n\n /// Absolute year calculated from NMEA two-digit year, so for 19 it should be 2019, for 70 it should be 1970\n\n pub year: u16,\n\n}\n\n\n\nimpl Date {\n\n pub(crate) fn parse_from_ddmmyy(input: Option<&str>) -> Result<Option<Date>, &'static str> {\n\n match input {\n\n Some(date) if date.len() == 0 => Ok(None),\n\n Some(date) if date.len() < 6 => Err(\"Date input string is too short!\"),\n\n Some(date) => Ok(Some(Date {\n\n day: (&date[..2])\n\n .parse()\n", "file_path": "src/datetime.rs", "rank": 49, "score": 20.34305479789908 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Mode {\n\n pub(crate) fn from_some_str(from: Option<&str>) -> Result<Self, &'static str> {\n\n match from {\n\n Some(\"A\") => Ok(Mode::Autonomous),\n\n Some(\"D\") => Ok(Mode::Differential),\n\n Some(\"E\") => Ok(Mode::Estimated),\n\n Some(\"M\") => Ok(Mode::Manual),\n\n Some(\"S\") => Ok(Mode::Simulator),\n\n Some(\"N\") => Ok(Mode::NotValid),\n\n None => Err(\"Mode field shoud not be null!\"),\n\n Some(\"\") => Err(\"Mode should not be empty string!\"),\n\n _ => Err(\"Wrong mode character!\"),\n\n }\n\n }\n\n pub(crate) fn from_some_str_or_status(\n\n from: Option<&str>,\n", "file_path": "src/modes.rs", "rank": 50, "score": 20.03993211295358 }, { "content": " pub sat_in_use: u8,\n\n /// Horizontal dilusion of presicion. Indicates precision of solution.\n\n pub hdop: f32,\n\n /// Altitude over ground, typically WGS-84.\n\n pub altitude: Altitude,\n\n /// The difference between reference ellipsoid surface and mean-sea-level.\n\n pub geoidal_separation: Option<f32>,\n\n /// DGPS data age. 
None if DGPS not in use.\n\n pub age_dgps: Option<Duration>,\n\n /// ID of reference DGPS station used for fix. None if DGPS not in use.\n\n pub dgps_station_id: Option<u16>,\n\n}\n\n\n\nimpl GGA {\n\n pub(crate) fn parse<'a>(\n\n source: Source,\n\n fields: &mut core::str::Split<'a, char>,\n\n ) -> Result<Option<Self>, &'static str> {\n\n let time = Time::parse_from_hhmmss(fields.next())?;\n\n let latitude = Latitude::parse(fields.next(), fields.next())?;\n", "file_path": "src/gga.rs", "rank": 51, "score": 19.086268855370534 }, { "content": " /// Ignore all [sentences](enum.Sentence.html) except given.\n\n pub fn sentence_filter(mut self, sentence_mask: SentenceMask) -> Self {\n\n self.sentence_mask = sentence_mask;\n\n self\n\n }\n\n /// Use parser state and bytes slice than returns Iterator that yield [ParseResult](enum.ParseResult.html) or errors if has enough data for parsing.\n\n pub fn parse_from_bytes<'a>(\n\n &'a mut self,\n\n input: &'a [u8],\n\n ) -> impl Iterator<Item = Result<ParseResult, &'static str>> + 'a {\n\n ParserIterator::new(self, input)\n\n }\n\n /// Parse NMEA by one byte at a time. Returns Some if has enough data for parsing.\n\n pub fn parse_from_byte(&mut self, symbol: u8) -> Option<Result<ParseResult, &'static str>> {\n\n let (new_state, result) = match self.parser_state {\n\n ParserState::WaitStart if symbol == b'$' => {\n\n self.buflen = 0;\n\n self.chksum = 0;\n\n (ParserState::ReadUntilChkSum, None)\n\n }\n", "file_path": "src/lib.rs", "rank": 52, "score": 18.256079354286506 }, { "content": " .map(|year| if year > 69 { year + 1900 } else { year + 2000 })\n\n .map_err(|_| \"Year string is not a number!\")?,\n\n })),\n\n _ => Ok(None),\n\n }\n\n }\n\n}\n\n\n\n/// NMEA time in UTC\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Time {\n\n /// Hours as reported by receiver\n\n pub hours: u8,\n\n /// Minutes as reported by receiver\n\n pub minutes: u8,\n\n /// Seconds as reported by receiver. 
Precision and accuracy depends on receiver.\n\n pub seconds: f32,\n\n}\n\n\n\nimpl Time {\n", "file_path": "src/datetime.rs", "rank": 53, "score": 18.17223815454437 }, { "content": " Ok(m)\n\n } else {\n\n Err(\"Minutes is not in range 0-59\")\n\n }\n\n })?,\n\n seconds: (&time[4..])\n\n .parse::<f32>()\n\n .map_err(|_| \"Seconds string is not a float\")\n\n .and_then(|s| {\n\n if s < 60f32 {\n\n Ok(s)\n\n } else {\n\n Err(\"Seconds is not in range 0-59\")\n\n }\n\n })?,\n\n })),\n\n _ => Ok(None),\n\n }\n\n }\n\n}\n", "file_path": "src/datetime.rs", "rank": 54, "score": 17.292327058710367 }, { "content": "use crate::common;\n\nuse crate::coords::{Altitude, Latitude, Longitude};\n\nuse crate::datetime::Time;\n\nuse crate::Source;\n\nuse core::time::Duration;\n\n\n\n/// Geographic coordinates including altitude, GPS solution quality, DGPS usage information.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct GGA {\n\n /// Navigational system.\n\n pub source: Source,\n\n /// Time of fix in UTC.\n\n pub time: Time,\n\n /// Latitude in reference datum, typically WGS-84.\n\n pub latitude: Latitude,\n\n /// Logitude in reference datum, typically WGS-84.\n\n pub longitude: Longitude,\n\n /// Quality of GPS solution.\n\n pub gps_quality: GPSQuality,\n\n /// Sattelites in use\n", "file_path": "src/gga.rs", "rank": 55, "score": 17.040268157066105 }, { "content": " _ => Err(\"Source is not supported!\"),\n\n }\n\n }\n\n}\n\n\n\n/// Various kinds of NMEA sentence like RMC, VTG or other. 
Used for filter by sentence type in Parser.\n\n#[derive(Debug, Copy, Clone)]\n\npub enum Sentence {\n\n /// Recommended minimum sentence.\n\n RMC = 0b1,\n\n /// Course over ground.\n\n VTG = 0b10,\n\n /// Geographic coordinates including altitude, GPS solution quality, DGPS usage information.\n\n GGA = 0b100,\n\n /// Geographic latitude ang longitude sentence with time of fix and receiver state.\n\n GLL = 0b1000,\n\n}\n\n\n\nimpl TryFrom<&str> for Sentence {\n\n type Error = &'static str;\n", "file_path": "src/lib.rs", "rank": 56, "score": 16.60029328262585 }, { "content": " seconds: 4.049\n\n }\n\n },\n\n latitude: TryFrom::try_from(55.703981666666664).unwrap(),\n\n longitude: TryFrom::try_from(37.69343833333333).unwrap(),\n\n speed: coords::Speed::from_knots(0.06),\n\n course: Some(From::from(25.82)),\n\n magnetic: None,\n\n mode: Mode::Autonomous\n\n })))\n\n );\n\n parsed = true;\n\n break;\n\n }\n\n }\n\n assert!(parsed);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 57, "score": 16.51543158365767 }, { "content": " },\n\n latitude: TryFrom::try_from(55.703981666666664).unwrap(),\n\n longitude: TryFrom::try_from(37.69343833333333).unwrap(),\n\n speed: coords::Speed::from_knots(0.06),\n\n course: Some(From::from(25.82)),\n\n magnetic: None,\n\n mode: Mode::Autonomous\n\n })))\n\n );\n\n }\n\n let b1 = b\"$GPRMC,125504.049,A,5542.2389,N\";\n\n {\n\n let mut iter = p.parse_from_bytes(&b1[..]);\n\n assert!(iter.next().is_none());\n\n }\n\n let b2 = b\",03741.6063,E,0.06,25.82,200906,,,\";\n\n {\n\n let mut iter = p.parse_from_bytes(&b2[..]);\n\n assert!(iter.next().is_none());\n\n }\n", "file_path": "tests/parsing.rs", "rank": 58, "score": 16.28783313824486 }, { "content": " alternate: &Status,\n\n ) -> Result<Self, &'static str> {\n\n match from {\n\n Some(\"A\") => Ok(Mode::Autonomous),\n\n Some(\"D\") => Ok(Mode::Differential),\n\n Some(\"E\") => Ok(Mode::Estimated),\n\n Some(\"M\") => Ok(Mode::Manual),\n\n Some(\"S\") => Ok(Mode::Simulator),\n\n 
Some(\"N\") => Ok(Mode::NotValid),\n\n None => match alternate {\n\n Status::Valid => Ok(Mode::Autonomous),\n\n Status::NotValid => Ok(Mode::NotValid),\n\n },\n\n Some(\"\") => Err(\"Mode should not be empty string!\"),\n\n _ => Err(\"Wrong mode character!\"),\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/modes.rs", "rank": 59, "score": 16.21282162226528 }, { "content": " /// Float RTK correction is in use.\n\n FRTK,\n\n /// Estimated by movement model.\n\n Estimated,\n\n /// Set by operator.\n\n Manual,\n\n /// Simulated.\n\n Simulated,\n\n}\n\n\n\nimpl GPSQuality {\n\n pub(crate) fn parse(input: Option<&str>) -> Result<Option<GPSQuality>, &'static str> {\n\n match input {\n\n Some(\"0\") => Ok(Some(GPSQuality::NoFix)),\n\n Some(\"1\") => Ok(Some(GPSQuality::GPS)),\n\n Some(\"2\") => Ok(Some(GPSQuality::DGPS)),\n\n Some(\"3\") => Ok(Some(GPSQuality::PPS)),\n\n Some(\"4\") => Ok(Some(GPSQuality::RTK)),\n\n Some(\"5\") => Ok(Some(GPSQuality::FRTK)),\n\n Some(\"6\") => Ok(Some(GPSQuality::Estimated)),\n", "file_path": "src/gga.rs", "rank": 60, "score": 15.826393879649935 }, { "content": "use crate::coords::{Course, Latitude, Longitude, MagneticCourse, Speed};\n\nuse crate::datetime::{Date, DateTime, Time};\n\nuse crate::modes::{Mode, Status};\n\nuse crate::Source;\n\n\n\n/// Recommended Minimum Sentence for any GNSS source.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct RMC {\n\n /// Navigational system.\n\n pub source: Source,\n\n /// Date and time of fix in UTC.\n\n pub datetime: DateTime,\n\n /// Latitude in reference datum, mostly WGS-84.\n\n pub latitude: Latitude,\n\n /// Logitude in reference datum, mostly WGS-84.\n\n pub longitude: Longitude,\n\n /// Speed over ground.\n\n pub speed: Speed,\n\n /// Course over ground. 
Some receivers do not report it when no movement.\n\n pub course: Option<Course>,\n", "file_path": "src/rmc.rs", "rank": 61, "score": 15.796080069625262 }, { "content": "# Examples\n\n\n\nIf you could read a one byte at a time from the receiver you may use `parse_from_byte`:\n\n```rust\n\nuse nmea0183::{Parser, ParseResult};\n\n\n\nlet nmea = b\"$GPGGA,145659.00,5956.695396,N,03022.454999,E,2,07,0.6,9.0,M,18.0,M,,*62\\r\\n$GPGGA,,,,,,,,,,,,,,*00\\r\\n\";\n\nlet mut parser = Parser::new();\n\nfor b in &nmea[..] {\n\n if let Some(result) = parser.parse_from_byte(*b) {\n\n match result {\n\n Ok(ParseResult::GGA(Some(gga))) => { }, // Got GGA sentence\n\n Ok(ParseResult::GGA(None)) => { }, // Got GGA sentence without valid data, receiver ok but has no solution\n\n Ok(_) => {}, // Some other sentences..\n\n Err(e) => { } // Got parse error\n\n }\n\n }\n\n}\n\n```\n\n\n\nIf you read many bytes from receiver at once or want to parse NMEA log from text file you could use Iterator-style:\n\n```rust\n\nuse nmea0183::{Parser, ParseResult};\n\n\n\nlet nmea = b\"$GPGGA,,,,,,,,,,,,,,*00\\r\\n$GPRMC,125504.049,A,5542.2389,N,03741.6063,E,0.06,25.82,200906,,,A*56\\r\\n\";\n\nlet mut parser = Parser::new();\n\n\n\nfor result in parser.parse_from_bytes(&nmea[..]) {\n\n match result {\n\n Ok(ParseResult::RMC(Some(rmc))) => { }, // Got RMC sentence\n\n Ok(ParseResult::GGA(None)) => { }, // Got GGA sentence without valid data, receiver ok but has no solution\n\n Ok(_) => {}, // Some other sentences..\n\n Err(e) => { } // Got parse error\n\n }\n\n}\n\n```\n\n\n\nIt is possible to ignore some sentences or sources. 
You can set filter on [Parser](https://docs.rs/nmea0183/latest/nmea0183/struct.Parser.html) like so:\n\n```rust\n\nuse nmea0183::{Parser, ParseResult, Sentence, Source};\n\n\n\nlet parser_only_gps_gallileo = Parser::new()\n\n .source_filter(Source::GPS | Source::Gallileo);\n\nlet parser_only_rmc_gga_gps = Parser::new()\n\n .source_only(Source::GPS)\n\n .sentence_filter(Sentence::RMC | Sentence::GGA);\n\n```\n\n\n\n# Panics\n\n\n\nShould not panic. If so please report issue on project page.\n\n\n", "file_path": "README.md", "rank": 62, "score": 15.662362736136627 }, { "content": "impl GLL {\n\n pub(crate) fn parse<'a>(\n\n source: Source,\n\n fields: &mut core::str::Split<'a, char>,\n\n ) -> Result<Option<Self>, &'static str> {\n\n let latitude = Latitude::parse(fields.next(), fields.next())?;\n\n let longitude = Longitude::parse(fields.next(), fields.next())?;\n\n let time = Time::parse_from_hhmmss(fields.next())?;\n\n let status = if let Some(f_status) = fields.next() {\n\n Status::from_str(f_status)?\n\n } else {\n\n return Err(\"Status field is mandatory for GLL sentence!\");\n\n };\n\n let mode = Mode::from_some_str_or_status(fields.next(), &status)?;\n\n if let (Some(lat), Some(lon), Some(time)) = (latitude, longitude, time) {\n\n Ok(Some(GLL {\n\n source,\n\n time: time,\n\n latitude: lat,\n\n longitude: lon,\n\n mode,\n\n }))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n", "file_path": "src/gll.rs", "rank": 63, "score": 15.61928924961428 }, { "content": "\n\n/// NMEA date and time in UTC\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct DateTime {\n\n /// NMEA date\n\n pub date: Date,\n\n /// NMEA time\n\n pub time: Time,\n\n}\n\n\n\nimpl DateTime {\n\n pub(crate) fn from_date_and_time(\n\n maybe_date: Option<Date>,\n\n maybe_time: Option<Time>,\n\n ) -> Result<Option<Self>, &'static str> {\n\n match (maybe_date, maybe_time) {\n\n (Some(date), Some(time)) => Ok(Some(DateTime { date, time })),\n\n (None, None) => Ok(None),\n\n _ => Err(\"Date or time is 
None, should be Some both\"),\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/datetime.rs", "rank": 64, "score": 15.374425922182677 }, { "content": "#[derive(Debug, PartialEq)]\n\npub(crate) enum Status {\n\n Valid,\n\n NotValid,\n\n}\n\n\n\nimpl Status {\n\n pub(crate) fn from_str(st: &str) -> Result<Status, &'static str> {\n\n match st {\n\n \"A\" => Ok(Status::Valid),\n\n \"V\" => Ok(Status::NotValid),\n\n _ => Err(\"Invalid status field!\"),\n\n }\n\n }\n\n}\n\n\n\n/// Receiver mode of operation.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Mode {\n\n /// Autonomous mode without any external correction.\n", "file_path": "src/modes.rs", "rank": 65, "score": 15.354203748233644 }, { "content": " /// Magnetic course over ground (angle to magnetic North pole). Receiver may not report it.\n\n pub magnetic: Option<MagneticCourse>,\n\n /// Receiver's mode of operation.\n\n pub mode: Mode,\n\n}\n\n\n\nimpl RMC {\n\n pub(crate) fn parse<'a>(\n\n source: Source,\n\n fields: &mut core::str::Split<'a, char>,\n\n ) -> Result<Option<Self>, &'static str> {\n\n let time = Time::parse_from_hhmmss(fields.next())?;\n\n let status = if let Some(f_status) = fields.next() {\n\n Status::from_str(f_status)?\n\n } else {\n\n return Err(\"Status field is mandatory for RMC sentence!\");\n\n };\n\n let latitude = Latitude::parse(fields.next(), fields.next())?;\n\n let longitude = Longitude::parse(fields.next(), fields.next())?;\n\n let speed = Speed::parse(fields.next())?;\n", "file_path": "src/rmc.rs", "rank": 66, "score": 15.064942869713216 }, { "content": "\n\nimpl BitOr<Source> for SourceMask {\n\n type Output = Self;\n\n fn bitor(self, rhs: Source) -> Self {\n\n SourceMask {\n\n mask: self.mask | rhs as u32,\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for Source {\n\n type Error = &'static str;\n\n\n\n fn try_from(from: &str) -> Result<Self, Self::Error> {\n\n match &from[0..2] {\n\n \"GP\" => Ok(Source::GPS),\n\n \"GL\" => Ok(Source::GLONASS),\n\n \"GA\" => 
Ok(Source::Gallileo),\n\n \"BD\" => Ok(Source::Beidou),\n\n \"GN\" => Ok(Source::GNSS),\n", "file_path": "src/lib.rs", "rank": 67, "score": 14.851306308647512 }, { "content": "//! match result {\n\n//! Ok(ParseResult::GGA(Some(gga))) => { }, // Got GGA sentence\n\n//! Ok(ParseResult::GGA(None)) => { }, // Got GGA sentence without valid data, receiver ok but has no solution\n\n//! Ok(_) => {}, // Some other sentences..\n\n//! Err(e) => { } // Got parse error\n\n//! }\n\n//! }\n\n//! }\n\n//! ```\n\n//!\n\n//! If you read many bytes from receiver at once or want to parse NMEA log from text file you could use Iterator-style:\n\n//! ```rust\n\n//! use nmea0183::{Parser, ParseResult};\n\n//!\n\n//! let nmea = b\"$GPGGA,,,,,,,,,,,,,,*00\\r\\n$GPRMC,125504.049,A,5542.2389,N,03741.6063,E,0.06,25.82,200906,,,A*56\\r\\n\";\n\n//! let mut parser = Parser::new();\n\n//!\n\n//! for result in parser.parse_from_bytes(&nmea[..]) {\n\n//! match result {\n\n//! Ok(ParseResult::RMC(Some(rmc))) => { }, // Got RMC sentence\n", "file_path": "src/lib.rs", "rank": 68, "score": 14.758673030085891 }, { "content": " seconds: 50.0\n\n }\n\n },\n\n latitude: TryFrom::try_from(-55.810116666666666).unwrap(),\n\n longitude: TryFrom::try_from(-37.65645).unwrap(),\n\n speed: coords::Speed::from_knots(0.01),\n\n course: Some(From::from(255.6)),\n\n magnetic: Some(From::from(246.90001)),\n\n mode: Mode::Autonomous\n\n })))\n\n );\n\n parsed = true;\n\n break;\n\n }\n\n }\n\n assert!(parsed);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 69, "score": 14.618633953616754 }, { "content": "use crate::coords::{Latitude, Longitude};\n\nuse crate::datetime::Time;\n\nuse crate::modes::{Mode, Status};\n\nuse crate::Source;\n\n\n\n/// Geographic latitude ang longitude sentence with time of fix and receiver state.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct GLL {\n\n /// Navigational system.\n\n pub source: Source,\n\n /// Time of fix in UTC.\n\n pub time: Time,\n\n /// Latitude in reference 
datum, mostly WGS-84.\n\n pub latitude: Latitude,\n\n /// Logitude in reference datum, mostly WGS-84.\n\n pub longitude: Longitude,\n\n /// Receiver's mode of operation.\n\n pub mode: Mode,\n\n}\n\n\n", "file_path": "src/gll.rs", "rank": 70, "score": 14.487022345968958 }, { "content": "\n\n fn try_from(from: &str) -> Result<Self, Self::Error> {\n\n match from {\n\n \"RMC\" => Ok(Sentence::RMC),\n\n \"GGA\" => Ok(Sentence::GGA),\n\n \"GLL\" => Ok(Sentence::GLL),\n\n \"VTG\" => Ok(Sentence::VTG),\n\n _ => Err(\"Unsupported sentence type.\"),\n\n }\n\n }\n\n}\n\n\n\n/// Mask for Sentence filter in Parser.\n\npub struct SentenceMask {\n\n mask: u32,\n\n}\n\n\n\nimpl SentenceMask {\n\n fn is_masked(&self, sentence: Sentence) -> bool {\n\n sentence as u32 & self.mask == 0\n", "file_path": "src/lib.rs", "rank": 71, "score": 14.436888886923334 }, { "content": " }\n\n\n\n fn parse_sentence(&self) -> Result<Option<ParseResult>, &'static str> {\n\n let input = from_ascii(&self.buffer[..self.buflen])?;\n\n let mut iter = input.split(',');\n\n let sentence_field = iter\n\n .next()\n\n .ok_or(\"Sentence type not found but mandatory!\")?;\n\n if sentence_field.len() < 5 {\n\n return Err(\"Sentence field is too small. 
Must be 5 chars at least!\");\n\n }\n\n let source = Source::try_from(sentence_field)?;\n\n if self.source_mask.is_masked(source) {\n\n return Ok(None);\n\n }\n\n let sentence = Sentence::try_from(&sentence_field[2..5])?;\n\n if self.sentence_mask.is_masked(sentence) {\n\n return Ok(None);\n\n }\n\n match sentence {\n\n Sentence::RMC => Ok(Some(ParseResult::RMC(RMC::parse(source, &mut iter)?))),\n\n Sentence::GGA => Ok(Some(ParseResult::GGA(GGA::parse(source, &mut iter)?))),\n\n Sentence::GLL => Ok(Some(ParseResult::GLL(GLL::parse(source, &mut iter)?))),\n\n Sentence::VTG => Ok(Some(ParseResult::VTG(VTG::parse(source, &mut iter)?))),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 72, "score": 14.278706712241572 }, { "content": "use crate::coords::{Course, MagneticCourse, Speed};\n\nuse crate::modes::Mode;\n\nuse crate::Source;\n\n\n\n/// The actual course and speed relative to the ground.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct VTG {\n\n /// Navigational system.\n\n pub source: Source,\n\n /// Course over ground. 
Some receivers do not reports it when no movement.\n\n pub course: Option<Course>,\n\n /// Magnetic course over ground (angle to magnetic North pole).\n\n pub magnetic: Option<MagneticCourse>,\n\n /// Speed over ground.\n\n pub speed: Speed,\n\n /// Receiver's mode of operation.\n\n pub mode: Mode,\n\n}\n\n\n\nimpl VTG {\n", "file_path": "src/vtg.rs", "rank": 73, "score": 14.071226394669239 }, { "content": " longitude: TryFrom::try_from(37.69343833333333).unwrap(),\n\n speed: coords::Speed::from_knots(0.06),\n\n course: Some(From::from(25.82)),\n\n magnetic: None,\n\n mode: Mode::Autonomous\n\n })))\n\n );\n\n assert!(iter.next().is_none());\n\n }\n\n}\n", "file_path": "tests/parsing.rs", "rank": 74, "score": 13.566493580861156 }, { "content": " let longitude = Longitude::parse(fields.next(), fields.next())?;\n\n let gps_quality = GPSQuality::parse(fields.next())?;\n\n let sat_in_use = common::parse_u8(fields.next())?;\n\n let hdop = common::parse_f32(fields.next())?;\n\n let altitude = Altitude::parse(fields.next())?;\n\n fields.next(); // Skip altitude type (always meters according to NMEA spec)\n\n let geoidal_separation = common::parse_f32(fields.next())?;\n\n fields.next(); // Skip geoidal separation type (always meters according to NMEA spec)\n\n let age_dgps = common::parse_f32(fields.next())?\n\n .and_then(|a| Some(Duration::from_millis((a * 1000f32) as u64)));\n\n let dgps_station_id = common::parse_u16(fields.next())?;\n\n if let (\n\n Some(time),\n\n Some(latitude),\n\n Some(longitude),\n\n Some(gps_quality),\n\n Some(sat_in_use),\n\n Some(hdop),\n\n Some(altitude),\n\n ) = (\n", "file_path": "src/gga.rs", "rank": 75, "score": 13.454333748096222 }, { "content": " time,\n\n latitude,\n\n longitude,\n\n gps_quality,\n\n sat_in_use,\n\n hdop,\n\n altitude,\n\n ) {\n\n Ok(Some(GGA {\n\n source,\n\n time,\n\n latitude,\n\n longitude,\n\n gps_quality,\n\n sat_in_use,\n\n hdop,\n\n altitude,\n\n geoidal_separation,\n\n age_dgps,\n\n dgps_station_id,\n", 
"file_path": "src/gga.rs", "rank": 76, "score": 13.21423844155725 }, { "content": " pub(crate) fn parse<'a>(\n\n source: Source,\n\n fields: &mut core::str::Split<'a, char>,\n\n ) -> Result<Option<Self>, &'static str> {\n\n let course = Course::parse(fields.next())?;\n\n fields.next(); // Not needed true course marker field\n\n let magnetic = MagneticCourse::parse_from_str(fields.next())?;\n\n fields.next(); // Not needed magnetic course marker field\n\n let speed = Speed::parse(fields.next())?;\n\n fields.next(); // Not needed speed knots marker field\n\n fields.next(); // Not needed speed kph field\n\n fields.next(); // Not needed speed kph marker field\n\n let mode = Mode::from_some_str(fields.next())?;\n\n\n\n if let Some(speed) = speed {\n\n Ok(Some(VTG {\n\n source,\n\n course,\n\n magnetic,\n\n speed,\n\n mode,\n\n }))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n", "file_path": "src/vtg.rs", "rank": 77, "score": 13.17243304995879 }, { "content": "impl BitOr<Sentence> for SentenceMask {\n\n type Output = Self;\n\n fn bitor(self, rhs: Sentence) -> Self {\n\n SentenceMask {\n\n mask: self.mask | rhs as u32,\n\n }\n\n }\n\n}\n\n\n\n/// The NMEA sentence parsing result.\n\n/// Sentences with many null fields or sentences without valid data is also parsed and returned as None.\n\n/// None ParseResult may be interpreted as working receiver but without valid data.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum ParseResult {\n\n /// The Recommended Minimum Sentence for any GNSS. 
Typically most used.\n\n RMC(Option<RMC>),\n\n /// The Geographic coordinates including altitude, GPS solution quality, DGPS usage information.\n\n GGA(Option<GGA>),\n\n /// The Geographic latitude ang longitude sentence with time of fix and the receiver state.\n\n GLL(Option<GLL>),\n", "file_path": "src/lib.rs", "rank": 78, "score": 13.090084472345865 }, { "content": " let course = Course::parse(fields.next())?;\n\n let date = Date::parse_from_ddmmyy(fields.next())?;\n\n let magnetic = MagneticCourse::parse_from_mvar_mdir(&course, fields.next(), fields.next())?;\n\n let mode = Mode::from_some_str_or_status(fields.next(), &status)?;\n\n\n\n let datetime = DateTime::from_date_and_time(date, time)?;\n\n if let (Some(datetime), Some(latitude), Some(longitude), Some(speed)) =\n\n (datetime, latitude, longitude, speed)\n\n {\n\n Ok(Some(RMC {\n\n source,\n\n datetime,\n\n latitude,\n\n longitude,\n\n speed,\n\n course,\n\n magnetic: magnetic,\n\n mode,\n\n }))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n", "file_path": "src/rmc.rs", "rank": 79, "score": 12.79921869729829 }, { "content": " None\n\n )\n\n .is_err());\n\n assert!(DateTime::from_date_and_time(\n\n None,\n\n Some(Time {\n\n hours: 1,\n\n minutes: 2,\n\n seconds: 50.0f32\n\n })\n\n )\n\n .is_err());\n\n assert_eq!(DateTime::from_date_and_time(None, None), Ok(None));\n\n}\n", "file_path": "src/datetime.rs", "rank": 80, "score": 12.054881441404065 }, { "content": " }))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n\n\n\n/// Quality of GPS solution\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum GPSQuality {\n\n /// No solution\n\n NoFix,\n\n /// Ordinary GPS solution\n\n GPS,\n\n /// Differential correction used.\n\n DGPS,\n\n /// Locked PPS (pulse per second).\n\n PPS,\n\n /// RTK correction is in use.\n\n RTK,\n", "file_path": "src/gga.rs", "rank": 81, "score": 11.938836502709627 }, { "content": " hdop: 0.6,\n\n altitude: coords::Altitude { meters: 9.0 },\n\n geoidal_separation: Some(18.0),\n\n age_dgps: 
None,\n\n dgps_station_id: None\n\n })))\n\n );\n\n parsed = true;\n\n break;\n\n }\n\n }\n\n assert!(parsed);\n\n}\n\n\n", "file_path": "tests/parsing.rs", "rank": 82, "score": 10.888462070039544 }, { "content": "use core::ops::BitOr;\n\nuse core::slice::Iter;\n\npub(crate) mod common;\n\npub mod coords;\n\npub mod datetime;\n\npub(crate) mod gga;\n\npub(crate) mod gll;\n\npub(crate) mod modes;\n\npub(crate) mod rmc;\n\npub(crate) mod vtg;\n\n\n\npub use gga::GPSQuality;\n\npub use gga::GGA;\n\npub use gll::GLL;\n\npub use modes::Mode;\n\npub use rmc::RMC;\n\npub use vtg::VTG;\n\n\n\n/// Source of NMEA sentence like GPS, GLONASS or other.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n", "file_path": "src/lib.rs", "rank": 83, "score": 9.32313832863865 }, { "content": " );\n\n assert_eq!(\n\n Mode::from_some_str_or_status(Some(\"N\"), &Status::NotValid),\n\n Ok(Mode::NotValid)\n\n );\n\n assert_eq!(\n\n Mode::from_some_str_or_status(None, &Status::NotValid),\n\n Ok(Mode::NotValid)\n\n );\n\n assert!(Mode::from_some_str_or_status(Some(\"\"), &Status::NotValid).is_err());\n\n assert!(Mode::from_some_str_or_status(Some(\"abc\"), &Status::NotValid).is_err());\n\n}\n", "file_path": "src/modes.rs", "rank": 84, "score": 9.150556630091389 }, { "content": "#![no_std]\n\n#![warn(missing_docs)]\n\n//! NMEA 0183 parser. Implemented most used sentences like RMC, VTG, GGA, GLL.\n\n//! Parser do not use heap memory and relies only on `core`.\n\n//!\n\n//! You should instantiate [Parser](struct.Parser.html) with [new](struct.Parser.html#method.new) and than use methods like [parse_from_byte](struct.Parser.html#method.parse_from_bytes) or [parse_from_bytes](struct.Parser.html#method.parse_from_bytes).\n\n//! If parser accumulates enough data it will return [ParseResult](enum.ParseResult.html) on success or `&str` that describing an error.\n\n//!\n\n//! You do not need to do any preprocessing such as split data to strings or NMEA sentences.\n\n//!\n\n//! # Examples\n\n//!\n\n//! 
If you could read a one byte at a time from the receiver you may use `parse_from_byte`:\n\n//! ```rust\n\n//! use nmea0183::{Parser, ParseResult};\n\n//!\n\n//! let nmea = b\"$GPGGA,145659.00,5956.695396,N,03022.454999,E,2,07,0.6,9.0,M,18.0,M,,*62\\r\\n$GPGGA,,,,,,,,,,,,,,*00\\r\\n\";\n\n//! let mut parser = Parser::new();\n\n//! for b in &nmea[..] {\n\n//! if let Some(result) = parser.parse_from_byte(*b) {\n", "file_path": "src/lib.rs", "rank": 85, "score": 9.133893930251077 }, { "content": " Autonomous,\n\n /// Differential correction used.\n\n Differential,\n\n /// Estimated position from previous data and movement model.\n\n Estimated,\n\n /// Set by operator.\n\n Manual,\n\n /// Simulation mode.\n\n Simulator,\n\n /// Completely invalid state. Position data if present could not be used.\n\n NotValid,\n\n}\n\n\n\nimpl Mode {\n\n /// Position data shoud be valid if true\n\n pub fn is_valid(&self) -> bool {\n\n match self {\n\n Mode::Autonomous => true,\n\n Mode::Differential => true,\n\n _ => false,\n", "file_path": "src/modes.rs", "rank": 86, "score": 9.037515973464577 }, { "content": " Some(\"7\") => Ok(Some(GPSQuality::Manual)),\n\n Some(\"8\") => Ok(Some(GPSQuality::Simulated)),\n\n Some(\"\") => Ok(None),\n\n None => Ok(None),\n\n _ => Err(\"Wrong GPSQuality indicator type!\"),\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/gga.rs", "rank": 87, "score": 8.92558727464238 }, { "content": " let b3 = b\"A*56\\r\\n\";\n\n {\n\n let mut iter = p.parse_from_bytes(&b3[..]);\n\n assert_eq!(\n\n iter.next().unwrap(),\n\n Ok(ParseResult::RMC(Some(RMC {\n\n source: Source::GPS,\n\n datetime: datetime::DateTime {\n\n date: datetime::Date {\n\n day: 20,\n\n month: 9,\n\n year: 2006\n\n },\n\n time: datetime::Time {\n\n hours: 12,\n\n minutes: 55,\n\n seconds: 4.049\n\n }\n\n },\n\n latitude: TryFrom::try_from(55.703981666666664).unwrap(),\n", "file_path": "tests/parsing.rs", "rank": 88, "score": 8.846497483751389 }, { "content": " /// The actual course and speed 
relative to the ground.\n\n VTG(Option<VTG>),\n\n}\n\n\n\n/// Parses NMEA sentences and stores intermediate parsing state.\n\n/// Parser is tolerant for errors so you should not reinitialize it after errors.\n\npub struct Parser {\n\n buffer: [u8; 79],\n\n buflen: usize,\n\n chksum: u8,\n\n expected_chksum: u8,\n\n parser_state: ParserState,\n\n source_mask: SourceMask,\n\n sentence_mask: SentenceMask,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/lib.rs", "rank": 89, "score": 8.587108851964722 }, { "content": "//! Ok(ParseResult::GGA(None)) => { }, // Got GGA sentence without valid data, receiver ok but has no solution\n\n//! Ok(_) => {}, // Some other sentences..\n\n//! Err(e) => { } // Got parse error\n\n//! }\n\n//! }\n\n//!\n\n//! ```\n\n//!\n\n//! It is possible to ignore some sentences or sources. You can set filter on [Parser](struct.Parser.html) like so:\n\n//! ```rust\n\n//! use nmea0183::{Parser, ParseResult, Sentence, Source};\n\n//!\n\n//! let parser_only_gps_gallileo = Parser::new()\n\n//! .source_filter(Source::GPS | Source::Gallileo);\n\n//! let parser_only_rmc_gga_gps = Parser::new()\n\n//! .source_only(Source::GPS)\n\n//! .sentence_filter(Sentence::RMC | Sentence::GGA);\n\n//! ```\n\n//!\n\n//! 
# Panics\n", "file_path": "src/lib.rs", "rank": 90, "score": 8.232471066763543 }, { "content": "[![Crates.io](https://img.shields.io/badge/crates.io-v0.3.0-orange.svg?longCache=true)](https://crates.io/crates/nmea0183)\n\n[![Build Status](https://travis-ci.org/nsforth/nmea0183.svg?tag=v0.3.0)](https://travis-ci.org/nsforth/nmea0183)\n\n[![Codecov coverage status](https://codecov.io/gh/nsforth/nmea0183/branch/master/graph/badge.svg)](https://codecov.io/gh/nsforth/nmea0183)\n\n# NMEA 0183 parser.\n\n\n\nImplemented most used sentences like RMC, VTG, GGA, GLL.\n\nParser do not use heap memory and relies only on `core`.\n\n\n\nYou should instantiate [Parser](https://docs.rs/nmea0183/latest/nmea0183/struct.Parser.html) with [new](https://docs.rs/nmea0183/latest/nmea0183/struct.Parser.html#method.new) and than use methods like [parse_from_byte](https://docs.rs/nmea0183/latest/nmea0183/struct.Parser.html#method.parse_from_bytes) or [parse_from_bytes](https://docs.rs/nmea0183/latest/nmea0183/struct.Parser.html#method.parse_from_bytes).\n\nIf parser accumulates enough data it will return [ParseResult](https://docs.rs/nmea0183/latest/nmea0183/enum.ParseResult.html) on success or `&str` that describing an error.\n\n\n\nYou do not need to do any preprocessing such as split data to strings or NMEA sentences.\n\n\n", "file_path": "README.md", "rank": 91, "score": 7.9238265927687355 }, { "content": "# Errors\n\n\n\n`Unsupported sentence type.` - Got currently not supported sentence.\n\n\n\n`Checksum error!` - Sentence has wrong checksum, possible data corruption.\n\n\n\n`Source is not supported!` - Unknown source, new sattelite system is launched? :)\n\n\n\n`NMEA format error!` - Possible data corruption. Parser drops all accumulated data and starts seek new sentences.\n\n\n\nIt's possible to got other very rare error messages that relates to protocol errors. 
Receivers nowadays mostly do not violate NMEA specs.\n\n\n\n# Planned features\n\n\n\nGSA and GSV parsing.\n\n\n", "file_path": "README.md", "rank": 92, "score": 7.742489231106702 }, { "content": "use core::convert::TryFrom;\n\nuse nmea0183::coords;\n\nuse nmea0183::datetime;\n\nuse nmea0183::GPSQuality;\n\nuse nmea0183::Mode;\n\nuse nmea0183::GGA;\n\nuse nmea0183::GLL;\n\nuse nmea0183::RMC;\n\nuse nmea0183::VTG;\n\nuse nmea0183::{ParseResult, Parser, Source};\n\n\n\n#[test]\n", "file_path": "tests/parsing.rs", "rank": 93, "score": 7.495105452748103 }, { "content": " Err(e) => (ParserState::WaitStart, Some(Err(e))),\n\n },\n\n ParserState::ChkSumLower => match parse_hex_halfbyte(symbol) {\n\n Ok(s) => {\n\n if ((self.expected_chksum << 4) | s) != self.chksum {\n\n (ParserState::WaitStart, Some(Err(\"Checksum error!\")))\n\n } else {\n\n (ParserState::WaitCR, None)\n\n }\n\n }\n\n Err(e) => (ParserState::WaitStart, Some(Err(e))),\n\n },\n\n ParserState::WaitCR if symbol == b'\\r' => (ParserState::WaitLF, None),\n\n ParserState::WaitLF if symbol == b'\\n' => {\n\n (ParserState::WaitStart, self.parse_sentence().transpose())\n\n }\n\n _ => (ParserState::WaitStart, Some(Err(\"NMEA format error!\"))),\n\n };\n\n self.parser_state = new_state;\n\n return result;\n", "file_path": "src/lib.rs", "rank": 94, "score": 7.43434298473062 }, { "content": " if let Some(r) = self.parser.parse_from_byte(symbol) {\n\n return Some(r);\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\nimpl Parser {\n\n /// Constructs new Parser.\n\n pub fn new() -> Parser {\n\n Parser {\n\n buffer: [0u8; 79],\n\n buflen: 0,\n\n chksum: 0,\n\n expected_chksum: 0,\n\n parser_state: ParserState::WaitStart,\n\n source_mask: Default::default(),\n\n sentence_mask: Default::default(),\n\n }\n", "file_path": "src/lib.rs", "rank": 95, "score": 7.337893388006552 }, { "content": "//!\n\n//! Should not panic. If so please report issue on project page.\n\n//!\n\n//! # Errors\n\n//!\n\n//! 
`Unsupported sentence type.` - Got currently not supported sentence.\n\n//!\n\n//! `Checksum error!` - Sentence has wrong checksum, possible data corruption.\n\n//!\n\n//! `Source is not supported!` - Unknown source, new sattelite system is launched? :)\n\n//!\n\n//! `NMEA format error!` - Possible data corruption. Parser drops all accumulated data and starts seek new sentences.\n\n//!\n\n//! It's possible to got other very rare error messages that relates to protocol errors. Receivers nowadays mostly do not violate NMEA specs.\n\n//!\n\n//! # Planned features\n\n//!\n\n//! GSA and GSV parsing.\n\n//!\n\nuse core::convert::TryFrom;\n", "file_path": "src/lib.rs", "rank": 96, "score": 5.763107421159345 }, { "content": " .map_err(|_| \"Day string is not a number!\")\n\n .and_then(|d| {\n\n if d > 0 && d < 32 {\n\n Ok(d)\n\n } else {\n\n Err(\"Day is not in range 1-31\")\n\n }\n\n })?,\n\n month: (&date[2..4])\n\n .parse()\n\n .map_err(|_| \"Month string is not a number!\")\n\n .and_then(|m| {\n\n if m > 0 && m < 13 {\n\n Ok(m)\n\n } else {\n\n Err(\"Months is not in range 1-12\")\n\n }\n\n })?,\n\n year: (&date[4..6])\n\n .parse::<u16>()\n", "file_path": "src/datetime.rs", "rank": 97, "score": 5.6862436966675025 }, { "content": "pub enum Source {\n\n /// USA Global Positioning System\n\n GPS = 0b1,\n\n /// Russian Federation GLONASS\n\n GLONASS = 0b10,\n\n /// European Union Gallileo\n\n Gallileo = 0b100,\n\n /// China's Beidou\n\n Beidou = 0b1000,\n\n /// Global Navigation Sattelite System. Some combination of other systems. 
Depends on receiver model, receiver settings, etc..\n\n GNSS = 0b10000,\n\n}\n\n\n\n/// Mask for Source filter in Parser.\n\npub struct SourceMask {\n\n mask: u32,\n\n}\n\n\n\nimpl SourceMask {\n\n fn is_masked(&self, source: Source) -> bool {\n", "file_path": "src/lib.rs", "rank": 98, "score": 5.234880895685366 }, { "content": " ParserState::WaitStart if symbol != b'$' => (ParserState::WaitStart, None),\n\n ParserState::ReadUntilChkSum if symbol != b'*' => {\n\n if self.buffer.len() <= self.buflen {\n\n (\n\n ParserState::WaitStart,\n\n Some(Err(\"NMEA sentence is too long!\")),\n\n )\n\n } else {\n\n self.buffer[self.buflen] = symbol;\n\n self.buflen += 1;\n\n self.chksum = self.chksum ^ symbol;\n\n (ParserState::ReadUntilChkSum, None)\n\n }\n\n }\n\n ParserState::ReadUntilChkSum if symbol == b'*' => (ParserState::ChkSumUpper, None),\n\n ParserState::ChkSumUpper => match parse_hex_halfbyte(symbol) {\n\n Ok(s) => {\n\n self.expected_chksum = s;\n\n (ParserState::ChkSumLower, None)\n\n }\n", "file_path": "src/lib.rs", "rank": 99, "score": 5.021381046698436 } ]
Rust
src/channel/bolt/keyset.rs
LNP-WG/lnp-core
f8a24a0a61ae7a1c0a87020fde3caf90320e2c6d
use std::collections::BTreeMap; use amplify::DumbDefault; #[cfg(feature = "serde")] use amplify::ToYamlString; use bitcoin::util::bip32::{ChildNumber, ExtendedPrivKey, KeySource}; use p2p::legacy::{AcceptChannel, ChannelType, OpenChannel}; use secp256k1::{PublicKey, Secp256k1}; use wallet::hd::HardenedIndex; use wallet::scripts::PubkeyScript; #[derive(Clone, PartialEq, Eq, Debug, StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Display, Serialize, Deserialize), serde(crate = "serde_crate"), display(LocalPubkey::to_yaml_string) )] pub struct LocalPubkey { pub key: PublicKey, pub source: KeySource, } impl LocalPubkey { #[inline] pub fn to_bip32_derivation_map(&self) -> BTreeMap<PublicKey, KeySource> { bmap! { self.key => self.source.clone() } } #[inline] pub fn to_bitcoin_pk(&self) -> bitcoin::PublicKey { bitcoin::PublicKey::new(self.key) } } #[derive(Clone, PartialEq, Eq, Debug, StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Display, Serialize, Deserialize), serde(crate = "serde_crate"), display(LocalKeyset::to_yaml_string) )] pub struct LocalKeyset { pub funding_pubkey: LocalPubkey, pub revocation_basepoint: LocalPubkey, pub payment_basepoint: LocalPubkey, pub delayed_payment_basepoint: LocalPubkey, pub htlc_basepoint: LocalPubkey, pub first_per_commitment_point: LocalPubkey, pub shutdown_scriptpubkey: Option<PubkeyScript>, pub static_remotekey: bool, } #[derive(Clone, PartialEq, Eq, Debug, StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Display, Serialize, Deserialize), serde(crate = "serde_crate"), display(RemoteKeyset::to_yaml_string) )] pub struct RemoteKeyset { pub funding_pubkey: PublicKey, pub revocation_basepoint: PublicKey, pub payment_basepoint: PublicKey, pub delayed_payment_basepoint: PublicKey, pub htlc_basepoint: PublicKey, pub first_per_commitment_point: PublicKey, pub shutdown_scriptpubkey: Option<PubkeyScript>, pub static_remotekey: bool, } #[cfg(feature = "serde")] impl ToYamlString for 
LocalPubkey {} #[cfg(feature = "serde")] impl ToYamlString for LocalKeyset {} #[cfg(feature = "serde")] impl ToYamlString for RemoteKeyset {} impl From<&OpenChannel> for RemoteKeyset { fn from(open_channel: &OpenChannel) -> Self { Self { funding_pubkey: open_channel.funding_pubkey, revocation_basepoint: open_channel.revocation_basepoint, payment_basepoint: open_channel.payment_point, delayed_payment_basepoint: open_channel.delayed_payment_basepoint, htlc_basepoint: open_channel.htlc_basepoint, first_per_commitment_point: open_channel.first_per_commitment_point, shutdown_scriptpubkey: open_channel.shutdown_scriptpubkey.clone(), static_remotekey: false, } } } impl From<&AcceptChannel> for RemoteKeyset { fn from(accept_channel: &AcceptChannel) -> Self { Self { funding_pubkey: accept_channel.funding_pubkey, revocation_basepoint: accept_channel.revocation_basepoint, payment_basepoint: accept_channel.payment_point, delayed_payment_basepoint: accept_channel.delayed_payment_basepoint, htlc_basepoint: accept_channel.htlc_basepoint, first_per_commitment_point: accept_channel .first_per_commitment_point, shutdown_scriptpubkey: accept_channel.shutdown_scriptpubkey.clone(), static_remotekey: accept_channel .channel_type .map(ChannelType::has_static_remotekey) .unwrap_or_default(), } } } impl DumbDefault for LocalPubkey { fn dumb_default() -> Self { LocalPubkey { key: dumb_pubkey!(), source: KeySource::default(), } } } impl DumbDefault for LocalKeyset { fn dumb_default() -> Self { Self { funding_pubkey: DumbDefault::dumb_default(), revocation_basepoint: DumbDefault::dumb_default(), payment_basepoint: DumbDefault::dumb_default(), delayed_payment_basepoint: DumbDefault::dumb_default(), htlc_basepoint: DumbDefault::dumb_default(), first_per_commitment_point: DumbDefault::dumb_default(), shutdown_scriptpubkey: None, static_remotekey: false, } } } impl DumbDefault for RemoteKeyset { fn dumb_default() -> Self { Self { funding_pubkey: dumb_pubkey!(), revocation_basepoint: 
dumb_pubkey!(), payment_basepoint: dumb_pubkey!(), delayed_payment_basepoint: dumb_pubkey!(), htlc_basepoint: dumb_pubkey!(), first_per_commitment_point: dumb_pubkey!(), shutdown_scriptpubkey: None, static_remotekey: false, } } } impl LocalKeyset { pub fn with<C: secp256k1::Signing>( secp: &Secp256k1<C>, channel_source: KeySource, channel_xpriv: ExtendedPrivKey, shutdown_scriptpubkey: Option<PubkeyScript>, ) -> Self { let fingerpint = channel_source.0; let keys = (0u16..=6) .into_iter() .map(HardenedIndex::from) .map(ChildNumber::from) .map(|index| [index]) .map(|path| { let derivation_path = channel_source.1.clone().extend(path); let seckey = channel_xpriv .derive_priv(secp, &path) .expect("negligible probability") .private_key; LocalPubkey { key: PublicKey::from_secret_key(secp, &seckey), source: (fingerpint, derivation_path), } }) .collect::<Vec<_>>(); Self { funding_pubkey: keys[0].clone(), revocation_basepoint: keys[3].clone(), payment_basepoint: keys[1].clone(), delayed_payment_basepoint: keys[2].clone(), htlc_basepoint: keys[5].clone(), first_per_commitment_point: keys[4].clone(), shutdown_scriptpubkey, static_remotekey: false, } } }
use std::collections::BTreeMap; use amplify::DumbDefault; #[cfg(feature = "serde")] use amplify::ToYamlString; use bitcoin::util::bip32::{ChildNumber, ExtendedPrivKey, KeySource}; use p2p::legacy::{AcceptChannel, ChannelType, OpenChannel}; use secp256k1::{PublicKey, Secp256k1}; use wallet::hd::HardenedIndex; use wallet::scripts::PubkeyScript; #[derive(Clone, PartialEq, Eq, Debug, StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Display, Serialize, Deserialize), serde(crate = "serde_crate"), display(LocalPubkey::to_yaml_string) )] pub struct LocalPubkey { pub key: PublicKey, pub source: KeySource, } impl LocalPubkey { #[inline] pub fn to_bip32_derivation_map(&self) -> BTreeMap<PublicKey, KeySource> { bmap! { self.key => self.source.clone() } } #[inline] pub fn to_bitcoin_pk(&self) -> bitcoin::PublicKey { bitcoin::PublicKey::new(self.key) } } #[derive(Clone, PartialEq, Eq, Debug, StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Display, Serialize, Deserialize), serde(crate = "serde_crate"), display(LocalKeyset::to_yaml_string) )] pub struct LocalKeyset { pub funding_pubkey: LocalPubkey, pub revocation_basepoint: LocalPubkey, pub payment_basepoint: LocalPubkey, pub delayed_payment_basepoint: LocalPubkey, pub htlc_basepoint: LocalPubkey, pub first_per_commitment_point: LocalPubkey, pub shutdown_scriptpubkey: Option<PubkeyScript>, pub static_remotekey: bool, } #[derive(Clone, PartialEq, Eq, Debug, StrictEncode, StrictDecode)] #[cfg_attr( feature = "serde", derive(Display, Serialize, Deserialize), serde(crate = "serde_crate"), display(RemoteKeyset::to_yaml_string) )] pub struct RemoteKeyset { pub funding_pubkey: PublicKey, pub revocation_basepoint: PublicKey, pub payment_basepoint: PublicKey, pub delayed_payment_basepoint: PublicKey, pub htlc_basepoint: PublicKey, pub first_per_commitment_point: PublicKey, pub shutdown_scriptpubkey: Option<PubkeyScript>, pub static_remotekey: bool, } #[cfg(feature = "serde")] impl ToYamlString for 
LocalPubkey {} #[cfg(feature = "serde")] impl ToYamlString for LocalKeyset {} #[cfg(feature = "serde")] impl ToYamlString for RemoteKeyset {} impl From<&OpenChannel> for RemoteKeyset { fn from(open_channel: &OpenChannel) -> Self { Self { funding_pubkey: open_channel.funding_pubkey, revocation_basepoint: open_channel.revocation_basepoint, payment_basepoint: open_channel.payment_point, delayed_payment_basepoint: open_channel.delayed_payment_basepoint, htlc_basepoint: open_channel.htlc_basepoint, first_per_commitment_point: open_channel.first_per_commitment_point, shutdown_scriptpubkey: open_channel.shutdown_scriptpubkey.clone(), static_remotekey: false, } } } impl From<&AcceptChannel> for RemoteKeyset { fn from(accept_channel: &AcceptChannel) -> Self { Self { funding_pubkey: accept_channel.funding_pubkey, revocation_basepoint: accept_channel.revocation_basepoint, payment_basepoint: accept_channel.payment_point, delayed_payment_basepoint: accept_channel.delayed_payment_basepoint, htlc_basepoint: accept_channel.htlc_basepoint, first_per_commitment_point: accept_channel .first_per_commitment_point, shutdown_scriptpubkey: accept_channel.shutdown_scriptpubkey.clone(), static_remotekey: accept_channel .channel_type .map(ChannelType::has_static_remotekey) .unwrap_or_default(), } } } impl DumbDefault for LocalPubkey { fn dumb_default() -> Self { LocalPubkey { key: dumb_pubkey!(), source: KeySource::default(), } } } impl DumbDefault for LocalKeyset { fn dumb_default() -> Self { Self { funding_pubkey: DumbDefault::dumb_default(),
} impl DumbDefault for RemoteKeyset { fn dumb_default() -> Self { Self { funding_pubkey: dumb_pubkey!(), revocation_basepoint: dumb_pubkey!(), payment_basepoint: dumb_pubkey!(), delayed_payment_basepoint: dumb_pubkey!(), htlc_basepoint: dumb_pubkey!(), first_per_commitment_point: dumb_pubkey!(), shutdown_scriptpubkey: None, static_remotekey: false, } } } impl LocalKeyset { pub fn with<C: secp256k1::Signing>( secp: &Secp256k1<C>, channel_source: KeySource, channel_xpriv: ExtendedPrivKey, shutdown_scriptpubkey: Option<PubkeyScript>, ) -> Self { let fingerpint = channel_source.0; let keys = (0u16..=6) .into_iter() .map(HardenedIndex::from) .map(ChildNumber::from) .map(|index| [index]) .map(|path| { let derivation_path = channel_source.1.clone().extend(path); let seckey = channel_xpriv .derive_priv(secp, &path) .expect("negligible probability") .private_key; LocalPubkey { key: PublicKey::from_secret_key(secp, &seckey), source: (fingerpint, derivation_path), } }) .collect::<Vec<_>>(); Self { funding_pubkey: keys[0].clone(), revocation_basepoint: keys[3].clone(), payment_basepoint: keys[1].clone(), delayed_payment_basepoint: keys[2].clone(), htlc_basepoint: keys[5].clone(), first_per_commitment_point: keys[4].clone(), shutdown_scriptpubkey, static_remotekey: false, } } }
revocation_basepoint: DumbDefault::dumb_default(), payment_basepoint: DumbDefault::dumb_default(), delayed_payment_basepoint: DumbDefault::dumb_default(), htlc_basepoint: DumbDefault::dumb_default(), first_per_commitment_point: DumbDefault::dumb_default(), shutdown_scriptpubkey: None, static_remotekey: false, } }
function_block-function_prefix_line
[ { "content": "fn lnp_out_channel_funding_key() -> ProprietaryKey {\n\n ProprietaryKey {\n\n prefix: PSBT_LNP_PROPRIETARY_PREFIX.to_vec(),\n\n subtype: PSBT_OUT_LNP_CHANNEL_FUNDING,\n\n key: vec![],\n\n }\n\n}\n\n\n", "file_path": "src/channel/funding.rs", "rank": 0, "score": 85261.09816228738 }, { "content": "/// Some features don't make sense on a per-channels or per-node basis, so each\n\n/// feature defines how it is presented in those contexts. Some features may be\n\n/// required for opening a channel, but not a requirement for use of the\n\n/// channel, so the presentation of those features depends on the feature\n\n/// itself.\n\n///\n\n/// # Specification\n\n/// <https://github.com/lightningnetwork/lightning-rfc/blob/master/09-features.md#bolt-9-assigned-feature-flags>\n\npub trait FeatureContext:\n\n Display\n\n + Debug\n\n + Copy\n\n + Clone\n\n + PartialEq\n\n + Eq\n\n + PartialOrd\n\n + Ord\n\n + Hash\n\n + Default\n\n{\n\n}\n\n\n\n/// Type representing `init` message feature context.\n\n#[derive(\n\n Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display, Default\n\n)]\n\n#[display(\"I\", alt = \"init\")]\n\npub struct InitContext;\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 1, "score": 70486.10433777426 }, { "content": "struct TlvPayment {\n\n #[lightning_encoding(tlv = 2)]\n\n #[cfg_attr(feature = \"strict_encoding\", network_encoding(tlv = 2))]\n\n amt_to_forward: Option<u64>,\n\n\n\n #[lightning_encoding(tlv = 4)]\n\n #[cfg_attr(feature = \"strict_encoding\", network_encoding(tlv = 4))]\n\n outgoing_cltv_value: Option<u32>,\n\n\n\n #[lightning_encoding(tlv = 6)]\n\n #[cfg_attr(feature = \"strict_encoding\", network_encoding(tlv = 6))]\n\n short_channel_id: Option<ShortChannelId>,\n\n\n\n #[lightning_encoding(tlv = 8)]\n\n #[cfg_attr(feature = \"strict_encoding\", network_encoding(tlv = 8))]\n\n payment_data: Option<PaymentData>,\n\n\n\n #[lightning_encoding(unknown_tlvs)]\n\n #[cfg_attr(feature = \"strict_encoding\", 
network_encoding(unknown_tlvs))]\n\n unknown: tlv::Stream,\n", "file_path": "lnp2p/src/legacy/bolt4.rs", "rank": 2, "score": 50571.66794223986 }, { "content": "/// Trait for any data that can be used as a part of the channel state\n\npub trait State: StrictEncode + StrictDecode + DumbDefault {\n\n fn to_funding(&self) -> Funding;\n\n fn set_funding(&mut self, funding: &Funding);\n\n}\n\n\n\npub type ExtensionQueue<N> = BTreeMap<N, Box<dyn ChannelExtension<N>>>;\n\n\n\n/// Channel operates as a three sets of extensions, where each set is applied\n\n/// to construct the transaction graph and the state in a strict order one after\n\n/// other. The order of the extensions within each set is defined by the\n\n/// concrete type implementing `extension::Nomenclature` marker trait, provided\n\n/// as a type parameter `N`\n\n#[derive(Getters)]\n\npub struct Channel<N>\n\nwhere\n\n N: Nomenclature,\n\n N::State: State,\n\n{\n\n /* TODO: Add channel graph cache.\n\n For this we need to track each state mutation and reset the cached data\n", "file_path": "src/channel/channel.rs", "rank": 3, "score": 44738.351995355784 }, { "content": "/// Marker trait for creating extension nomenclatures, defining order in which\n\n/// extensions are called to process the data.\n\n///\n\n/// Extension nomenclature is an enum with members convertible into `u16`\n\n/// representation\n\npub trait Nomenclature\n\nwhere\n\n Self: Clone\n\n + Copy\n\n + PartialEq\n\n + Eq\n\n + PartialOrd\n\n + Ord\n\n + Hash\n\n + Debug\n\n + Display\n\n + Default\n\n + TryFrom<u16, Error = strict_encoding::Error>\n\n + Into<u16>,\n\n{\n\n type State: DumbDefault;\n\n type Error: std::error::Error;\n\n type PeerMessage;\n\n type UpdateMessage;\n\n type UpdateRequest;\n\n}\n\n\n", "file_path": "src/extension.rs", "rank": 4, "score": 42939.393313806926 }, { "content": "pub trait History {\n\n type State;\n\n type Error: std::error::Error;\n\n\n\n fn height(&self) -> usize;\n\n fn get(&self, height: usize) -> 
Result<Self::State, Self::Error>;\n\n fn top(&self) -> Result<Self::State, Self::Error>;\n\n fn bottom(&self) -> Result<Self::State, Self::Error>;\n\n fn dig(&self) -> Result<Self::State, Self::Error>;\n\n fn push(&mut self, state: Self::State) -> Result<&mut Self, Self::Error>;\n\n}\n", "file_path": "src/channel/channel.rs", "rank": 5, "score": 41445.56617754711 }, { "content": "/// Marker trait for creating routing extension nomenclatures, defining order in\n\n/// which extensions are called to construct the route.\n\n///\n\n/// Extension nomenclature is an enum with members convertible into `u16`\n\n/// representation\n\npub trait Nomenclature\n\nwhere\n\n Self: extension::Nomenclature,\n\n{\n\n type HopPayload: SphinxPayload;\n\n\n\n fn default_extensions() -> Vec<Box<dyn RouterExtension<Self>>>;\n\n\n\n /// Updates router extension structure from peer message. Processed before\n\n /// each of the registered extensions gets [`Extension::update_from_peer`]\n\n fn update_from_peer(\n\n router: &mut Router<Self>,\n\n message: &Self::PeerMessage,\n\n ) -> Result<(), <Self as extension::Nomenclature>::Error>;\n\n}\n\n\n\n/// Generic router consisting of a queue of routing extensions, implementing\n\n/// specific router logic\n\npub struct Router<N>\n\nwhere\n", "file_path": "src/router/router.rs", "rank": 6, "score": 41445.56617754711 }, { "content": "pub trait ChannelExtension<N>\n\nwhere\n\n N: channel::Nomenclature,\n\n N::State: channel::State,\n\n Self: Extension<N>,\n\n{\n\n /// Constructs boxed extension objects which can be inserted into channel\n\n /// extension pipeline\n\n fn new() -> Box<dyn ChannelExtension<N>>\n\n where\n\n Self: Sized;\n\n\n\n /// Applies state to the channel transaction graph\n\n fn build_graph(\n\n &self,\n\n tx_graph: &mut TxGraph,\n\n remote: bool,\n\n ) -> Result<(), <N as Nomenclature>::Error>;\n\n}\n\n\n", "file_path": "src/extension.rs", "rank": 7, "score": 39368.721190418415 }, { "content": "/// Channel constructor specific 
methods\n\npub trait ChannelConstructor<N>\n\nwhere\n\n N: channel::Nomenclature,\n\n N::State: channel::State,\n\n Self: ChannelExtension<N> + Default,\n\n{\n\n fn enrich_funding(\n\n &self,\n\n psbt: &mut Psbt,\n\n funding: &Funding,\n\n ) -> Result<(), <N as Nomenclature>::Error>;\n\n}\n", "file_path": "src/extension.rs", "rank": 8, "score": 39368.721190418415 }, { "content": "pub trait RouterExtension<N>\n\nwhere\n\n N: router::Nomenclature,\n\n Self: Extension<N>,\n\n{\n\n /// Constructs boxed extension objects which can be inserted into router\n\n /// extension pipeline\n\n fn new() -> Box<dyn RouterExtension<N>>\n\n where\n\n Self: Sized;\n\n\n\n fn build_route(\n\n &mut self,\n\n payment: PaymentRequest,\n\n route: &mut Vec<Hop<<N as router::Nomenclature>::HopPayload>>,\n\n );\n\n}\n\n\n", "file_path": "src/extension.rs", "rank": 9, "score": 39368.721190418415 }, { "content": "pub trait ChannelGraph {\n\n fn tx_by_role(&self, role: TxRole) -> RoleIter;\n\n fn iter(&self) -> ChannelIter;\n\n fn funding_tx(&self) -> &ChannelTx;\n\n fn refund_tx(&self) -> &ChannelTx;\n\n}\n\n\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\npub struct ChannelProposal {\n\n channel: FundingTx,\n\n pub signatures: NodeSignatureMap, // signatures on the graph using node key\n\n index: Option<BTreeMap<TxRole, Vec<ChannelTx>>>,\n\n}\n", "file_path": "lnp2p/src/bifrost/proposals.rs", "rank": 10, "score": 38804.371287998874 }, { "content": "pub trait PsbtLnpFunding {\n\n fn channel_funding_output(&self) -> Option<usize>;\n\n fn set_channel_funding_output(&mut self, vout: u16) -> Result<(), Error>;\n\n fn channel_funding_outpoint(&self) -> Result<OutPoint, Error>;\n\n fn extract_channel_funding(self) -> Result<Funding, Error>;\n\n}\n\n\n\nimpl PsbtLnpFunding for Psbt {\n\n fn channel_funding_output(&self) -> Option<usize> {\n\n let funding_key = lnp_out_channel_funding_key();\n\n self.outputs\n\n .iter()\n\n .enumerate()\n\n 
.find(|(_, output)| output.proprietary.get(&funding_key).is_some())\n\n .map(|(index, _)| index)\n\n }\n\n\n\n fn set_channel_funding_output(&mut self, vout: u16) -> Result<(), Error> {\n\n self.outputs\n\n .get_mut(vout as usize)\n", "file_path": "src/channel/funding.rs", "rank": 11, "score": 38804.371287998874 }, { "content": "pub trait ScriptGenerators {\n\n fn ln_funding(\n\n amount: u64,\n\n local_pubkey: &LocalPubkey,\n\n remote_pubkey: PublicKey,\n\n ) -> Self;\n\n\n\n /// NB: We use argument named `local_delayedpubkey`, but in fact the source\n\n /// for this key is the remote node key, since we generate a transaction\n\n /// which we will sign for the remote node.\n\n fn ln_to_local(\n\n amount: u64,\n\n revocationpubkey: PublicKey,\n\n local_delayedpubkey: PublicKey,\n\n to_self_delay: u16,\n\n ) -> Self;\n\n\n\n /// NB: We use argument named `remote_pubkey`, but in fact the source\n\n /// for this key is the local node key, since we generate a transaction\n\n /// which we will sign for the remote node.\n", "file_path": "src/channel/bolt/channel.rs", "rank": 12, "score": 38804.371287998874 }, { "content": "pub trait Extension<N: Nomenclature> {\n\n fn identity(&self) -> N;\n\n\n\n /// Perform a sate change and produce a message which should be communicated\n\n /// to peers notifying them about the state change\n\n #[allow(dead_code, unused_variables)]\n\n fn state_change(\n\n &mut self,\n\n request: &<N as extension::Nomenclature>::UpdateRequest,\n\n message: &mut <N as extension::Nomenclature>::PeerMessage,\n\n ) -> Result<(), <N as extension::Nomenclature>::Error> {\n\n // Do nothing by default\n\n Ok(())\n\n }\n\n\n\n /// Updates extension state from the data taken from the message received\n\n /// from the remote peer\n\n fn update_from_peer(\n\n &mut self,\n\n message: &<N as extension::Nomenclature>::PeerMessage,\n", "file_path": "src/extension.rs", "rank": 13, "score": 38789.41539827482 }, { "content": "pub trait ScriptGenerators {\n\n fn 
ln_offered_htlc(\n\n amount: u64,\n\n revocationpubkey: PublicKey,\n\n local_htlcpubkey: PublicKey,\n\n remote_htlcpubkey: PublicKey,\n\n payment_hash: HashLock,\n\n ) -> Self;\n\n\n\n fn ln_received_htlc(\n\n amount: u64,\n\n revocationpubkey: PublicKey,\n\n local_htlcpubkey: PublicKey,\n\n remote_htlcpubkey: PublicKey,\n\n cltv_expiry: u32,\n\n payment_hash: HashLock,\n\n ) -> Self;\n\n\n\n fn ln_htlc_output(\n\n amount: u64,\n", "file_path": "src/channel/bolt/extensions/htlc.rs", "rank": 14, "score": 37631.21587539857 }, { "content": "pub trait TxGenerators {\n\n fn ln_htlc(\n\n amount: u64,\n\n outpoint: OutPoint,\n\n cltv_expiry: u32,\n\n revocationpubkey: PublicKey,\n\n local_delayedpubkey: PublicKey,\n\n to_self_delay: u16,\n\n ) -> Self;\n\n}\n\n\n\nimpl TxGenerators for Transaction {\n\n /// NB: For HTLC Success transaction always set `cltv_expiry` parameter\n\n /// to zero!\n\n fn ln_htlc(\n\n amount: u64,\n\n outpoint: OutPoint,\n\n cltv_expiry: u32,\n\n revocationpubkey: PublicKey,\n\n local_delayedpubkey: PublicKey,\n", "file_path": "src/channel/bolt/extensions/htlc.rs", "rank": 15, "score": 37631.21587539857 }, { "content": "/// Marker trait for creating channel extension nomenclatures, defining order in\n\n/// which extensions are applied to the channel transaction structure.\n\n///\n\n/// Extension nomenclature is an enum with members convertible into `u16`\n\n/// representation\n\npub trait Nomenclature: extension::Nomenclature\n\nwhere\n\n <Self as extension::Nomenclature>::State: State,\n\n{\n\n type Constructor: ChannelConstructor<Self>;\n\n\n\n /// Returns set of default channel extenders\n\n fn default_extenders() -> Vec<Box<dyn ChannelExtension<Self>>> {\n\n Vec::default()\n\n }\n\n\n\n /// Returns set of default channel modifiers\n\n fn default_modifiers() -> Vec<Box<dyn ChannelExtension<Self>>> {\n\n Vec::default()\n\n }\n\n\n\n /// Updates channel extension structure from peer message. 
Processed before\n\n /// each of the registered extensions gets [`Extension::update_from_peer`]\n\n fn update_from_peer(\n\n channel: &mut Channel<Self>,\n\n message: &Self::PeerMessage,\n\n ) -> Result<(), <Self as extension::Nomenclature>::Error>;\n\n}\n\n\n", "file_path": "src/channel/channel.rs", "rank": 16, "score": 37521.917610379314 }, { "content": "pub trait TxRole: Clone + From<u16> + Into<u16> {}\n", "file_path": "src/channel/tx_graph.rs", "rank": 17, "score": 32992.596764261834 }, { "content": "pub trait TxIndex: Clone + From<u64> + Into<u64> {}\n\n\n\nimpl TxRole for u16 {}\n\nimpl TxIndex for u64 {}\n\n\n\n#[derive(Getters, Clone, PartialEq)]\n\npub struct TxGraph<'channel> {\n\n /// Read-only data for extensions on the number of channel parties\n\n funding: &'channel Funding,\n\n pub cmt_version: i32,\n\n pub cmt_locktime: u32,\n\n pub cmt_sequence: u32,\n\n pub cmt_outs: Vec<psbt::Output>,\n\n graph: BTreeMap<u16, BTreeMap<u64, Psbt>>,\n\n}\n\n\n\nimpl<'channel> TxGraph<'channel>\n\nwhere\n\n Self: 'channel,\n\n{\n", "file_path": "src/channel/tx_graph.rs", "rank": 18, "score": 32992.596764261834 }, { "content": "use secp256k1::PublicKey;\n\n\n\nuse super::{\n\n CommonParams, Direction, HtlcKnown, HtlcSecret, Lifecycle, LocalKeyset,\n\n PeerParams, Policy, RemoteKeyset,\n\n};\n\nuse crate::channel::{Funding, State};\n\n\n\n#[derive(Clone, Debug)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Display, Serialize, Deserialize),\n\n serde(crate = \"serde_crate\"),\n\n display(ChannelState::to_yaml_string)\n\n)]\n\npub struct ChannelState {\n\n pub funding: Funding,\n\n\n\n /// Current channel lifecycle stage\n", "file_path": "src/channel/bolt/state.rs", "rank": 22, "score": 29.30914513890172 }, { "content": " feature = \"serde\",\n\n serde_as,\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\", transparent)\n\n)]\n\n#[derive(\n\n Wrapper, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, 
Display, From\n\n)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\n#[display(inner)]\n\npub struct ProtocolName(String);\n\n\n\nimpl FromStr for ProtocolName {\n\n type Err = ProtocolNameError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n if s.chars().any(|ch| {\n\n ch.is_ascii_uppercase() || ch.is_ascii_digit() || ch == '-'\n\n }) {\n\n Ok(ProtocolName(s.to_owned()))\n", "file_path": "lnp2p/src/bifrost/types.rs", "rank": 23, "score": 29.070794035962876 }, { "content": "#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n\n)]\n\npub struct ChannelFeatures {}\n\n\n\nimpl TryFrom<FlagVec> for ChannelFeatures {\n\n type Error = Error;\n\n\n\n fn try_from(_: FlagVec) -> Result<Self, Self::Error> {\n\n Ok(ChannelFeatures {})\n\n }\n\n}\n\n\n\nimpl From<ChannelFeatures> for FlagVec {\n\n fn from(_: ChannelFeatures) -> Self {\n\n FlagVec::default()\n\n }\n\n}\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 24, "score": 28.326782971418556 }, { "content": "extern crate strict_encoding;\n\n\n\n#[cfg(feature = \"serde\")]\n\n#[macro_use]\n\nextern crate serde_with;\n\n#[cfg(feature = \"serde\")]\n\nextern crate serde_crate as serde;\n\n\n\npub extern crate lnp2p as p2p;\n\n\n\nmacro_rules! dumb_pubkey {\n\n () => {\n\n secp256k1::PublicKey::from_secret_key(\n\n secp256k1::SECP256K1,\n\n &secp256k1::ONE_KEY,\n\n )\n\n };\n\n}\n\n\n\npub mod channel;\n\npub mod extension;\n\npub mod router;\n\n\n\npub use channel::Channel;\n\npub use extension::{\n\n ChannelConstructor, ChannelExtension, Extension, RouterExtension,\n\n};\n", "file_path": "src/lib.rs", "rank": 25, "score": 27.020187622670846 }, { "content": "#[cfg(feature = \"serde\")]\n\n#[macro_use]\n\nextern crate serde_with;\n\n\n\nmacro_rules! 
dumb_pubkey {\n\n () => {\n\n secp256k1::PublicKey::from_secret_key(\n\n secp256k1::SECP256K1,\n\n &secp256k1::ONE_KEY,\n\n )\n\n };\n\n}\n\n\n\n#[cfg(feature = \"bifrost\")]\n\npub mod bifrost;\n\n#[cfg(feature = \"legacy\")]\n\npub mod legacy;\n", "file_path": "lnp2p/src/lib.rs", "rank": 26, "score": 26.702437148790835 }, { "content": "#[display(doc_comments)]\n\n/// incorrect naminng for protocol {0}: protocol name in Bifrost can contain\n\n/// only ASCII alphanumeric characters and dashes\n\npub struct ProtocolNameError(pub String);\n\n\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n serde_as,\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\", transparent)\n\n)]\n\n#[derive(\n\n Wrapper, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default, From\n\n)]\n\npub struct ProtocolList(Vec<ProtocolName>);\n\n\n\nimpl IntoIterator for ProtocolList {\n\n type Item = ProtocolName;\n\n type IntoIter = std::vec::IntoIter<ProtocolName>;\n\n\n", "file_path": "lnp2p/src/bifrost/types.rs", "rank": 27, "score": 26.38510551787038 }, { "content": "}\n\n\n\n#[cfg(feature = \"strict_encoding\")]\n\nimpl StrictDecode for NodeColor {\n\n #[inline]\n\n fn strict_decode<D: io::Read>(\n\n d: D,\n\n ) -> Result<Self, strict_encoding::Error> {\n\n Self::decode(d).map_err(strict_encoding::Error::from)\n\n }\n\n}\n\n\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n serde_as,\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\", transparent)\n\n)]\n\n#[derive(\n\n Wrapper,\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 28, "score": 26.096062910921162 }, { "content": " pub preimage: HashPreimage,\n\n pub id: u64,\n\n pub cltv_expiry: u32,\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n\n)]\n\npub struct HtlcSecret {\n\n pub amount: u64,\n\n pub hashlock: HashLock,\n\n 
pub id: u64,\n\n pub cltv_expiry: u32,\n\n}\n\n\n\n#[derive(Getters, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n", "file_path": "src/channel/bolt/extensions/htlc.rs", "rank": 29, "score": 25.949621915300806 }, { "content": "use p2p::legacy::ChannelType;\n\nuse wallet::hlc::{HashLock, HashPreimage};\n\nuse wallet::psbt;\n\nuse wallet::psbt::{Psbt, PsbtVersion};\n\nuse wallet::scripts::{LockScript, PubkeyScript, WitnessScript};\n\n\n\nuse crate::channel::bolt::util::UpdateReq;\n\nuse crate::channel::bolt::{BoltExt, ChannelState, Error, TxType};\n\nuse crate::channel::tx_graph::TxGraph;\n\nuse crate::{ChannelExtension, Extension};\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n\n)]\n\npub struct HtlcKnown {\n\n pub amount: u64,\n", "file_path": "src/channel/bolt/extensions/htlc.rs", "rank": 30, "score": 24.88674091041491 }, { "content": "#[derive(\n\n Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Display, Error\n\n)]\n\n#[display(doc_comments)]\n\npub enum Error {\n\n /// no funding output found in the funding transaction. 
The funding output\n\n /// must be marked with proprietary key having \"LNP\" prefix and 0x01\n\n /// subtype.\n\n NoFundingOutput,\n\n\n\n /// funding transaction does not contain output #{0} specified as a\n\n /// funding outpoint\n\n WrongOutput(u16),\n\n}\n\n\n\n/// Information about channel funding\n\n#[derive(Getters, Clone, PartialEq, Debug, StrictEncode, StrictDecode)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n", "file_path": "src/channel/funding.rs", "rank": 32, "score": 24.374582601592838 }, { "content": " }\n\n}\n\n\n\n/// Structure containing part of the channel configuration (and state, as it\n\n/// contains adjustible fee) which must follow specific policies and be accepted\n\n/// or validated basing on those policies and additional protocol-level\n\n/// requirements.\n\n///\n\n/// This information applies for both channel peers and used in constructing\n\n/// both sides of asymmetric transactions.\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, StrictEncode, StrictDecode)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Display, Serialize, Deserialize),\n\n serde(crate = \"serde_crate\"),\n\n display(CommonParams::to_yaml_string)\n\n)]\n\npub struct CommonParams {\n\n /// Minimum depth of the funding transaction before the channel is\n\n /// considered open\n", "file_path": "src/channel/bolt/policy.rs", "rank": 33, "score": 23.853035541691263 }, { "content": "use bitcoin::hashes::sha256;\n\nuse bitcoin::Txid;\n\nuse bitcoin_scripts::hlc::{HashLock, HashPreimage};\n\nuse bitcoin_scripts::PubkeyScript;\n\nuse internet2::presentation::sphinx::Onion;\n\nuse internet2::tlv;\n\nuse secp256k1::ecdsa::Signature;\n\nuse secp256k1::{PublicKey, SecretKey};\n\n\n\nuse super::{ChannelId, TempChannelId};\n\nuse crate::legacy::PaymentOnion;\n\n\n\n/// Total length of payment Sphinx package\n\npub const PAYMENT_SPHINX_LEN: usize = 1300;\n\n\n\n/// Channel types are an explicit enumeration: for convenience of future\n\n/// 
definitions they reuse even feature bits, but they are not an arbitrary\n\n/// combination (they represent the persistent features which affect the channel\n\n/// operation).\n\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display)]\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 34, "score": 23.365564957299615 }, { "content": "use std::str::FromStr;\n\n\n\nuse amplify::flags::FlagVec;\n\nuse lightning_encoding::{self, LightningDecode, LightningEncode};\n\n#[cfg(feature = \"serde\")]\n\nuse serde_with::{As, DisplayFromStr};\n\n\n\n/// Feature-flags-related errors\n\n#[derive(\n\n Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash, Debug, Display, Error,\n\n From\n\n)]\n\n#[display(doc_comments)]\n\npub enum Error {\n\n #[from]\n\n /// feature flags inconsistency: {0}\n\n FeaturesInconsistency(NoRequiredFeatureError),\n\n\n\n /// unknown even feature flag with number {0}\n\n UnknownEvenFeature(u16),\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 35, "score": 22.93605784250685 }, { "content": "/// Channel lifecycle: states of the channel state machine\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n\n)]\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[repr(u8)]\n\npub enum Lifecycle {\n\n /// Channel is initialized, communications with the remote peer has not\n\n /// started yet\n\n #[display(\"INIT\")]\n\n Initial,\n\n\n\n /// Sent or received `open_channel`\n\n #[display(\"PROPOSED\")]\n\n Proposed,\n\n\n\n /// Sent or received `accept_channel`\n", "file_path": "src/channel/bolt/util.rs", "rank": 36, "score": 22.44953747760239 }, { "content": "\n\n/// Features provided in the `init` message and announced with\n\n/// `node_announcement`.\n\n///\n\n/// Flags are numbered from the least-significant bit, at bit 0 (i.e. 0x1, an\n\n/// even bit). 
They are generally assigned in pairs so that features can be\n\n/// introduced as optional (odd bits) and later upgraded to be compulsory (even\n\n/// bits), which will be refused by outdated nodes: see BOLT #1: The init\n\n/// Message.\n\n///\n\n/// # Specification\n\n/// <https://github.com/lightningnetwork/lightning-rfc/blob/master/09-features.md>\n\n#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n\n)]\n\npub struct InitFeatures {\n\n /// Requires or supports extra `channel_reestablish` fields\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 37, "score": 22.279588949402246 }, { "content": " ///\n\n /// # Returns\n\n /// Bit number in feature vector if the feature is allowed for the provided\n\n /// `required` condition; `None` otherwise.\n\n pub fn bit(self, required: bool) -> Option<u16> {\n\n if self == Feature::InitialRoutingSync && required {\n\n return None;\n\n }\n\n Some(self as u16 + !required as u16)\n\n }\n\n}\n\n\n\n/// Error reporting unrecognized feature name\n\n#[derive(\n\n Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Display, Error, From\n\n)]\n\n#[display(\"the provided feature name is not known: {0}\")]\n\npub struct UnknownFeatureError(pub String);\n\n\n\nimpl FromStr for Feature {\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 38, "score": 21.825744506054605 }, { "content": " }\n\n\n\n /// Detects whether channel should be announced\n\n #[inline]\n\n pub fn should_announce_channel(&self) -> bool {\n\n self.channel_flags & 0x01 == 0x01\n\n }\n\n}\n\n\n\n/// This message contains information about a node and indicates its acceptance\n\n/// of the new channel. 
This is the second step toward creating the funding\n\n/// transaction and both versions of the commitment transaction.\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(\n\n feature = \"strict_encoding\",\n\n derive(NetworkEncode, NetworkDecode),\n\n network_encoding(use_tlv)\n\n)]\n\n#[lightning_encoding(use_tlv)]\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 39, "score": 21.58737359322852 }, { "content": " )]\n\n pub gossip_queries: Option<bool>,\n\n\n\n /// Requires/supports variable-length routing onion payloads\n\n #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub var_onion_optin: Option<bool>,\n\n\n\n /// Gossip queries can include additional information\n\n #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub gossip_queries_ex: Option<bool>,\n\n\n\n /// Static key for remote output\n\n #[cfg_attr(\n\n feature = \"serde\",\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 40, "score": 21.36613046453121 }, { "content": "\n\n /// `channel_reserve_satoshis` from the open_channel message\n\n /// ({channel_reserve}) is less than `dust_limit_satoshis`\n\n /// ({dust_limit}; rejecting the channel according to BOLT-2\n\n RemoteDustExceedsLocalReserve {\n\n channel_reserve: u64,\n\n dust_limit: u64,\n\n },\n\n}\n\n\n\n/// Policy to validate channel parameters proposed by a remote peer.\n\n///\n\n/// By default, [`Channel::new`] uses reasonable default policy created by\n\n/// [`Policy::default()`] method. 
Channel creator may provide a custom policy by\n\n/// using [`Channel::with`] method.\n\n#[derive(Clone, Eq, PartialEq, Hash, Debug, StrictEncode, StrictDecode)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Display, Serialize, Deserialize),\n\n serde(crate = \"serde_crate\"),\n", "file_path": "src/channel/bolt/policy.rs", "rank": 41, "score": 21.273149666102015 }, { "content": " feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub option_support_large_channel: Option<bool>,\n\n\n\n /// Anchor outputs\n\n #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub option_anchor_outputs: Option<bool>,\n\n\n\n /// Rest of feature flags which are unknown to the current implementation\n\n #[cfg_attr(feature = \"serde\", serde(with = \"As::<DisplayFromStr>\"))]\n\n pub unknown: FlagVec,\n\n}\n\n\n\nimpl Display for InitFeatures {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n for (feature, required) in self.known_set_features() {\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 42, "score": 21.249296308262004 }, { "content": " /// published; failing the channel\n\n NoPermanentId,\n\n\n\n /// local channel id {local} does not match to the one provided by\n\n /// the remote peer ({remote}) during the channel reestablishment\n\n ChannelIdMismatch { remote: ChannelId, local: ChannelId },\n\n}\n\n\n\n/// Channel direction\n\n#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Display)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n\n)]\n\npub enum Direction {\n\n /// Inbound channels accepted by the local node.\n\n ///\n\n /// Launched in response to received `accept_channel` messages\n", "file_path": "src/channel/bolt/channel.rs", "rank": 43, "score": 21.132646528461635 }, { "content": "use super::GossipChannelInfo;\n\nuse 
crate::router::gossip::LocalChannelInfo;\n\nuse crate::router::Router;\n\nuse crate::{extension, router, Extension, RouterExtension};\n\n\n\n#[derive(\n\n Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Display, Error\n\n)]\n\n#[display(doc_comments)]\n\npub enum Error {}\n\n\n\n#[derive(Clone, PartialEq, Eq, Debug, Default)]\n\n#[derive(StrictEncode, StrictDecode)]\n\npub struct RouterState {\n\n remote_channels: Vec<GossipChannelInfo>,\n\n direct_channels: Vec<LocalChannelInfo>,\n\n}\n\n\n\nimpl DumbDefault for RouterState {\n\n fn dumb_default() -> Self {\n", "file_path": "src/router/gossip/router.rs", "rank": 44, "score": 21.015084386038424 }, { "content": "//! More funding inputs can be add with on-chain mined transactions only when\n\n//! channel becomes operational.\n\n\n\nuse std::collections::{BTreeMap, BTreeSet};\n\n\n\nuse bitcoin::hashes::sha256;\n\nuse bitcoin::secp256k1::schnorr::Signature;\n\nuse bitcoin::secp256k1::XOnlyPublicKey;\n\nuse bitcoin::{Amount, OutPoint, Txid, Witness};\n\nuse miniscript::Descriptor;\n\n\n\nuse crate::bifrost::ChannelId;\n\n\n\n// TODO: Temporary structs which need to be implemented at descriptor wallet\n\n// level\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\npub struct SegwitDescriptor(u8);\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n", "file_path": "lnp2p/src/bifrost/proposals.rs", "rank": 45, "score": 20.680059333134103 }, { "content": " Ok(Self(Slice32::from_byte_iter(iter)?))\n\n }\n\n}\n\n\n\nimpl TempChannelId {\n\n pub fn random() -> Self {\n\n TempChannelId::from_inner(Slice32::random())\n\n }\n\n}\n\n\n\nimpl DumbDefault for TempChannelId {\n\n fn dumb_default() -> Self {\n\n Self(Default::default())\n\n }\n\n}\n\n\n\n#[derive(Wrapper, Clone, Debug, From, PartialEq, Eq)]\n\npub struct NodeColor([u8; 3]);\n\n\n\nimpl NodeColor {\n", "file_path": "lnp2p/src/legacy/types.rs", 
"rank": 46, "score": 20.56972194692758 }, { "content": "// LNP/BP Core Library implementing LNPBP specifications & standards\n\n// Written in 2019 by\n\n// Dr. Maxim Orlovsky <orlovsky@pandoracore.com>\n\n//\n\n// To the extent possible under law, the author(s) have dedicated all\n\n// copyright and related and neighboring rights to this software to\n\n// the public domain worldwide. This software is distributed without\n\n// any warranty.\n\n//\n\n// You should have received a copy of the MIT License\n\n// along with this software.\n\n// If not, see <https://opensource.org/licenses/MIT>.\n\n\n\nuse amplify::Slice32;\n\nuse p2p::legacy::{ChannelFeatures, ChannelId, ShortChannelId};\n\nuse secp256k1::PublicKey;\n\n\n\n#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]\n\n#[derive(StrictEncode, StrictDecode)]\n\npub struct DirectionalInfo {\n", "file_path": "src/router/gossip/util.rs", "rank": 47, "score": 20.254884957055463 }, { "content": "impl FeatureContext for InitContext {}\n\n\n\n/// Type representing `node_announcement` message feature context.\n\n#[derive(\n\n Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display, Default\n\n)]\n\n#[display(\"N\", alt = \"node_announcement\")]\n\npub struct NodeAnnouncementContext;\n\nimpl FeatureContext for NodeAnnouncementContext {}\n\n\n\n/// Type representing `channel_announcement` message feature context.\n\n#[derive(\n\n Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display, Default\n\n)]\n\n#[display(\"C\", alt = \"channel_announcement\")]\n\npub struct ChannelAnnouncementContext;\n\nimpl FeatureContext for ChannelAnnouncementContext {}\n\n\n\n/// Type representing BOLT-11 invoice feature context.\n\n#[derive(\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 48, "score": 20.090348598844844 }, { "content": "use p2p::legacy::PaymentOnion;\n\nuse strict_encoding::{\n\n self, strict_deserialize, strict_serialize, StrictDecode, StrictEncode,\n\n};\n\n\n\nuse super::{AnchorOutputs, BoltChannel, 
ChannelState, Error, Htlc};\n\nuse crate::channel::shared_ext::Bip96;\n\nuse crate::channel::tx_graph::TxRole;\n\nuse crate::channel::{self, Channel};\n\nuse crate::{extension, ChannelExtension};\n\n\n\n/// Shorthand for representing asset - amount pairs\n\npub type AssetsBalance = BTreeMap<AssetId, u64>;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[display(Debug)]\n\npub enum BoltExt {\n\n /// The channel itself\n\n Channel = 0,\n", "file_path": "src/channel/bolt/util.rs", "rank": 49, "score": 19.93977098741805 }, { "content": " ChannelId::from_inner(Slice32::from_inner(slice))\n\n }\n\n\n\n /// With some lightning messages (like error) channel id consisting of all\n\n /// zeros has a special meaning of \"applicable to all opened channels\". This\n\n /// function allow to detect this kind of [`ChannelId`]\n\n pub fn is_wildcard(&self) -> bool {\n\n self.to_inner().to_inner() == [0u8; 32]\n\n }\n\n}\n\n\n\n/// Lightning network temporary channel Id\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n serde_as,\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\", transparent)\n\n)]\n\n#[derive(\n\n Wrapper,\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 50, "score": 19.925823670515236 }, { "content": "\n\n _ => Err(lightning_encoding::Error::DataIntegrityError(s!(\n\n \"Wrong Network Address Format\"\n\n ))),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"strict_encoding\")]\n\nimpl strict_encoding::Strategy for AnnouncedNodeAddr {\n\n type Strategy = strict_encoding::strategies::UsingUniformAddr;\n\n}\n\n\n\n#[derive(Wrapper, Clone, Debug, Display, Hash, Default, From, PartialEq, Eq)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(StrictEncode, StrictDecode))]\n\n#[display(Debug)]\n\npub struct AddressList(Vec<AnnouncedNodeAddr>);\n\n\n\nimpl LightningEncode for AddressList {\n\n fn lightning_encode<E: io::Write>(\n", "file_path": "lnp2p/src/legacy/types.rs", 
"rank": 51, "score": 19.903445077518555 }, { "content": "/// transaction is a refund transaction.\n\npub const LN_TX_ROLE_COMMITMENT: u8 = 0x02;\n\n\n\n/// Signature created by a single lightning node\n\n#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\npub struct NodeSignature(pub XOnlyPublicKey, pub Signature);\n\n/// Map of lightning node keys to their signatures over certain data\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\npub struct NodeSignatureMap(pub BTreeMap<XOnlyPublicKey, Signature>);\n\n\n\n/// External transaction output. Must always be a v0 witness or a above\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\npub struct ChannelInput {\n\n /// UTXO used for funding\n\n pub prev_outpoint: OutPoint,\n\n\n\n /// Sequence number to use in the input construction\n", "file_path": "lnp2p/src/bifrost/proposals.rs", "rank": 52, "score": 19.85911466299768 }, { "content": "use bitcoin_scripts::hlc::HashPreimage;\n\nuse internet2::presentation::sphinx::SphinxPayload;\n\nuse internet2::tlv;\n\nuse lightning_encoding::{BigSize, LightningDecode, LightningEncode};\n\n\n\nuse crate::legacy::ShortChannelId;\n\n\n\n#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\npub struct PaymentOnion {\n\n pub realm: HopRealm,\n\n pub amt_to_forward: u64,\n\n pub outgoing_cltv_value: u32,\n\n}\n\n\n\n#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\npub enum HopRealm {\n\n Legacy(ShortChannelId),\n\n TlvIntermediary(ShortChannelId),\n", "file_path": "lnp2p/src/legacy/bolt4.rs", "rank": 53, "score": 19.42106855198877 }, { "content": " TlvReceiver(Option<PaymentData>),\n\n}\n\n\n\n#[derive(Copy, Clone, 
Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\npub struct PaymentData {\n\n pub payment_secret: HashPreimage,\n\n pub total_msat: u64,\n\n}\n\n\n\n// For internal use to simplify Tlv encoding/decoding implementation\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[lightning_encoding(use_tlv)]\n\n#[cfg_attr(\n\n feature = \"strict_encoding\",\n\n derive(NetworkEncode, NetworkDecode),\n\n network_encoding(use_tlv)\n\n)]\n", "file_path": "lnp2p/src/legacy/bolt4.rs", "rank": 54, "score": 19.186227084157245 }, { "content": " #[inline]\n\n pub fn has_anchor_outputs(&self) -> bool {\n\n self.channel_type.unwrap_or_default().has_anchor_outputs()\n\n }\n\n\n\n /// Detects whether channel has `option_anchors_zero_fee_htlc_tx` set\n\n #[inline]\n\n pub fn has_anchors_zero_fee_htlc_tx(&self) -> bool {\n\n self.channel_type\n\n .unwrap_or_default()\n\n .has_anchors_zero_fee_htlc_tx()\n\n }\n\n}\n\n\n\n/// This message describes the outpoint which the funder has created for the\n\n/// initial commitment transactions. 
After receiving the peer's signature, via\n\n/// `funding_signed`, it will broadcast the funding transaction.\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 55, "score": 19.136451492655016 }, { "content": " self.0.into_inner()\n\n }\n\n\n\n #[inline]\n\n fn as_inner(&self) -> &Self::Inner {\n\n self.0.as_inner()\n\n }\n\n\n\n #[inline]\n\n fn from_inner(inner: Self::Inner) -> Self {\n\n Self(sha256t::Hash::from_inner(inner))\n\n }\n\n}\n\n\n\nimpl strict_encoding::Strategy for ChannelId {\n\n type Strategy = strict_encoding::strategies::HashFixedBytes;\n\n}\n\n\n\n/// Error parsing [`ChannelId`] bech32m representation\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Display, From)]\n", "file_path": "lnp2p/src/bifrost/types.rs", "rank": 56, "score": 19.118102837890554 }, { "content": "use amplify::num::u24;\n\nuse amplify::{Display, DumbDefault, Slice32, Wrapper};\n\nuse bitcoin::hashes::Hash;\n\nuse bitcoin::Txid;\n\nuse chrono::{DateTime, Local, TimeZone, Utc};\n\nuse lightning_encoding::{self, LightningDecode, LightningEncode};\n\n#[cfg(feature = \"serde\")]\n\nuse serde_with::{As, DisplayFromStr};\n\n#[cfg(feature = \"strict_encoding\")]\n\nuse strict_encoding::net::{\n\n AddrFormat, DecodeError, RawAddr, Transport, Uniform, UniformAddr, ADDR_LEN,\n\n};\n\n#[cfg(feature = \"strict_encoding\")]\n\nuse strict_encoding::{self, StrictDecode, StrictEncode};\n\n\n\n/// Enumeration of possible channel ids\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 57, "score": 19.109282790153568 }, { "content": " Ok(())\n\n }\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Hash, Debug)]\n\npub enum UpdateReq {\n\n 
PayBolt(Vec<Hop<PaymentOnion>>),\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[display(Debug)]\n\npub enum TxType {\n\n HtlcSuccess,\n\n HtlcTimeout,\n\n Unknown(u16),\n\n}\n\n\n\nimpl From<TxType> for u16 {\n\n fn from(ty: TxType) -> Self {\n", "file_path": "src/channel/bolt/util.rs", "rank": 59, "score": 18.87074275491678 }, { "content": " /// Before the channel is assigned a final [`ChannelId`] returns\n\n /// [`TempChannelId`], and `None` after\n\n #[inline]\n\n pub fn temp_channel_id(self) -> Option<TempChannelId> {\n\n match self {\n\n ActiveChannelId::Static(_) => None,\n\n ActiveChannelId::Temporary(id) => Some(id),\n\n }\n\n }\n\n}\n\n\n\n/// Legacy lightning network channel id: 256-bit number representing funding\n\n/// txid XOR'ed with 32-bit funding output number\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n serde_as,\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\", transparent)\n\n)]\n\n#[derive(\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 60, "score": 18.853118346189238 }, { "content": " #[inline]\n\n pub fn set_local_keys(&mut self, keys: LocalKeyset) {\n\n self.local_keys = keys\n\n }\n\n\n\n /// Sets `static_remotekey` flag for the channel\n\n #[inline]\n\n pub fn set_static_remotekey(&mut self, static_remotekey: bool) {\n\n self.local_keys.static_remotekey = static_remotekey\n\n }\n\n}\n\n\n\nimpl Extension<BoltExt> for BoltChannel {\n\n fn identity(&self) -> BoltExt {\n\n BoltExt::Bolt3\n\n }\n\n\n\n fn update_from_local(&mut self, _message: &()) -> Result<(), Error> {\n\n // Nothing to do here so far\n\n Ok(())\n", "file_path": "src/channel/bolt/channel.rs", "rank": 61, "score": 18.68573831042779 }, { "content": " Clone,\n\n Copy,\n\n PartialEq,\n\n Eq,\n\n PartialOrd,\n\n Ord,\n\n Hash,\n\n Debug,\n\n Display,\n\n From,\n\n LightningEncode,\n\n LightningDecode\n\n)]\n\n#[cfg_attr(feature = \"strict_encoding\", 
derive(StrictEncode, StrictDecode))]\n\n#[display(LowerHex)]\n\n#[wrapper(FromStr, LowerHex, UpperHex)]\n\npub struct Alias(\n\n #[cfg_attr(feature = \"serde\", serde(with = \"As::<DisplayFromStr>\"))]\n\n Slice32,\n\n);\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 62, "score": 18.602067710844054 }, { "content": "use bitcoin::hashes::Hash;\n\nuse internet2::tlv;\n\nuse lightning_encoding::{LightningDecode, LightningEncode};\n\nuse lnpbp::chain::AssetId;\n\n\n\nuse super::{ChannelId, InitFeatures};\n\n\n\n/// List of the assets for parsing as a TLV field type 1 inside [`Init`]\n\n/// message.\n\n#[derive(Wrapper, Clone, Eq, PartialEq, Default, Debug, From)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\npub struct AssetList(HashSet<AssetId>);\n\n\n\nimpl LightningEncode for AssetList {\n\n fn lightning_encode<E: Write>(\n\n &self,\n\n mut e: E,\n\n ) -> Result<usize, lightning_encoding::Error> {\n\n self.0.iter().try_fold(0usize, |len, asset| {\n\n Ok(len + asset.lightning_encode(&mut e)?)\n", "file_path": "lnp2p/src/legacy/bolt1.rs", "rank": 63, "score": 18.52121227230955 }, { "content": " Wrapper,\n\n Clone,\n\n Copy,\n\n PartialEq,\n\n Eq,\n\n PartialOrd,\n\n Ord,\n\n Hash,\n\n Debug,\n\n Display,\n\n Default,\n\n From,\n\n LightningEncode,\n\n LightningDecode\n\n)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(StrictEncode, StrictDecode))]\n\n#[display(LowerHex)]\n\n#[wrapper(FromStr, LowerHex, UpperHex)]\n\npub struct ChannelId(\n\n #[cfg_attr(feature = \"serde\", serde(with = \"As::<DisplayFromStr>\"))]\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 64, "score": 18.391035285681184 }, { "content": " Clone,\n\n Copy,\n\n PartialEq,\n\n Eq,\n\n PartialOrd,\n\n Ord,\n\n Hash,\n\n Debug,\n\n Display,\n\n From,\n\n LightningEncode,\n\n LightningDecode\n\n)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(StrictEncode, StrictDecode))]\n\n#[display(LowerHex)]\n\n#[wrapper(FromStr, LowerHex, 
UpperHex)]\n\npub struct TempChannelId(\n\n #[cfg_attr(feature = \"serde\", serde(with = \"As::<DisplayFromStr>\"))]\n\n Slice32,\n\n);\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 65, "score": 18.391035285681184 }, { "content": " RouterState::default()\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[display(Debug)]\n\npub enum GossipExt {\n\n MainRouter = 0,\n\n DirectRouter = 1,\n\n GossipRouter = 2,\n\n}\n\n\n\nimpl Default for GossipExt {\n\n fn default() -> Self {\n\n GossipExt::MainRouter\n\n }\n\n}\n\n\n\nimpl From<GossipExt> for u16 {\n", "file_path": "src/router/gossip/router.rs", "rank": 66, "score": 18.359484615567766 }, { "content": " #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub option_data_loss_protect: Option<bool>,\n\n\n\n /// Sending node needs a complete routing information dump\n\n pub initial_routing_sync: bool,\n\n\n\n /// Commits to a shutdown scriptpubkey when opening channel\n\n #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub option_upfront_shutdown_script: Option<bool>,\n\n\n\n /// More sophisticated gossip control\n\n #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 67, "score": 18.31050338276654 }, { "content": " /// fee proportional millionth\n\n pub fee_proportional_millionths: u32,\n\n\n\n /// Used only if `option_channel_htlc_max` in `message_flags` is set\n\n pub htlc_maximum_msat: u64,\n\n}\n\n\n\n/// Information about channel used for route construction and re-broadcasting\n\n/// gossip messages.\n\n#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug, Display)]\n\n#[derive(StrictEncode, StrictDecode)]\n\n#[display(\"{short_channel_id}\")]\n\npub struct GossipChannelInfo {\n\n /// Node identities constituting channel\n\n pub nodes: 
(PublicKey, PublicKey),\n\n\n\n /// Chainhash\n\n pub chain_hash: Slice32,\n\n\n\n /// Short Channel Id\n", "file_path": "src/router/gossip/util.rs", "rank": 68, "score": 18.14453829056291 }, { "content": " /// Short channel Id\n\n pub short_channel_id: ShortChannelId,\n\n\n\n /// Node Signature\n\n pub node_signature: Signature,\n\n\n\n /// Bitcoin Signature\n\n pub bitcoin_signature: Signature,\n\n}\n\n\n\n/// This gossip message contains ownership information regarding a channel. It\n\n/// ties each on-chain Bitcoin key to the associated Lightning node key, and\n\n/// vice-versa. The channel is not practically usable until at least one side\n\n/// has announced its fee levels and expiry, using channel_update.\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\"channel_announcement({chain_hash}, {short_channel_id}, ...)\")]\n\npub struct ChannelAnnouncement {\n\n /// Node Signature 1\n", "file_path": "lnp2p/src/legacy/bolt7.rs", "rank": 69, "score": 18.06032426085507 }, { "content": " /// Node Id 1\n\n pub node_id_1: PublicKey,\n\n\n\n /// Node Id 2\n\n pub node_id_2: PublicKey,\n\n\n\n /// Bitcoin key 1\n\n pub bitcoin_key_1: PublicKey,\n\n\n\n /// Bitcoin key 2\n\n pub bitcoin_key_2: PublicKey,\n\n}\n\n\n\n/// This gossip message allows a node to indicate extra data associated with it,\n\n/// in addition to its public key. 
To avoid trivial denial of service attacks,\n\n/// nodes not associated with an already known channel are ignored.\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\"node_announcement({node_id}, {alias}, {addresses}, ...)\")]\n", "file_path": "lnp2p/src/legacy/bolt7.rs", "rank": 70, "score": 18.010707918856173 }, { "content": "\n\n #[inline]\n\n pub fn local_amount_msat(&self) -> u64 {\n\n self.constructor().local_amount_msat()\n\n }\n\n\n\n #[inline]\n\n pub fn remote_amount_msat(&self) -> u64 {\n\n self.constructor().remote_amount_msat()\n\n }\n\n}\n\n\n\n/// The core of the lightning channel operating according to the Bolt3 standard.\n\n/// This is \"channel constructor\" used by `Channel` structure and managing part\n\n/// of the state which is not HTLC-related.\n\n///\n\n/// The type should not be constructed directly or used from outside of the\n\n/// library, but it's made public for allowing channel state access.\n\n#[derive(Getters, Clone, PartialEq, Eq, Debug, StrictEncode, StrictDecode)]\n\n#[getter(as_copy)]\n", "file_path": "src/channel/bolt/channel.rs", "rank": 71, "score": 17.94434998159763 }, { "content": " serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub option_static_remotekey: Option<bool>,\n\n\n\n /// Node supports `payment_secret` field\n\n #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub payment_secret: Option<bool>,\n\n\n\n /// Node can receive basic multi-part payments\n\n #[cfg_attr(\n\n feature = \"serde\",\n\n serde(with = \"As::<Option<DisplayFromStr>>\")\n\n )]\n\n pub basic_mpp: Option<bool>,\n\n\n\n /// Can create large channels\n\n #[cfg_attr(\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 72, "score": 17.86535178074483 }, { "content": " Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display, 
Default\n\n)]\n\n#[display(\"9\", alt = \"bolt11\")]\n\npub struct Bolt11Context;\n\nimpl FeatureContext for Bolt11Context {}\n\n\n\n/// Specific named feature flags\n\n#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Display)]\n\n#[non_exhaustive]\n\n#[repr(u16)]\n\npub enum Feature {\n\n /// Requires or supports extra `channel_reestablish` fields\n\n #[display(\"option_data_loss_protect\", alt = \"0/1\")]\n\n OptionDataLossProtect = 0,\n\n\n\n /// Sending node needs a complete routing information dump\n\n #[display(\"initial_routing_sync\", alt = \"3\")]\n\n InitialRoutingSync = 2,\n\n\n\n /// Commits to a shutdown scriptpubkey when opening channel\n", "file_path": "lnp2p/src/legacy/bolt9.rs", "rank": 73, "score": 17.760118179881495 }, { "content": "\n\n /// The next sender-broadcast commitment transaction's per-commitment point\n\n pub next_per_commitment_point: PublicKey,\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\"update_fee({channel_id}, {feerate_per_kw})\")]\n\npub struct UpdateFee {\n\n /// The channel ID\n\n pub channel_id: ChannelId,\n\n\n\n /// Fee rate per 1000-weight of the transaction\n\n pub feerate_per_kw: u32,\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 74, "score": 17.646089395999645 }, { "content": " /// The minimum value unencumbered by HTLCs for the counterparty to keep in\n\n /// the channel\n\n pub channel_reserve_satoshis: u64,\n\n\n\n /// The maximum number of inbound HTLCs towards sender\n\n pub max_accepted_htlcs: u16,\n\n}\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl ToYamlString for PeerParams {}\n\n\n\nimpl Default for PeerParams {\n\n /// Sets reasonable 
values for the channel parameters requested from the\n\n /// other peer in sent `open_channel` or `accept_channel` messages.\n\n ///\n\n /// Usually this should not be used and instead [`Channel::with`] should be\n\n /// provided with custom channel parameters basing on the user preferences.\n\n fn default() -> Self {\n\n PeerParams {\n\n dust_limit_satoshis: BOLT3_DUST_LIMIT,\n", "file_path": "src/channel/bolt/policy.rs", "rank": 75, "score": 17.530195594562215 }, { "content": "\n\nuse super::{ChannelId, ProtocolList};\n\n\n\n/// Once authentication is complete, the first message reveals the features\n\n/// supported or required by this node, even if this is a reconnection.\n\n#[derive(Clone, PartialEq, Eq, Debug, Display, NetworkEncode, NetworkDecode)]\n\n#[network_encoding(use_tlv)]\n\n#[display(\"init({protocols}, {assets:#?})\")]\n\npub struct Init {\n\n pub protocols: ProtocolList,\n\n pub assets: HashSet<AssetId>,\n\n #[network_encoding(unknown_tlvs)]\n\n pub unknown_tlvs: tlv::Stream,\n\n}\n\n\n\n/// In order to allow for the existence of long-lived TCP connections, at\n\n/// times it may be required that both ends keep alive the TCP connection\n\n/// at the application level. 
Such messages also allow obfuscation of\n\n/// traffic patterns.\n\n#[derive(Clone, PartialEq, Eq, Debug, Display, NetworkEncode, NetworkDecode)]\n", "file_path": "lnp2p/src/bifrost/ctrl.rs", "rank": 76, "score": 17.492980370100966 }, { "content": "#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Display)]\n\n#[display(\"pay {amount_msat} msat to {node_id} locked by {payment_hash}\")]\n\npub struct PaymentRequest {\n\n /// Amount to pay\n\n pub amount_msat: u64,\n\n\n\n /// The hash lock for the payment\n\n pub payment_hash: HashLock,\n\n\n\n /// Destination node id\n\n pub node_id: PublicKey,\n\n\n\n /// Minimal CLTV expiry that should be used at the destination.\n\n ///\n\n /// The actual CLTV used in the offered HTLC may be larger due to\n\n /// `cltv_delay`s on a route.\n\n pub min_final_cltv_expiry: u32,\n\n}\n", "file_path": "lnp2p/src/legacy/bolt11.rs", "rank": 77, "score": 17.186595626637995 }, { "content": " pub last_offered_htlc_id: u64,\n\n}\n\n\n\nimpl State for ChannelState {\n\n fn to_funding(&self) -> Funding {\n\n self.funding.clone()\n\n }\n\n\n\n fn set_funding(&mut self, funding: &Funding) {\n\n self.funding = funding.clone()\n\n }\n\n}\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl ToYamlString for ChannelState {}\n\n\n\nimpl DumbDefault for ChannelState {\n\n fn dumb_default() -> Self {\n\n ChannelState {\n\n funding: Funding::new(),\n", "file_path": "src/channel/bolt/state.rs", "rank": 78, "score": 17.11585518039501 }, { "content": "#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[cfg_attr(\n\n feature = \"serde\",\n\n derive(Serialize, Deserialize),\n\n serde(crate = \"serde_crate\")\n\n)]\n\npub enum ChannelType {\n\n /// no features (no bits set)\n\n #[display(\"basic\")]\n\n Basic,\n\n\n\n /// option_static_remotekey (bit 12)\n\n #[display(\"static_remotekey\")]\n\n StaticRemotekey,\n\n\n\n /// option_anchor_outputs and option_static_remotekey (bits 20 and 12)\n\n 
#[display(\"anchored\")]\n\n AnchorOutputsStaticRemotekey,\n\n\n\n /// option_anchors_zero_fee_htlc_tx and option_static_remotekey (bits 22\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 79, "score": 17.072314567280255 }, { "content": ")]\n\n#[derive(\n\n Clone,\n\n Copy,\n\n PartialEq,\n\n Eq,\n\n PartialOrd,\n\n Ord,\n\n Hash,\n\n Debug,\n\n Display,\n\n From,\n\n LightningEncode,\n\n LightningDecode\n\n)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(StrictEncode, StrictDecode))]\n\n#[display(inner)]\n\npub enum ActiveChannelId {\n\n /// Channel does not have a permanent id and uses temporary one\n\n #[from]\n", "file_path": "lnp2p/src/legacy/types.rs", "rank": 80, "score": 17.053971964975265 }, { "content": " fn ln_to_remote_v1(amount: u64, remote_pubkey: PublicKey) -> Self;\n\n\n\n /// NB: We use argument named `remote_pubkey`, but in fact the source\n\n /// for this key is the local node key, since we generate a transaction\n\n /// which we will sign for the remote node.\n\n fn ln_to_remote_v2(amount: u64, remote_pubkey: PublicKey) -> Self;\n\n}\n\n\n\nimpl ScriptGenerators for LockScript {\n\n fn ln_funding(\n\n _: u64,\n\n local_pubkey: &LocalPubkey,\n\n remote_pubkey: PublicKey,\n\n ) -> Self {\n\n let pk = vec![\n\n local_pubkey.to_bitcoin_pk(),\n\n bitcoin::PublicKey::new(remote_pubkey),\n\n ]\n\n .lex_ordered();\n\n\n", "file_path": "src/channel/bolt/channel.rs", "rank": 81, "score": 17.046390446227747 }, { "content": " pub short_channel_id: ShortChannelId,\n\n\n\n /// Information about each channel direction.\n\n ///\n\n /// The first tuple field corresponds to the direction from the first\n\n /// node id (see [`ChannelInfo::nodes`]) to the second one – and the second\n\n /// tuple field to the opposite direction.\n\n pub directions: (Option<DirectionalInfo>, Option<DirectionalInfo>),\n\n\n\n /// The channel capacity, known only for local channels - or if it can be\n\n /// deduced from on-chain data, if they are available\n\n pub 
capacity_sats: Option<u64>,\n\n\n\n /// Channel features\n\n pub features: ChannelFeatures,\n\n}\n\n\n\n/// Information about channel used for route construction and re-broadcasting\n\n/// gossip messages.\n\n#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug, Display)]\n", "file_path": "src/router/gossip/util.rs", "rank": 82, "score": 17.009424496274892 }, { "content": "///\n\n/// # Specification\n\n/// <https://github.com/lightningnetwork/lightning-rfc/blob/master/01-messaging.md#the-ping-and-pong-messages>\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\"ping({pong_size})\")]\n\npub struct Ping {\n\n pub pong_size: u16,\n\n pub ignored: Vec<u8>,\n\n}\n\n\n\n/// For simplicity of diagnosis, it's often useful to tell a peer that something\n\n/// is incorrect.\n\n///\n\n/// # Specification\n\n/// <https://github.com/lightningnetwork/lightning-rfc/blob/master/01-messaging.md#the-error-message>\n\n#[derive(Clone, PartialEq, Debug, Error, LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\npub struct Error {\n", "file_path": "lnp2p/src/legacy/bolt1.rs", "rank": 83, "score": 16.940131351170166 }, { "content": "#[display(\"ping({pong_size})\")]\n\npub struct Ping {\n\n pub ignored: Vec<u8>,\n\n pub pong_size: u16,\n\n}\n\n\n\n/// For simplicity of diagnosis, it's often useful to tell a peer that something\n\n/// is incorrect.\n\n#[derive(Clone, PartialEq, Debug, Error, NetworkEncode, NetworkDecode)]\n\n#[network_encoding(use_tlv)]\n\npub struct Error {\n\n pub channel_id: Option<ChannelId>,\n\n pub errno: u64,\n\n pub message: Option<String>,\n\n /// Any additiona error details\n\n #[network_encoding(unknown_tlvs)]\n\n pub unknown_tlvs: tlv::Stream,\n\n}\n\n\n\nimpl Display for Error {\n", "file_path": "lnp2p/src/bifrost/ctrl.rs", "rank": 84, "score": 
16.746605972338948 }, { "content": "\n\nuse super::{\n\n AddressList, Alias, ChannelId, InitFeatures, NodeColor, ShortChannelId,\n\n};\n\nuse crate::legacy::ChannelFeatures;\n\n\n\n/// This is a direct message between the two endpoints of a channel and serves\n\n/// as an opt-in mechanism to allow the announcement of the channel to the rest\n\n/// of the network. It contains the necessary signatures, by the sender, to\n\n/// construct the `channel_announcement` message.\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\n\n \"announcement_signature({channel_id}, {short_channel_id}, ...signatures)\"\n\n)]\n\npub struct AnnouncementSignatures {\n\n /// The channel ID\n\n pub channel_id: ChannelId,\n\n\n", "file_path": "lnp2p/src/legacy/bolt7.rs", "rank": 85, "score": 16.725732665996333 }, { "content": "/// Data structure maintained by each node during channel creation phase\n\n/// (before the funding transaction is mined or became a part of the most\n\n/// recent parent channel state)\n\n#[derive(Clone, PartialOrd, Eq, PartialEq, Debug)]\n\npub struct PreChannel {\n\n /// Channel id, constructed out of [`ChannelParams`] and\n\n /// [`Self::coordinator_node`]\n\n pub channel_id: ChannelId,\n\n pub coordinator_node: XOnlyPublicKey,\n\n pub channel_params: ChannelParams,\n\n pub proposal: ChannelProposal,\n\n pub finalized_at: Option<chrono::DateTime<chrono::Utc>>,\n\n}\n\n\n\n/// Request initiating channel creation workflow.\n\n///\n\n/// If a peer accepts the channel in the proposed – or updated – form, it MUST\n\n/// reply with [`AcceptChannel`] message. 
If the channel is not accepted, the\n\n/// peer must send [`super::Error`] message.\n\n#[derive(Clone, PartialOrd, Eq, PartialEq, Debug, Display)]\n", "file_path": "lnp2p/src/bifrost/channel.rs", "rank": 86, "score": 16.628710635015405 }, { "content": "/// transaction and both versions of the commitment transaction.\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(\n\n feature = \"strict_encoding\",\n\n derive(NetworkEncode, NetworkDecode),\n\n network_encoding(use_tlv)\n\n)]\n\n#[lightning_encoding(use_tlv)]\n\n#[display(\n\n \"open_channel({chain_hash}, {temporary_channel_id}, {funding_satoshis}, \\\n\n {channel_flags}, ...)\"\n\n)]\n\npub struct OpenChannel {\n\n /// The genesis hash of the blockchain where the channel is to be opened\n\n pub chain_hash: Slice32,\n\n\n\n /// A temporary channel ID, until the funding outpoint is announced\n\n pub temporary_channel_id: TempChannelId,\n\n\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 87, "score": 16.47410815423081 }, { "content": "#[cfg(feature = \"serde\")]\n\nimpl ToYamlString for CommonParams {}\n\n\n\nimpl Default for CommonParams {\n\n /// Sets reasonable values for the common channel parameters used in\n\n /// constructing `open_channel` message.\n\n ///\n\n /// Usually this should not be used and instead [`Channel::with`] should be\n\n /// provided with custom channel parameters basing on the current state of\n\n /// the bitcoin mempool and hash rate.\n\n fn default() -> Self {\n\n CommonParams {\n\n minimum_depth: 3,\n\n feerate_per_kw: 256,\n\n announce_channel: true,\n\n channel_type: ChannelType::default(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/channel/bolt/policy.rs", "rank": 88, "score": 16.470345277160355 }, { "content": "#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Display)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\n#[display(\"upgrade_channel({channel_id}, {protocol}, ...)\")]\n\npub struct UpgradeChannel {\n\n 
pub channel_id: ChannelId,\n\n pub protocol: ProtocolName,\n\n pub accepted: BTreeMap<XOnlyPublicKey, Signature>,\n\n}\n\n\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Display)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\n#[display(\"donwgrade_channel({channel_id}, {protocol}, ...)\")]\n\npub struct DowngradeChannel {\n\n pub channel_id: ChannelId,\n\n pub protocol: ProtocolName,\n\n pub accepted: BTreeMap<XOnlyPublicKey, Signature>,\n\n}\n\n\n\n#[derive(Clone, PartialEq, Debug, Display)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\n#[display(\"close_channel({channel_id}, ...)\")]\n\npub struct CloseChannel {\n\n pub channel_id: ChannelId,\n\n pub closing_tx: Psbt,\n\n pub accepted: BTreeMap<XOnlyPublicKey, Signature>,\n\n}\n", "file_path": "lnp2p/src/bifrost/channel.rs", "rank": 89, "score": 16.45022602578687 }, { "content": " pub pending: Vec<XOnlyPublicKey>,\n\n pub accepted: BTreeMap<XOnlyPublicKey, Signature>,\n\n}\n\n\n\n/// Response from a peer to a channel coordinator\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Display)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\n#[display(\"accept_channel({channel_id}, ...)\")]\n\npub struct AcceptChannel {\n\n pub channel_id: ChannelId,\n\n pub updated_proposal: ChannelProposal,\n\n pub signatures: BTreeMap<XOnlyPublicKey, Signature>,\n\n}\n\n\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug, Display)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\n#[display(\"finalize_channel({channel_id}, ...)\")]\n\npub struct FinalizeChannel {\n\n pub channel_id: ChannelId,\n\n pub proposal: ChannelProposal,\n", "file_path": "lnp2p/src/bifrost/channel.rs", "rank": 90, "score": 16.431818711919988 }, { "content": " #[display(\"inbound\")]\n\n Inbound,\n\n\n\n /// Outbound channels proposed to a remote node.\n\n ///\n\n /// Created by sending `open_channel` message\n\n #[display(\"outbound\")]\n\n Outbount,\n\n}\n\n\n\nimpl Direction {\n\n /// Detects if the channel is inbound\n\n #[inline]\n\n pub 
fn is_inbound(self) -> bool {\n\n self == Direction::Inbound\n\n }\n\n\n\n /// Detects if the channel is outbound\n\n #[inline]\n\n pub fn is_outbound(self) -> bool {\n", "file_path": "src/channel/bolt/channel.rs", "rank": 91, "score": 16.365092464943263 }, { "content": " pub fn has_anchors_zero_fee_htlc_tx(self) -> bool {\n\n self == ChannelType::AnchorsZeroFeeHtlcTxStaticRemotekey\n\n }\n\n\n\n /// Converts default channel type into `None` and non-default into\n\n /// `Some(ChannelType)`\n\n #[inline]\n\n pub fn into_option(self) -> Option<ChannelType> {\n\n match self {\n\n ChannelType::Basic => None,\n\n _ => Some(self),\n\n }\n\n }\n\n}\n\n\n\n/// Error parsing [`ChannelType`] from strings\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Display, Error)]\n\n#[display(\"unknown channel type name `{0}`\")]\n\npub struct ChannelTypeParseError(String);\n\n\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 92, "score": 16.342674547015683 }, { "content": " derive(Display, Serialize, Deserialize),\n\n serde(crate = \"serde_crate\"),\n\n display(PeerParams::to_yaml_string)\n\n)]\n\npub struct PeerParams {\n\n /// The threshold below which outputs on transactions broadcast by sender\n\n /// will be omitted\n\n pub dust_limit_satoshis: u64,\n\n\n\n /// The number of blocks which the counterparty will have to wait to claim\n\n /// on-chain funds if they broadcast a commitment transaction\n\n pub to_self_delay: u16,\n\n\n\n /// Indicates the smallest value HTLC this node will accept.\n\n pub htlc_minimum_msat: u64,\n\n\n\n /// The maximum inbound HTLC value in flight towards sender, in\n\n /// milli-satoshi\n\n pub max_htlc_value_in_flight_msat: u64,\n\n\n", "file_path": "src/channel/bolt/policy.rs", "rank": 93, "score": 16.263241576842265 }, { "content": " /// A signature on the commitment transaction\n\n pub signature: Signature,\n\n\n\n /// Signatures on the HTLC transactions\n\n pub htlc_signatures: Vec<Signature>,\n\n}\n\n\n\n#[derive(Clone, 
PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\n\n \"revoke_and_ack({channel_id}, {next_per_commitment_point}, \\\n\n ...per_commitment_secret)\"\n\n)]\n\npub struct RevokeAndAck {\n\n /// The channel ID\n\n pub channel_id: ChannelId,\n\n\n\n /// The secret corresponding to the per-commitment point\n\n pub per_commitment_secret: SecretKey,\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 94, "score": 16.183790513647807 }, { "content": "/// Tag used for [`ChannelId`] hash type\n\npub struct ChannelIdTag;\n\n\n\nimpl sha256t::Tag for ChannelIdTag {\n\n #[inline]\n\n fn engine() -> sha256::HashEngine {\n\n let midstate = sha256::Midstate::from_inner(CHANNEL_ID_MIDSTATE);\n\n sha256::HashEngine::from_midstate(midstate, 64)\n\n }\n\n}\n\n\n\n/// A channel identifier\n\n///\n\n/// Represents commitment to the channel parameters and channel coordinator\n\n/// node; any two distinct channels are guaranteed (with SHA256 collision\n\n/// resistance level) to have a distinct channel ids.\n\n#[derive(\n\n Wrapper, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, From\n\n)]\n\n#[wrapper(Debug, LowerHex, Index, IndexRange, IndexFrom, IndexTo, IndexFull)]\n", "file_path": "lnp2p/src/bifrost/types.rs", "rank": 95, "score": 16.179920216536996 }, { "content": " pub sequence_no: u32,\n\n\n\n /// Descriptor for the previous transaction output required to construct\n\n /// witness for the input. 
Always v0+ witness\n\n pub descriptor: SegwitDescriptor,\n\n\n\n /// Witness satisfying prevous transaction descriptor.\n\n ///\n\n /// Must be present only when the transaction is signed\n\n pub witness: Option<Witness>,\n\n}\n\n\n\n/// Information to construct external transaction output not used in the\n\n/// channel.\n\n#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\npub struct ChannelOutput {\n\n /// We have to expose full descriptor in order to allow P2C tweaks\n\n pub output: Descriptor<bitcoin::PublicKey>,\n\n\n", "file_path": "lnp2p/src/bifrost/proposals.rs", "rank": 96, "score": 16.163242342525237 }, { "content": "\n\nimpl strict_encoding::Strategy for AnnouncedNodeAddr {\n\n type Strategy = strict_encoding::strategies::UsingUniformAddr;\n\n}\n\n\n\n#[derive(Wrapper, Clone, Debug, Display, Hash, Default, From, PartialEq, Eq)]\n\n#[derive(NetworkEncode, NetworkDecode)]\n\n#[display(Debug)]\n\npub struct AddressList(Vec<AnnouncedNodeAddr>);\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use bitcoin::hashes::hex::FromHex;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_address_encodings() {\n\n // Test vectors taken from https://github.com/rust-bitcoin/rust-lightning/blob/main/lightning/src/ln/msgs.rs\n\n let ipv4 = AnnouncedNodeAddr::IpV4 {\n", "file_path": "lnp2p/src/bifrost/types.rs", "rank": 97, "score": 16.15842263788784 }, { "content": " }\n\n}\n\n\n\nimpl ScriptGenerators for PubkeyScript {\n\n #[inline]\n\n fn ln_funding(\n\n amount: u64,\n\n local_pubkey: &LocalPubkey,\n\n remote_pubkey: PublicKey,\n\n ) -> Self {\n\n WitnessScript::ln_funding(amount, local_pubkey, remote_pubkey)\n\n .to_p2wsh()\n\n }\n\n\n\n #[inline]\n\n fn ln_to_local(\n\n amount: u64,\n\n revocationpubkey: PublicKey,\n\n local_delayedpubkey: PublicKey,\n\n to_self_delay: u16,\n", "file_path": "src/channel/bolt/channel.rs", "rank": 98, "score": 15.97398876508374 }, { "content": "#[derive(LightningEncode, 
LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\"funding_locked({channel_id}, {next_per_commitment_point})\")]\n\npub struct FundingLocked {\n\n /// The channel ID\n\n pub channel_id: ChannelId,\n\n\n\n /// The per-commitment point of the second commitment transaction\n\n pub next_per_commitment_point: PublicKey,\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Debug, Display)]\n\n#[derive(LightningEncode, LightningDecode)]\n\n#[cfg_attr(feature = \"strict_encoding\", derive(NetworkEncode, NetworkDecode))]\n\n#[display(\"shutdown({channel_id}, {scriptpubkey})\")]\n\npub struct Shutdown {\n\n /// The channel ID\n\n pub channel_id: ChannelId,\n\n\n\n /// The destination of this peer's funds on closing.\n", "file_path": "lnp2p/src/legacy/bolt2.rs", "rank": 99, "score": 15.963004016409194 } ]
Rust
keymanager-lib/src/policy.rs
keks/oasis-core
37479c75e5f94ffc03222cba6edd0624c1280d25
use std::{ collections::{HashMap, HashSet}, sync::RwLock, }; use anyhow::Result; use lazy_static::lazy_static; use sgx_isa::Keypolicy; use tiny_keccak::sha3_256; use oasis_core_keymanager_api_common::*; use oasis_core_runtime::{ common::{ cbor, runtime::RuntimeId, sgx::{ avr::EnclaveIdentity, seal::{seal, unseal}, }, }, enclave_rpc::Context as RpcContext, runtime_context, storage::StorageContext, }; use crate::context::Context as KmContext; lazy_static! { static ref POLICY: Policy = Policy::new(); } const POLICY_STORAGE_KEY: &'static [u8] = b"keymanager_policy"; const POLICY_SEAL_CONTEXT: &'static [u8] = b"Ekiden Keymanager Seal policy v0"; pub struct Policy { inner: RwLock<Inner>, } struct Inner { policy: Option<CachedPolicy>, } impl Policy { fn new() -> Self { Self { inner: RwLock::new(Inner { policy: None }), } } pub fn unsafe_skip() -> bool { option_env!("OASIS_UNSAFE_SKIP_KM_POLICY").is_some() } pub fn global<'a>() -> &'a Policy { &POLICY } pub fn init(&self, ctx: &mut RpcContext, raw_policy: &Vec<u8>) -> Result<Vec<u8>> { if Self::unsafe_skip() { return Ok(vec![]); } let mut inner = self.inner.write().unwrap(); let old_policy = match inner.policy.as_ref() { Some(old_policy) => old_policy.clone(), None => match Self::load_policy() { Some(old_policy) => old_policy, None => CachedPolicy::default(), }, }; let new_policy = CachedPolicy::parse(raw_policy)?; let rctx = runtime_context!(ctx, KmContext); if rctx.runtime_id != new_policy.runtime_id { return Err(KeyManagerError::PolicyInvalid.into()); } if old_policy.serial > new_policy.serial { return Err(KeyManagerError::PolicyRollback.into()); } else if old_policy.serial == new_policy.serial { if old_policy.checksum != new_policy.checksum { return Err(KeyManagerError::PolicyChanged.into()); } inner.policy = Some(old_policy.clone()); return Ok(old_policy.checksum.clone()); } Self::save_raw_policy(raw_policy); let new_checksum = new_policy.checksum.clone(); inner.policy = Some(new_policy); Ok(new_checksum) } pub fn 
may_get_or_create_keys( &self, remote_enclave: &EnclaveIdentity, req: &RequestIds, ) -> Result<()> { let inner = self.inner.read().unwrap(); let policy = match inner.policy.as_ref() { Some(policy) => policy, None => return Err(KeyManagerError::InvalidAuthentication.into()), }; match policy.may_get_or_create_keys(remote_enclave, req) { true => Ok(()), false => Err(KeyManagerError::InvalidAuthentication.into()), } } pub fn may_replicate_master_secret(&self, remote_enclave: &EnclaveIdentity) -> Result<()> { #[cfg(target_env = "sgx")] { let our_id = EnclaveIdentity::current().expect("failed to query MRENCLAVE/MRSIGNER"); if our_id == *remote_enclave { return Ok(()); } } let inner = self.inner.read().unwrap(); let policy = match inner.policy.as_ref() { Some(policy) => policy, None => return Err(KeyManagerError::InvalidAuthentication.into()), }; match policy.may_replicate_master_secret(remote_enclave) { true => Ok(()), false => Err(KeyManagerError::InvalidAuthentication.into()), } } pub fn may_replicate_from(&self) -> Option<HashSet<EnclaveIdentity>> { let inner = self.inner.read().unwrap(); let mut src_set = match inner.policy.as_ref() { Some(policy) => policy.may_replicate_from.clone(), None => HashSet::new(), }; match EnclaveIdentity::current() { Some(id) => { src_set.insert(id); } None => {} }; match src_set.is_empty() { true => None, false => Some(src_set), } } fn load_policy() -> Option<CachedPolicy> { let ciphertext = StorageContext::with_current(|_mkvs, untrusted_local| { untrusted_local.get(POLICY_STORAGE_KEY.to_vec()) }) .unwrap(); unseal(Keypolicy::MRENCLAVE, &POLICY_SEAL_CONTEXT, &ciphertext).map(|plaintext| { CachedPolicy::parse(&plaintext).expect("failed to deserialize persisted policy") }) } fn save_raw_policy(raw_policy: &Vec<u8>) { let ciphertext = seal(Keypolicy::MRENCLAVE, &POLICY_SEAL_CONTEXT, &raw_policy); StorageContext::with_current(|_mkvs, untrusted_local| { untrusted_local.insert(POLICY_STORAGE_KEY.to_vec(), ciphertext) }) .expect("failed to 
persist master secret"); } } #[derive(Clone, Debug)] struct CachedPolicy { pub checksum: Vec<u8>, pub serial: u32, pub runtime_id: RuntimeId, pub may_query: HashMap<RuntimeId, HashSet<EnclaveIdentity>>, pub may_replicate: HashSet<EnclaveIdentity>, pub may_replicate_from: HashSet<EnclaveIdentity>, } impl CachedPolicy { fn parse(raw: &Vec<u8>) -> Result<Self> { let untrusted_policy: SignedPolicySGX = cbor::from_slice(&raw)?; let policy = untrusted_policy.verify()?; let mut cached_policy = Self::default(); cached_policy.checksum = sha3_256(&raw).to_vec(); cached_policy.serial = policy.serial; cached_policy.runtime_id = policy.id; let enclave_identity = match EnclaveIdentity::current() { Some(enclave_identity) => enclave_identity, None => return Ok(cached_policy), }; let enclave_policy = match policy.enclaves.get(&enclave_identity) { Some(enclave_policy) => enclave_policy, None => return Ok(cached_policy), }; for (rt_id, ids) in &enclave_policy.may_query { let mut query_ids = HashSet::new(); for e_id in ids { query_ids.insert(e_id.clone()); } cached_policy.may_query.insert(*rt_id, query_ids); } for e_id in &enclave_policy.may_replicate { cached_policy.may_replicate.insert(e_id.clone()); } for (e_id, other_policy) in &policy.enclaves { if other_policy.may_replicate.contains(&enclave_identity) { cached_policy.may_replicate_from.insert(e_id.clone()); } } Ok(cached_policy) } fn default() -> Self { CachedPolicy { checksum: vec![], serial: 0, runtime_id: RuntimeId::default(), may_query: HashMap::new(), may_replicate: HashSet::new(), may_replicate_from: HashSet::new(), } } fn may_get_or_create_keys(&self, remote_enclave: &EnclaveIdentity, req: &RequestIds) -> bool { let may_query = match self.may_query.get(&req.runtime_id) { Some(may_query) => may_query, None => return false, }; may_query.contains(remote_enclave) } fn may_replicate_master_secret(&self, remote_enclave: &EnclaveIdentity) -> bool { self.may_replicate.contains(remote_enclave) } }
use std::{ collections::{HashMap, HashSet}, sync::RwLock, }; use anyhow::Result; use lazy_static::lazy_static; use sgx_isa::Keypolicy; use tiny_keccak::sha3_256; use oasis_core_keymanager_api_common::*; use oasis_core_runtime::{ common::{ cbor, runtime::RuntimeId, sgx::{ avr::EnclaveIdentity, seal::{seal, unseal}, }, }, enclave_rpc::Context as RpcContext, runtime_context, storage::StorageContext, }; use crate::context::Context as KmContext; lazy_static! { static ref POLICY: Policy = Policy::new(); } const POLICY_STORAGE_KEY: &'static [u8] = b"keymanager_policy"; const POLICY_SEAL_CONTEXT: &'static [u8] = b"Ekiden Keymanager Seal policy v0"; pub struct Policy { inner: RwLock<Inner>, } struct Inner { policy: Option<CachedPolicy>, } impl Policy { fn new() -> Self { Self { inner: RwLock::new(Inner { policy: None }), } } pub fn unsafe_skip() -> bool { option_env!("OASIS_UNSAFE_SKIP_KM_POLICY").is_some() } pub fn global<'a>() -> &'a Policy { &POLICY } pub fn init(&self, ctx: &mut RpcContext, raw_policy: &Vec<u8>) -> Result<Vec<u8>> { if Self::unsafe_skip() { return Ok(vec![]); } let mut inner = self.inner.write().unwrap(); let old_policy = match inner.policy.as_ref() { Some(old_policy) => old_policy.clone(), None => match Self::load_policy() { Some(old_policy) => old_policy, None => CachedPolicy::default(), }, }; let new_policy = CachedPolicy::parse(raw_policy)?; let rctx = runtime_context!(ctx, KmContext); if rctx.runtime_id != new_policy.runtime_id { return Err(KeyManagerError::PolicyInvalid.into()); } if old_policy.serial > new_policy.serial { return Err(KeyManagerError::PolicyRollback.into()); } else if old_policy.serial == new_policy.serial { if old_policy.checksum != new_policy.checksum { return Err(KeyManagerError::PolicyChanged.into()); } inner.policy = Some(old_policy.clone()); return Ok(old_policy.checksum.clone()); } Self::save_raw_policy(raw_policy); let new_checksum = new_policy.checksum.clone(); inner.policy = Some(new_policy); Ok(new_checksum) } pub fn 
may_get_or_create_keys( &self, remote_enclave: &EnclaveIdentity, req: &RequestIds, ) -> Result<()> { let inner = self.inner.read().unwrap(); let policy = match inner.policy.as_ref() { Some(policy) => policy, None => return Err(KeyManagerError::InvalidAuthentication.into()), }; match policy.may_get_or_create_keys(remote_enclave, req) { true => Ok(()), false => Err(KeyManagerError::InvalidAuthentication.into()), } } pub fn may_replicate_master_secret(&self, remote_enclave: &EnclaveIdentity) -> Result<()> { #[cfg(target_env = "sgx")] { let our_id = EnclaveIdentity::current().expect("failed to query MRENCLAVE/MRSIGNER"); if our_id == *remote_enclave { return Ok(()); } } let inner = self.inner.read().unwrap(); let policy = match inner.policy.as_ref() { Some(policy) => policy, None => return Err(KeyManagerError::InvalidAuthentication.into()), }; match policy.may_replicate_master_secret(remote_enclave) { true => Ok(()), false => Err(KeyManagerError::InvalidAuthentication.into()), } } pub fn may_replicate_from(&self) -> Option<HashSet<EnclaveIdentity>> { let inner = self.inner.read().unwrap(); let mut src_set = match inner.policy.as_ref() { Some(policy) => policy.may_replicate_from.clone(), None => HashSet::new(), }; match EnclaveIdentity::current() { Some(id) => { src_set.insert(id); } None => {} }; match src_set.is_empty() { true => None, false => Some(src_set), } } fn load_policy() -> Option<CachedPolicy> { let ciphertext = StorageContext::with_current(|_mkvs, untrusted_local| { untrusted_local.get(POLICY_STORAGE_KEY.to_vec()) }) .unwrap(); unseal(Keypolicy::MRENCLAVE, &POLICY_SEAL_CONTEXT, &ciphertext).map(|plaintext| { CachedPolicy::parse(&plaintext).expect("failed to deserialize persisted policy") }) } fn save_raw_policy(raw_policy: &Vec<u8>) { let ciphertext = seal(Keypolicy::MRENCLAVE, &POLICY_SEAL_CONTEXT, &raw_policy); StorageContext::with_current(|_mkvs, untrusted_local| { untrusted_local.insert(POLICY_STORAGE_KEY.to_vec(), ciphertext) }) .expect("failed to 
persist master secret"); } } #[derive(Clone, Debug)] struct CachedPolicy { pub checksum: Vec<u8>, pub serial: u32, pub runtime_id: RuntimeId, pub may_query: HashMap<RuntimeId, HashSet<EnclaveIdentity>>, pub may_replicate: HashSet<EnclaveIdentity>, pub may_replicate_from: HashSet<EnclaveIdentity>, } impl CachedPolicy { fn parse(raw: &Vec<u8>) -> Result<Self> {
te_enclave: &EnclaveIdentity, req: &RequestIds) -> bool { let may_query = match self.may_query.get(&req.runtime_id) { Some(may_query) => may_query, None => return false, }; may_query.contains(remote_enclave) } fn may_replicate_master_secret(&self, remote_enclave: &EnclaveIdentity) -> bool { self.may_replicate.contains(remote_enclave) } }
let untrusted_policy: SignedPolicySGX = cbor::from_slice(&raw)?; let policy = untrusted_policy.verify()?; let mut cached_policy = Self::default(); cached_policy.checksum = sha3_256(&raw).to_vec(); cached_policy.serial = policy.serial; cached_policy.runtime_id = policy.id; let enclave_identity = match EnclaveIdentity::current() { Some(enclave_identity) => enclave_identity, None => return Ok(cached_policy), }; let enclave_policy = match policy.enclaves.get(&enclave_identity) { Some(enclave_policy) => enclave_policy, None => return Ok(cached_policy), }; for (rt_id, ids) in &enclave_policy.may_query { let mut query_ids = HashSet::new(); for e_id in ids { query_ids.insert(e_id.clone()); } cached_policy.may_query.insert(*rt_id, query_ids); } for e_id in &enclave_policy.may_replicate { cached_policy.may_replicate.insert(e_id.clone()); } for (e_id, other_policy) in &policy.enclaves { if other_policy.may_replicate.contains(&enclave_identity) { cached_policy.may_replicate_from.insert(e_id.clone()); } } Ok(cached_policy) } fn default() -> Self { CachedPolicy { checksum: vec![], serial: 0, runtime_id: RuntimeId::default(), may_query: HashMap::new(), may_replicate: HashSet::new(), may_replicate_from: HashSet::new(), } } fn may_get_or_create_keys(&self, remo
random
[ { "content": "/// Unseal a previously sealed secret to the enclave.\n\n///\n\n/// The `context` field is a domain separation tag.\n\n///\n\n/// # Panics\n\n///\n\n/// All parsing and authentication errors of the ciphertext are fatal and\n\n/// will result in a panic.\n\npub fn unseal(key_policy: Keypolicy, context: &[u8], ciphertext: &[u8]) -> Option<Vec<u8>> {\n\n let ct_len = ciphertext.len();\n\n if ct_len == 0 {\n\n return None;\n\n } else if ct_len < TAG_SIZE + NONCE_SIZE {\n\n panic!(\"ciphertext is corrupted, invalid size\");\n\n }\n\n let ct_len = ct_len - NONCE_SIZE;\n\n\n\n // Split the ciphertext || tag || nonce.\n\n let mut nonce = [0u8; NONCE_SIZE];\n\n nonce.copy_from_slice(&ciphertext[ct_len..]);\n\n let ciphertext = &ciphertext[..ct_len];\n\n\n\n let d2 = new_d2(key_policy, context);\n\n let plaintext = d2\n\n .open(&nonce, ciphertext.to_vec(), vec![])\n\n .expect(\"ciphertext is corrupted\");\n\n\n\n Some(plaintext)\n\n}\n\n\n", "file_path": "runtime/src/common/sgx/seal.rs", "rank": 0, "score": 555110.730664523 }, { "content": "/// Seal a secret to the enclave.\n\n///\n\n/// The `context` field is a domain separation tag.\n\npub fn seal(key_policy: Keypolicy, context: &[u8], data: &[u8]) -> Vec<u8> {\n\n let mut rng = OsRng {};\n\n\n\n // Encrypt the raw policy.\n\n let mut nonce = [0u8; NONCE_SIZE];\n\n rng.fill(&mut nonce);\n\n let d2 = new_d2(key_policy, context);\n\n let mut ciphertext = d2.seal(&nonce, data.to_vec(), vec![]);\n\n ciphertext.extend_from_slice(&nonce);\n\n\n\n ciphertext\n\n}\n\n\n", "file_path": "runtime/src/common/sgx/seal.rs", "rank": 1, "score": 464431.07619715517 }, { "content": "/// See `Kdf::get_or_create_keys`.\n\npub fn get_or_create_keys(req: &RequestIds, ctx: &mut RpcContext) -> Result<KeyPair> {\n\n // Authenticate the source enclave based on the MRSIGNER/MRENCLAVE/request\n\n // so that the keys are never released to an incorrect enclave.\n\n if !Policy::unsafe_skip() {\n\n let si = ctx.session_info.as_ref();\n\n 
let si = si.ok_or(KeyManagerError::NotAuthenticated)?;\n\n let their_id = &si.authenticated_avr.identity;\n\n\n\n Policy::global().may_get_or_create_keys(their_id, &req)?;\n\n }\n\n\n\n Kdf::global().get_or_create_keys(req)\n\n}\n\n\n", "file_path": "keymanager-lib/src/methods.rs", "rank": 2, "score": 440369.1695125297 }, { "content": "/// See `Kdf::get_public_key`.\n\npub fn get_public_key(req: &RequestIds, _ctx: &mut RpcContext) -> Result<Option<SignedPublicKey>> {\n\n let kdf = Kdf::global();\n\n\n\n // No authentication, absolutely anyone is allowed to query public keys.\n\n\n\n let pk = kdf.get_public_key(req)?;\n\n pk.map_or(Ok(None), |pk| Ok(Some(kdf.sign_public_key(pk)?)))\n\n}\n\n\n", "file_path": "keymanager-lib/src/methods.rs", "rank": 3, "score": 413668.2231023228 }, { "content": "fn new_d2(key_policy: Keypolicy, context: &[u8]) -> DeoxysII {\n\n let mut seal_key = egetkey(key_policy, context);\n\n let d2 = DeoxysII::new(&seal_key);\n\n seal_key.zeroize();\n\n\n\n d2\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_seal_unseal() {\n\n // Test different policies.\n\n let sealed_a = seal(Keypolicy::MRSIGNER, b\"MRSIGNER\", b\"Mr. Signer\");\n\n let unsealed_a = unseal(Keypolicy::MRSIGNER, b\"MRSIGNER\", &sealed_a);\n\n assert_eq!(unsealed_a, Some(b\"Mr. Signer\".to_vec()));\n\n\n\n let sealed_b = seal(Keypolicy::MRENCLAVE, b\"MRENCLAVE\", b\"Mr. 
Enclave\");\n", "file_path": "runtime/src/common/sgx/seal.rs", "rank": 4, "score": 412339.1074830485 }, { "content": "/// Initialize the Kdf.\n\nfn init_kdf(req: &InitRequest, ctx: &mut RpcContext) -> Result<SignedInitResponse> {\n\n let policy_checksum = Policy::global().init(ctx, &req.policy)?;\n\n Kdf::global().init(&req, ctx, policy_checksum)\n\n}\n\n\n", "file_path": "keymanager-lib/src/keymanager.rs", "rank": 5, "score": 403265.7378880047 }, { "content": "#[inline]\n\npub fn cbor_decode<T>(mut buf: &[u8]) -> Result<T>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n // gRPC can return an empty buffer if there is no response. Unfortunately the\n\n // CBOR parser fails to decode an empty buffer even if the target type is the\n\n // unit type (). As a workaround we replace the buffer with a decodable one.\n\n if buf.is_empty() {\n\n buf = CBOR_NULL;\n\n }\n\n\n\n cbor::from_slice(buf).map_err(|e| Error::Codec(Box::new(e)))\n\n}\n\n\n\n/// A helper macro for defining gRPC methods using the CBOR codec.\n\nmacro_rules! grpc_method {\n\n ($id:ident, $name:expr, $rq:ty, $rsp:ty) => {\n\n const $id: ::grpcio::Method<$rq, $rsp> = ::grpcio::Method {\n\n ty: ::grpcio::MethodType::Unary,\n\n name: $name,\n", "file_path": "client/src/grpc.rs", "rank": 6, "score": 401215.8419811811 }, { "content": "/// egetkey returns a 256 bit key suitable for sealing secrets to the\n\n/// enclave in cold storage, derived from the results of the `EGETKEY`\n\n/// instruction. 
The `context` field is a domain separation tag.\n\n///\n\n/// Note: The key can also be used for other things (eg: as an X25519\n\n/// private key).\n\npub fn egetkey(key_policy: Keypolicy, context: &[u8]) -> [u8; 32] {\n\n let mut k = [0u8; 32];\n\n\n\n // Obtain the per-CPU package SGX sealing key, with the requested\n\n // policy.\n\n let master_secret = egetkey_impl(key_policy, context);\n\n\n\n // Expand the 128 bit EGETKEY result into a 256 bit key, suitable\n\n // for use with our MRAE primitives.\n\n let mut kdf = KMac::new_kmac256(&master_secret, SEAL_KDF_CUSTOM);\n\n kdf.update(&context);\n\n kdf.finalize(&mut k);\n\n\n\n k\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "runtime/src/common/sgx/egetkey.rs", "rank": 7, "score": 393337.9154520627 }, { "content": "/// Deserializes a slice to a value.\n\npub fn from_slice<'a, T>(slice: &'a [u8]) -> Result<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n serde_cbor::from_slice(slice)\n\n}\n", "file_path": "runtime/src/common/cbor.rs", "rank": 8, "score": 388727.60177738476 }, { "content": "/// Set the global set of trusted policy signers.\n\n/// Changing the set of policy signers after the first call is not possible.\n\npub fn set_trusted_policy_signers(signers: TrustedPolicySigners) -> bool {\n\n INIT_TRUSTED_SIGNERS_ONCE.call_once(|| {\n\n *TRUSTED_SIGNERS.lock().unwrap() = signers;\n\n });\n\n\n\n true\n\n}\n\n\n\nconst POLICY_SIGN_CONTEXT: &'static [u8] = b\"oasis-core/keymanager: policy\";\n\n\n\nimpl SignedPolicySGX {\n\n /// Verify the signatures and return the PolicySGX, if the signatures are correct.\n\n pub fn verify(&self) -> Result<PolicySGX> {\n\n // Verify the signatures.\n\n let untrusted_policy_raw = cbor::to_vec(&self.policy);\n\n let mut signers: HashSet<OasisPublicKey> = HashSet::new();\n\n for sig in &self.signatures {\n\n let public_key = match sig.public_key {\n\n Some(public_key) => public_key,\n\n None => return Err(KeyManagerError::PolicyInvalid.into()),\n", 
"file_path": "keymanager-api-common/src/lib.rs", "rank": 9, "score": 388341.8959714462 }, { "content": "#[cfg(not(target_env = \"sgx\"))]\n\nfn egetkey_impl(key_policy: Keypolicy, context: &[u8]) -> [u8; 16] {\n\n let mut k = [0u8; 16];\n\n\n\n // Deterministically generate a test master key from the context.\n\n let mut kdf = match key_policy {\n\n Keypolicy::MRENCLAVE => KMac::new_kmac256(MOCK_MRENCLAVE_KEY, MOCK_KDF_CUSTOM),\n\n Keypolicy::MRSIGNER => KMac::new_kmac256(MOCK_MRSIGNER_KEY, MOCK_KDF_CUSTOM),\n\n _ => panic!(\"Invalid key_policy\"),\n\n };\n\n kdf.update(&context);\n\n kdf.finalize(&mut k);\n\n\n\n k\n\n}\n\n\n", "file_path": "runtime/src/common/sgx/egetkey.rs", "rank": 10, "score": 384170.20019969833 }, { "content": "/// See `Kdf::replicate_master_secret`.\n\npub fn replicate_master_secret(\n\n _req: &ReplicateRequest,\n\n ctx: &mut RpcContext,\n\n) -> Result<ReplicateResponse> {\n\n // Authenticate the source enclave based on the MRSIGNER/MRNELCAVE.\n\n if !Policy::unsafe_skip() {\n\n let si = ctx.session_info.as_ref();\n\n let si = si.ok_or(KeyManagerError::NotAuthenticated)?;\n\n let their_id = &si.authenticated_avr.identity;\n\n\n\n Policy::global().may_replicate_master_secret(their_id)?;\n\n }\n\n\n\n Kdf::global().replicate_master_secret()\n\n}\n", "file_path": "keymanager-lib/src/methods.rs", "rank": 11, "score": 357306.6961298116 }, { "content": "/// Add a tracing span context to the provided `Context`.\n\npub fn add_span_context(ctx: &mut Context, span_context: Vec<u8>) {\n\n ctx.add_value(TRACING_SPAN_CONTEXT_KEY, span_context);\n\n}\n\n\n", "file_path": "runtime/src/tracing.rs", "rank": 12, "score": 335814.811210542 }, { "content": "#[inline]\n\npub fn cbor_encode<T>(t: &T, buf: &mut Vec<u8>)\n\nwhere\n\n T: Serialize,\n\n{\n\n cbor::to_writer(buf, t)\n\n}\n\n\n\n/// CBOR decoding wrapper for gRPC.\n", "file_path": "client/src/grpc.rs", "rank": 13, "score": 328155.9922482591 }, { "content": "/// Serializes a value to a vector.\n\npub fn 
to_vec<T>(value: &T) -> Vec<u8>\n\nwhere\n\n T: Serialize,\n\n{\n\n // Use to_value first to force serialization into canonical format.\n\n serde_cbor::to_vec(&to_value(&value)).unwrap()\n\n}\n\n\n", "file_path": "runtime/src/common/cbor.rs", "rank": 14, "score": 326780.0278229007 }, { "content": "/// Verify attestation report.\n\npub fn verify(avr: &AVR) -> Result<AuthenticatedAVR> {\n\n let unsafe_skip_avr_verification = option_env!(\"OASIS_UNSAFE_SKIP_AVR_VERIFY\").is_some();\n\n let strict_avr_verification = option_env!(\"OASIS_STRICT_AVR_VERIFY\").is_some();\n\n\n\n // Get the time.\n\n let timestamp_now = insecure_posix_time();\n\n\n\n // Verify IAS signature.\n\n if !unsafe_skip_avr_verification {\n\n validate_avr_signature(\n\n &avr.certificate_chain,\n\n &avr.body,\n\n &avr.signature,\n\n timestamp_now as u64,\n\n )?;\n\n }\n\n\n\n // Parse AV report body.\n\n let avr_body = ParsedAVR::new(&avr)?;\n\n\n", "file_path": "runtime/src/common/sgx/avr.rs", "rank": 15, "score": 326435.84257968806 }, { "content": "/// Initialize a keymanager with trusted policy signers.\n\npub fn new_keymanager(signers: TrustedPolicySigners) -> Box<dyn Initializer> {\n\n // Initializer.\n\n let init = move |protocol: &Arc<Protocol>,\n\n _rak: &Arc<RAK>,\n\n _rpc_demux: &mut RpcDemux,\n\n rpc: &mut RpcDispatcher|\n\n -> Option<Box<dyn TxnDispatcher>> {\n\n // Initialize the set of trusted policy signers.\n\n set_trusted_policy_signers(signers.clone());\n\n\n\n // Register RPC methods exposed via EnclaveRPC to remote clients.\n\n {\n\n use crate::methods::*;\n\n with_api! { register_runtime_rpc_methods!(rpc, api); }\n\n }\n\n\n\n // TODO: Someone that cares can add macros for this, I do not. 
Note\n\n // that these are local methods, for use by the node key manager\n\n // component.\n\n rpc.add_method(\n", "file_path": "keymanager-lib/src/keymanager.rs", "rank": 17, "score": 322419.00505143363 }, { "content": "/// Helper for doing encrypted MKVS operations.\n\nfn get_encryption_context(ctx: &mut TxnContext, key: &[u8]) -> Result<EncryptionContext> {\n\n let rctx = runtime_context!(ctx, Context);\n\n\n\n // Derive key pair ID based on key.\n\n let key_pair_id = KeyPairId::from(Hash::digest_bytes(key).as_ref());\n\n\n\n // Fetch encryption keys.\n\n let io_ctx = IoContext::create_child(&ctx.io_ctx);\n\n let result = rctx.km_client.get_or_create_keys(io_ctx, key_pair_id);\n\n let key = Executor::with_current(|executor| executor.block_on(result))?;\n\n\n\n Ok(EncryptionContext::new(key.state_key.as_ref()))\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 18, "score": 319862.0053276393 }, { "content": "/// Generates a public/private key pair suitable for use with\n\n/// `derive_symmetric_key`, `box_seal`, and `box_open`.\n\npub fn generate_key_pair() -> ([u8; 32], [u8; 32]) {\n\n let mut rng = OsRng {};\n\n\n\n let sk = x25519_dalek::StaticSecret::new(&mut rng);\n\n let pk = x25519_dalek::PublicKey::from(&sk);\n\n\n\n (pk.as_bytes().clone(), sk.to_bytes())\n\n}\n\n\n", "file_path": "runtime/src/common/crypto/mrae/deoxysii.rs", "rank": 19, "score": 295333.49507645046 }, { "content": "/// Boxes (\"seals\") the provided additional data and plaintext via\n\n/// Deoxys-II-256-128 using a symmetric key derived from the provided\n\n/// X25519 public and private keys.\n\n/// The nonce should be `NONCE_SIZE` bytes long and unique for all time\n\n/// for a given public and private key tuple.\n\npub fn box_seal(\n\n nonce: &[u8; NONCE_SIZE],\n\n plaintext: Vec<u8>,\n\n additional_data: Vec<u8>,\n\n peers_public_key: &[u8; 32],\n\n private_key: &[u8; 32],\n\n) -> Result<Vec<u8>> {\n\n let key = derive_symmetric_key(peers_public_key, 
private_key);\n\n\n\n let d2 = DeoxysII::new(&key);\n\n\n\n Ok(d2.seal(nonce, plaintext, additional_data))\n\n}\n\n\n", "file_path": "runtime/src/common/crypto/mrae/deoxysii.rs", "rank": 20, "score": 291294.51654893835 }, { "content": "// Check if s < L, per RFC 8032, inspired by the Go runtime library's version\n\n// of this check.\n\nfn sc_minimal(raw_s: &[u8]) -> bool {\n\n let mut rd = Cursor::new(raw_s);\n\n let mut s = [0u64; 4];\n\n\n\n // Read the raw scalar into limbs, and reverse it, because the raw\n\n // representation is little-endian.\n\n rd.read_u64_into::<LittleEndian>(&mut s[..]).unwrap();\n\n s.reverse();\n\n\n\n // Compare each limb, from most significant to least.\n\n for i in 0..4 {\n\n if s[i] > CURVE_ORDER[i] {\n\n return false;\n\n } else if s[i] < CURVE_ORDER[i] {\n\n return true;\n\n }\n\n }\n\n\n\n // The scalar is equal to the order of the curve.\n\n false\n", "file_path": "runtime/src/common/crypto/signature.rs", "rank": 21, "score": 290813.7848587443 }, { "content": "pub fn trusted_policy_signers() -> TrustedPolicySigners {\n\n TrustedPolicySigners {\n\n signers: {\n\n let mut set = HashSet::new();\n\n if option_env!(\"OASIS_UNSAFE_KM_POLICY_KEYS\").is_some() {\n\n for seed in [\n\n \"ekiden key manager test multisig key 0\",\n\n \"ekiden key manager test multisig key 1\",\n\n \"ekiden key manager test multisig key 2\",\n\n ]\n\n .iter()\n\n {\n\n let private_key = OasisPrivateKey::from_test_seed(seed.to_string());\n\n set.insert(private_key.public_key());\n\n }\n\n }\n\n\n\n // TODO: Populate with the production keys as well.\n\n set\n\n },\n\n threshold: 9001, // TODO: Set this to a real value.\n\n }\n\n}\n", "file_path": "tests/runtimes/simple-keymanager/src/api.rs", "rank": 22, "score": 282742.34294921317 }, { "content": "/// Retrieve a tracing span context from the provided `Context`.\n\npub fn get_span_context(ctx: &Context) -> Option<&Vec<u8>> {\n\n ctx.get_value(TRACING_SPAN_CONTEXT_KEY)\n\n}\n", "file_path": 
"runtime/src/tracing.rs", "rank": 23, "score": 280614.69823397766 }, { "content": "\tSerial uint32 `json:\"serial\"`\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 24, "score": 280349.97943466017 }, { "content": "/// Parse runtime call output.\n\npub fn parse_call_output<O>(output: Vec<u8>) -> Result<O>\n\nwhere\n\n O: DeserializeOwned,\n\n{\n\n let output: TxnOutput = cbor::from_slice(&output)?;\n\n match output {\n\n TxnOutput::Success(data) => Ok(cbor::from_value(data)?),\n\n TxnOutput::Error(error) => Err(TxnClientError::TxnFailed(error).into()),\n\n }\n\n}\n", "file_path": "client/src/transaction/client.rs", "rank": 25, "score": 268642.83377976273 }, { "content": "#[inline]\n\nfn cbor_decode<T>(buf: &[u8]) -> Result<T>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n cbor::from_slice(buf).map_err(|e| Error::Codec(Box::new(e)))\n\n}\n", "file_path": "runtime/src/storage/mkvs/interop/rpc.rs", "rank": 26, "score": 268155.95636356 }, { "content": "/// Get the logger.\n\npub fn get_logger(module: &'static str) -> slog::Logger {\n\n LOGGER.new(o!(\"module\" => module))\n\n}\n\n\n", "file_path": "runtime/src/common/logger.rs", "rank": 27, "score": 265575.9970425937 }, { "content": "/// Convert a value to a `Value`.\n\npub fn to_value<T>(value: T) -> Value\n\nwhere\n\n T: Serialize,\n\n{\n\n serde_cbor::value::to_value(value).unwrap()\n\n}\n\n\n", "file_path": "runtime/src/common/cbor.rs", "rank": 28, "score": 262494.13062262826 }, { "content": "struct Inner {\n\n /// Runtime Id for which we are going to request keys.\n\n runtime_id: RuntimeId,\n\n /// RPC client.\n\n rpc_client: Client,\n\n /// Local cache for the get_or_create_keys KeyManager endpoint.\n\n get_or_create_secret_keys_cache: RwLock<LruCache<KeyPairId, KeyPair>>,\n\n /// Local cache for the get_public_key KeyManager endpoint.\n\n get_public_key_cache: RwLock<LruCache<KeyPairId, SignedPublicKey>>,\n\n}\n\n\n\n/// A key manager client which talks to a remote key manager enclave.\n\npub struct 
RemoteClient {\n\n inner: Arc<Inner>,\n\n}\n\n\n\nimpl RemoteClient {\n\n fn new(runtime_id: RuntimeId, client: RpcClient, keys_cache_sizes: usize) -> Self {\n\n Self {\n\n inner: Arc::new(Inner {\n", "file_path": "keymanager-client/src/client.rs", "rank": 29, "score": 260406.8275534663 }, { "content": "struct Inner {\n\n /// Master secret.\n\n master_secret: Option<MasterSecret>,\n\n checksum: Option<Vec<u8>>,\n\n runtime_id: Option<RuntimeId>,\n\n signer: Option<Arc<dyn signature::Signer>>,\n\n cache: LruCache<Vec<u8>, KeyPair>,\n\n}\n\n\n\nimpl Inner {\n\n fn reset(&mut self) {\n\n self.master_secret = None;\n\n self.checksum = None;\n\n self.runtime_id = None;\n\n self.signer = None;\n\n self.cache.clear();\n\n }\n\n\n\n fn derive_contract_key(&self, req: &RequestIds) -> Result<KeyPair> {\n\n let checksum = self.get_checksum()?;\n", "file_path": "keymanager-lib/src/kdf.rs", "rank": 30, "score": 260406.82755346625 }, { "content": "/// Return previously set runtime ID of this runtime.\n\nfn get_runtime_id(_args: &(), ctx: &mut TxnContext) -> Result<Option<String>> {\n\n let rctx = runtime_context!(ctx, Context);\n\n\n\n Ok(Some(rctx.test_runtime_id.to_string()))\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 31, "score": 258927.81933255453 }, { "content": "struct Inner {\n\n timestamp: i64,\n\n}\n\n\n", "file_path": "runtime/src/common/time.rs", "rank": 32, "score": 258883.97542214138 }, { "content": "fn parse_avr_timestamp(timestamp: &str) -> Result<i64> {\n\n let timestamp_unix = match Utc.datetime_from_str(&timestamp, IAS_TS_FMT) {\n\n Ok(timestamp) => timestamp.timestamp(),\n\n _ => return Err(AVRError::MalformedTimestamp.into()),\n\n };\n\n Ok(timestamp_unix)\n\n}\n\n\n", "file_path": "runtime/src/common/sgx/avr.rs", "rank": 33, "score": 258597.01479613627 }, { "content": "/// Retrieve a key/value pair.\n\nfn get(args: &String, ctx: &mut TxnContext) -> Result<Option<String>> {\n\n if ctx.check_only {\n\n return 
Err(CheckOnlySuccess::default().into());\n\n }\n\n ctx.emit_txn_tag(b\"kv_op\", b\"get\");\n\n ctx.emit_txn_tag(b\"kv_key\", args.as_bytes());\n\n\n\n let existing = StorageContext::with_current(|mkvs, _untrusted_local| {\n\n mkvs.get(IoContext::create_child(&ctx.io_ctx), args.as_bytes())\n\n });\n\n Ok(existing.map(|v| String::from_utf8(v)).transpose()?)\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 34, "score": 257920.92046832503 }, { "content": "/// Remove a key/value pair.\n\nfn remove(args: &String, ctx: &mut TxnContext) -> Result<Option<String>> {\n\n if ctx.check_only {\n\n return Err(CheckOnlySuccess::default().into());\n\n }\n\n ctx.emit_txn_tag(b\"kv_op\", b\"remove\");\n\n ctx.emit_txn_tag(b\"kv_key\", args.as_bytes());\n\n\n\n let existing = StorageContext::with_current(|mkvs, _untrusted_local| {\n\n mkvs.remove(IoContext::create_child(&ctx.io_ctx), args.as_bytes())\n\n });\n\n Ok(existing.map(|v| String::from_utf8(v)).transpose()?)\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 35, "score": 257920.92046832503 }, { "content": "#[inline]\n\nfn cbor_encode<T>(t: &T, buf: &mut Vec<u8>)\n\nwhere\n\n T: Serialize,\n\n{\n\n cbor::to_writer(buf, t)\n\n}\n\n\n\n/// CBOR decoding wrapper for gRPC.\n", "file_path": "runtime/src/storage/mkvs/interop/rpc.rs", "rank": 36, "score": 256402.0003443204 }, { "content": "/// Insert a key/value pair.\n\nfn insert(args: &KeyValue, ctx: &mut TxnContext) -> Result<Option<String>> {\n\n if args.value.as_bytes().len() > 128 {\n\n return Err(anyhow!(\"Value too big to be inserted.\"));\n\n }\n\n if ctx.check_only {\n\n return Err(CheckOnlySuccess::default().into());\n\n }\n\n ctx.emit_txn_tag(b\"kv_op\", b\"insert\");\n\n ctx.emit_txn_tag(b\"kv_key\", args.key.as_bytes());\n\n\n\n let existing = StorageContext::with_current(|mkvs, _untrusted_local| {\n\n mkvs.insert(\n\n IoContext::create_child(&ctx.io_ctx),\n\n args.key.as_bytes(),\n\n args.value.as_bytes(),\n\n 
)\n\n });\n\n Ok(existing.map(|v| String::from_utf8(v)).transpose()?)\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 37, "score": 254777.81035304553 }, { "content": "/// (encrypted) Remove a key/value pair.\n\nfn enc_remove(args: &String, ctx: &mut TxnContext) -> Result<Option<String>> {\n\n let enc_ctx = get_encryption_context(ctx, args.as_bytes())?;\n\n let existing = StorageContext::with_current(|mkvs, _untrusted_local| {\n\n enc_ctx.remove(mkvs, IoContext::create_child(&ctx.io_ctx), args.as_bytes())\n\n });\n\n Ok(existing.map(|v| String::from_utf8(v)).transpose()?)\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 38, "score": 254777.81035304553 }, { "content": "/// (encrypted) Retrieve a key/value pair.\n\nfn enc_get(args: &String, ctx: &mut TxnContext) -> Result<Option<String>> {\n\n let enc_ctx = get_encryption_context(ctx, args.as_bytes())?;\n\n let existing = StorageContext::with_current(|mkvs, _untrusted_local| {\n\n enc_ctx.get(mkvs, IoContext::create_child(&ctx.io_ctx), args.as_bytes())\n\n });\n\n Ok(existing.map(|v| String::from_utf8(v)).transpose()?)\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 39, "score": 254777.81035304553 }, { "content": "/// (encrypted) Insert a key/value pair.\n\nfn enc_insert(args: &KeyValue, ctx: &mut TxnContext) -> Result<Option<String>> {\n\n // NOTE: This is only for example purposes, the correct way would be\n\n // to also generate a (deterministic) nonce.\n\n let nonce = [0u8; NONCE_SIZE];\n\n\n\n let enc_ctx = get_encryption_context(ctx, args.key.as_bytes())?;\n\n let existing = StorageContext::with_current(|mkvs, _untrusted_local| {\n\n enc_ctx.insert(\n\n mkvs,\n\n IoContext::create_child(&ctx.io_ctx),\n\n args.key.as_bytes(),\n\n args.value.as_bytes(),\n\n &nonce,\n\n )\n\n });\n\n Ok(existing.map(|v| String::from_utf8(v)).transpose()?)\n\n}\n\n\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 40, 
"score": 251726.11000594223 }, { "content": "fn deserialize_base64<'de, D>(deserializer: D) -> Result<Option<Vec<u8>>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n struct Base64Visitor;\n\n\n\n impl<'de> serde::de::Visitor<'de> for Base64Visitor {\n\n type Value = Option<Vec<u8>>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(formatter, \"base64 ASCII text\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n base64::decode(v)\n\n .map_err(serde::de::Error::custom)\n\n .map(Some)\n", "file_path": "runtime/src/storage/mkvs/tests/mod.rs", "rank": 41, "score": 250880.27842436818 }, { "content": "/// Serializes a value to a writer.\n\npub fn to_writer<W, T>(writer: W, value: &T)\n\nwhere\n\n W: Write,\n\n T: Serialize,\n\n{\n\n // Use to_value first to force serialization into canonical format.\n\n serde_cbor::to_writer(writer, &to_value(&value)).unwrap()\n\n}\n\n\n", "file_path": "runtime/src/common/cbor.rs", "rank": 42, "score": 249544.2330849066 }, { "content": "pub fn main() {\n\n let init = new_keymanager(api::trusted_policy_signers());\n\n oasis_core_runtime::start_runtime(init, version_from_cargo!());\n\n}\n", "file_path": "tests/runtimes/simple-keymanager/src/main.rs", "rank": 43, "score": 236221.94961989744 }, { "content": "/// Unboxes (\"opens\") the provided additional data and ciphertext via\n\n/// Deoxys-II-256-128 using a symmetric key derived from the provided\n\n/// X25519 public and private keys.\n\n/// The nonce should be `NONCE_SIZE` bytes long and both it and the additional\n\n/// data must match the value passed to `box_seal`.\n\npub fn box_open(\n\n nonce: &[u8; NONCE_SIZE],\n\n ciphertext: Vec<u8>,\n\n additional_data: Vec<u8>,\n\n peers_public_key: &[u8; 32],\n\n private_key: &[u8; 32],\n\n) -> Result<Vec<u8>> {\n\n let key = derive_symmetric_key(peers_public_key, private_key);\n\n\n\n let d2 = 
DeoxysII::new(&key);\n\n\n\n d2.open(nonce, ciphertext, additional_data)\n\n .map_err(|err| err.into())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n extern crate test;\n\n\n\n use self::test::{black_box, Bencher};\n", "file_path": "runtime/src/common/crypto/mrae/deoxysii.rs", "rank": 45, "score": 231780.82139873478 }, { "content": "\tMayQuery map[common.Namespace][]sgx.EnclaveIdentity `json:\"may_query\"`\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 46, "score": 229585.38524422116 }, { "content": "/// Returns the number of seconds since the UNIX epoch. The time returned\n\n/// is guaranteed to never decrease within each enclave instance (though it\n\n/// may decrease iff the enclave is re-launched).\n\n///\n\n/// The returned timestamp MUST NOT be trusted on in any way, as the underlying\n\n/// time source is reliant on the host operating system.\n\npub fn insecure_posix_time() -> i64 {\n\n let mut inner = TIME_SOURCE.inner.lock().unwrap();\n\n\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();\n\n let now = now.as_secs() as i64;\n\n\n\n if now < inner.timestamp {\n\n panic!(\"time: clock appeared to have ran backwards\")\n\n }\n\n inner.timestamp = now;\n\n\n\n inner.timestamp\n\n}\n\n\n", "file_path": "runtime/src/common/time.rs", "rank": 47, "score": 222359.37215876576 }, { "content": "func New(opts ...Option) *Sigstruct {\n\n\tvar s Sigstruct\n\n\tfor _, v := range opts {\n\n\t\tv(&s)\n\n\t}\n\n\n\n\treturn &s\n", "file_path": "go/common/sgx/sigstruct/sigstruct.go", "rank": 48, "score": 220253.29554955446 }, { "content": "/// Derives a MRAE AEAD symmetric key suitable for use with the asymmetric\n\n/// box primitives from the provided X25519 public and private keys.\n\nfn derive_symmetric_key(public: &[u8; 32], private: &[u8; 32]) -> [u8; KEY_SIZE] {\n\n let public = x25519_dalek::PublicKey::from(public.clone());\n\n let private = x25519_dalek::StaticSecret::from(private.clone());\n\n\n\n let pmk = 
private.diffie_hellman(&public);\n\n\n\n let mut kdf = Kdf::new_varkey(b\"MRAE_Box_Deoxys-II-256-128\").expect(\"Hmac::new_varkey\");\n\n kdf.input(pmk.as_bytes());\n\n drop(pmk);\n\n\n\n let mut derived_key = [0u8; KEY_SIZE];\n\n let digest = kdf.result();\n\n derived_key.copy_from_slice(&digest.code().as_ref()[..KEY_SIZE]);\n\n\n\n derived_key\n\n}\n\n\n", "file_path": "runtime/src/common/crypto/mrae/deoxysii.rs", "rank": 49, "score": 218723.47328073817 }, { "content": "#[derive(Default, Debug)]\n\nstruct QuoteBody {\n\n version: u16,\n\n signature_type: u16,\n\n gid: u32,\n\n isv_svn_qe: u16,\n\n isv_svn_pce: u16,\n\n basename: [u8; 32],\n\n report_body: Report,\n\n}\n\n\n\nimpl QuoteBody {\n\n /// Decode quote body.\n\n fn decode(quote_body: &Vec<u8>) -> Result<QuoteBody> {\n\n let mut reader = Cursor::new(quote_body);\n\n let mut quote_body: QuoteBody = QuoteBody::default();\n\n\n\n // TODO: Should we ensure that reserved bytes are all zero?\n\n\n\n // Quote body.\n\n quote_body.version = reader.read_u16::<LittleEndian>()?;\n", "file_path": "runtime/src/common/sgx/avr.rs", "rank": 50, "score": 217269.69200375196 }, { "content": "// Returns `insecure_posix_time` as SystemTime.\n\npub fn insecure_posix_system_time() -> SystemTime {\n\n UNIX_EPOCH + Duration::from_secs(insecure_posix_time() as u64)\n\n}\n\n\n\n/// Force update the minimum timestamp from a semi-trusted source (eg: the AVR\n\n/// timestamp), under the assumption that the semi-trusted source is more trust\n\n/// worthy than the host operating system.\n\npub(crate) fn update_insecure_posix_time(timestamp: i64) {\n\n let mut inner = TIME_SOURCE.inner.lock().unwrap();\n\n\n\n if timestamp > inner.timestamp {\n\n inner.timestamp = timestamp;\n\n }\n\n\n\n // The IAS clock and local clock should be closely synced, and minor\n\n // differences in NTP implementations (eg: smear vs no smear), should\n\n // be masked by the fact that the AVR timestamp will be a minimum of\n\n // 1 RTT in the 
past.\n\n}\n\n\n\nlazy_static! {\n\n static ref TIME_SOURCE: TimeSource = TimeSource {\n\n inner: Mutex::new(Inner {\n\n timestamp: INITIAL_MINIMUM_TIME,\n\n })\n\n };\n\n}\n", "file_path": "runtime/src/common/time.rs", "rank": 51, "score": 216634.19410367883 }, { "content": "fn validate_avr_signature(\n\n cert_chain: &[u8],\n\n message: &[u8],\n\n signature: &[u8],\n\n unix_time: u64,\n\n) -> Result<()> {\n\n // Load the Intel SGX Attestation Report Signing CA certificate.\n\n let anchors = webpki::TLSServerTrustAnchors(&IAS_ANCHORS);\n\n\n\n // Decode the certificate chain.\n\n let raw_pem = percent_encoding::percent_decode(cert_chain).decode_utf8()?;\n\n let mut cert_chain = Vec::new();\n\n for pem in parse_many(&raw_pem.as_bytes()) {\n\n if pem.tag != PEM_CERTIFICATE_LABEL {\n\n return Err(AVRError::MalformedCertificatePEM.into());\n\n }\n\n cert_chain.push(pem.contents);\n\n }\n\n if cert_chain.len() == 0 {\n\n return Err(AVRError::NoCertificates.into());\n", "file_path": "runtime/src/common/sgx/avr.rs", "rank": 52, "score": 214096.24089564988 }, { "content": "/// Initialize the global slog_stdlog adapter to allow logging with the log crate (instead of slog).\n\npub fn init_logger(level: LogLevel) {\n\n INIT_GLOBAL_LOGGER.call_once(|| {\n\n let global_logger = LOGGER.new(o!(\"module\" => \"global\"));\n\n GLOBAL_LOGGER_SCOPE_GUARD\n\n .lock()\n\n .unwrap()\n\n .get_or_insert(slog_scope::set_global_logger(global_logger));\n\n let _log_guard = slog_stdlog::init_with_level(level).unwrap();\n\n });\n\n}\n", "file_path": "runtime/src/common/logger.rs", "rank": 53, "score": 212784.390982204 }, { "content": "fn validate_decoded_avr_signature(\n\n anchors: &webpki::TLSServerTrustAnchors,\n\n cert_ders: &Vec<Vec<u8>>,\n\n message: &[u8],\n\n signature: Vec<u8>,\n\n time: webpki::Time,\n\n) -> Result<()> {\n\n assert!(cert_ders.len() >= 1);\n\n let (cert_der, inter_ders) = cert_ders.split_at(1);\n\n let inter_ders: Vec<_> = inter_ders.iter().map(|der| 
&der[..]).collect();\n\n let cert = webpki::EndEntityCert::from(&cert_der[0])?;\n\n cert.verify_is_valid_tls_server_cert(IAS_SIG_ALGS, &anchors, &inter_ders, time)?;\n\n Ok(cert.verify_signature(IAS_SIG_ALGS[0], message, &signature)?)\n\n}\n\n\n\n/// Return true iff the (POXIX) timestamp is considered \"fresh\" for the purposes\n\n/// of a cached AVR, given the current time.\n\npub(crate) fn timestamp_is_fresh(now: i64, timestamp: i64) -> bool {\n\n (now - timestamp).abs() < 60 * 60 * 24\n\n}\n", "file_path": "runtime/src/common/sgx/avr.rs", "rank": 54, "score": 211093.54179591712 }, { "content": "type PolicySGX struct {\n\n\t// Serial is the monotonically increasing policy serial number.\n\n\tSerial uint32 `json:\"serial\"`\n\n\n\n\t// ID is the runtime ID that this policy is valid for.\n\n\tID common.Namespace `json:\"id\"`\n\n\n\n\t// Enclaves is the per-key manager enclave ID access control policy.\n\n\tEnclaves map[sgx.EnclaveIdentity]*EnclavePolicySGX `json:\"enclaves\"`\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 55, "score": 200120.3114430286 }, { "content": "struct Inner {\n\n private_key: Option<PrivateKey>,\n\n avr: Option<Arc<avr::AVR>>,\n\n avr_timestamp: Option<i64>,\n\n #[allow(unused)]\n\n enclave_identity: Option<avr::EnclaveIdentity>,\n\n #[allow(unused)]\n\n target_info: Option<Targetinfo>,\n\n #[allow(unused)]\n\n nonce: Option<String>,\n\n}\n\n\n\n/// Runtime attestation key.\n\n///\n\n/// The runtime attestation key (RAK) represents the identity of the enclave\n\n/// and can be used to sign remote attestations. 
Its purpose is to avoid\n\n/// round trips to IAS for each verification as the verifier can instead\n\n/// verify the RAK signature and the signature on the provided AVR which\n\n/// RAK to the enclave.\n\npub struct RAK {\n", "file_path": "runtime/src/rak.rs", "rank": 56, "score": 199189.97486458393 }, { "content": "func NewCommonStore(dataDir string) (*CommonStore, error) {\n\n\tlogger := logging.GetLogger(\"common/persistent\")\n\n\n\n\topts := badger.DefaultOptions(filepath.Join(dataDir, dbName))\n\n\topts = opts.WithLogger(cmnBadger.NewLogAdapter(logger))\n\n\topts = opts.WithSyncWrites(true)\n\n\t// Allow value log truncation if required (this is needed to recover the\n\n\t// value log file which can get corrupted in crashes).\n\n\topts = opts.WithTruncate(true)\n\n\topts = opts.WithCompression(options.None)\n\n\t// Reduce cache size to 128 KiB as the default is 1 GiB.\n\n\topts = opts.WithMaxCacheSize(128 * 1024)\n\n\n\n\tdb, err := badger.Open(opts)\n\n\tif err != nil {\n\n\t\treturn nil, fmt.Errorf(\"failed to open persistence database: %w\", err)\n\n\t}\n\n\n\n\tcs := &CommonStore{\n\n\t\tdb: db,\n\n\t\tgc: cmnBadger.NewGCWorker(logger, db),\n\n\t}\n\n\n\n\treturn cs, nil\n", "file_path": "go/common/persistent/persistent.go", "rank": 57, "score": 198732.9844825742 }, { "content": "type SignedPolicySGX struct {\n\n\tPolicy PolicySGX `json:\"policy\"`\n\n\n\n\tSignatures []signature.Signature `json:\"signatures\"`\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 58, "score": 197782.86615926088 }, { "content": "type EnclavePolicySGX struct {\n\n\t// MayQuery is the map of runtime IDs to the vector of enclave IDs that\n\n\t// may query private key material.\n\n\t//\n\n\t// TODO: This could be made more sophisticated and seggregate based on\n\n\t// contract ID as well, but for now punt on the added complexity.\n\n\tMayQuery map[common.Namespace][]sgx.EnclaveIdentity `json:\"may_query\"`\n\n\n\n\t// MayReplicate is the vector of enclave IDs that may 
retrieve the master\n\n\t// secret (Note: Each enclave ID may always implicitly replicate from other\n\n\t// instances of itself).\n\n\tMayReplicate []sgx.EnclaveIdentity `json:\"may_replicate\"`\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 59, "score": 197782.86615926088 }, { "content": "struct Ctx {\n\n mkvs: *mut dyn MKVS,\n\n untrusted_local: Arc<dyn KeyValue>,\n\n}\n\n\n\nthread_local! {\n\n static CTX: RefCell<Option<Ctx>> = RefCell::new(None);\n\n}\n\n\n", "file_path": "runtime/src/storage/context.rs", "rank": 60, "score": 196814.8974554636 }, { "content": "var PolicySGXSignatureContext = signature.NewContext(\"oasis-core/keymanager: policy\")\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 61, "score": 195519.4666668539 }, { "content": "struct Inner {\n\n spawned: AtomicBool,\n\n current_block: watch::Receiver<Option<BlockSnapshot>>,\n\n current_block_tx: Mutex<Option<watch::Sender<Option<BlockSnapshot>>>>,\n\n}\n\n\n\n/// Block watcher.\n\n#[derive(Clone)]\n\npub struct BlockWatcher {\n\n inner: Arc<Inner>,\n\n}\n\n\n\nimpl BlockWatcher {\n\n /// Create new block watcher.\n\n pub fn new() -> Self {\n\n let (tx, rx) = watch::channel(None);\n\n\n\n Self {\n\n inner: Arc::new(Inner {\n\n spawned: AtomicBool::new(false),\n", "file_path": "client/src/transaction/block_watcher.rs", "rank": 62, "score": 194613.95844767045 }, { "content": "struct Inner {\n\n /// Multiplexed session.\n\n session: Mutex<MultiplexedSession>,\n\n /// Used transport.\n\n transport: Box<dyn Transport>,\n\n /// Internal send queue receiver, only available until the controller\n\n /// is spawned (is None later).\n\n recvq: Mutex<Option<mpsc::Receiver<SendqRequest>>>,\n\n /// Internal send queue sender for serializing all requests.\n\n sendq: mpsc::Sender<SendqRequest>,\n\n /// Flag indicating whether the controller has been spawned.\n\n has_controller: AtomicBool,\n\n /// Maximum number of call retries.\n\n max_retries: usize,\n\n}\n\n\n\n/// RPC client.\n\npub 
struct RpcClient {\n\n inner: Arc<Inner>,\n\n}\n", "file_path": "client/src/enclave_rpc/client.rs", "rank": 63, "score": 194613.95844767045 }, { "content": "pub fn generate_key_value_pairs_ex(prefix: String, count: usize) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {\n\n let mut keys: Vec<Vec<u8>> = Vec::with_capacity(count);\n\n let mut values: Vec<Vec<u8>> = Vec::with_capacity(count);\n\n\n\n for i in 0..count {\n\n keys.push(format!(\"{}key {}\", prefix, i).into_bytes());\n\n values.push(format!(\"{}value {}\", prefix, i).into_bytes());\n\n }\n\n\n\n (keys, values)\n\n}\n", "file_path": "runtime/src/storage/mkvs/tree/tree_test.rs", "rank": 64, "score": 194576.6000044241 }, { "content": "fn bench_insert_batch(b: &mut Bencher, num_values: usize, commit: bool) {\n\n b.iter(|| {\n\n let mut tree = Tree::make().new(Box::new(NoopReadSyncer));\n\n for i in 0..num_values {\n\n let key = format!(\"key {}\", i);\n\n let value = format!(\"value {}\", i);\n\n tree.insert(Context::background(), key.as_bytes(), value.as_bytes())\n\n .expect(\"insert\");\n\n }\n\n if commit {\n\n tree.commit(Context::background(), Default::default(), 0)\n\n .expect(\"commit\");\n\n }\n\n });\n\n}\n\n\n", "file_path": "runtime/src/storage/mkvs/tree/tree_bench.rs", "rank": 65, "score": 194227.91706841774 }, { "content": "func SanityCheckSignedPolicySGX(currentSigPol, newSigPol *SignedPolicySGX) error {\n\n\tnewRawPol := cbor.Marshal(newSigPol.Policy)\n\n\tfor _, sig := range newSigPol.Signatures {\n\n\t\tif !sig.PublicKey.IsValid() {\n\n\t\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy signature's public key %s is invalid\", sig.PublicKey.String())\n\n\t\t}\n\n\t\tif !sig.Verify(PolicySGXSignatureContext, newRawPol) {\n\n\t\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy signature from %s is invalid\", sig.PublicKey.String())\n\n\t\t}\n\n\t}\n\n\n\n\t// If a prior version of the policy is not provided, then there is nothing\n\n\t// more to check. 
Even with a prior version of the document, since policy\n\n\t// updates can happen independently of a new version of the enclave, it's\n\n\t// basically impossible to generically validate the Enclaves portion.\n\n\tif currentSigPol == nil {\n\n\t\treturn nil\n\n\t}\n\n\n\n\tcurrentPol, newPol := currentSigPol.Policy, newSigPol.Policy\n\n\tif !newPol.ID.Equal(&currentPol.ID) {\n\n\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy runtime ID changed from %s to %s\", currentPol.ID, newPol.ID)\n\n\t}\n\n\n\n\tif currentPol.Serial >= newPol.Serial {\n\n\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy serial number did not increase\")\n\n\t}\n\n\n\n\treturn nil\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 66, "score": 193288.78011321358 }, { "content": "// Package SGX provides common Intel SGX datatypes and utilities.\n\npackage sgx\n\n\n\nimport (\n\n\t\"crypto/rsa\"\n\n\t\"crypto/sha256\"\n\n\t\"encoding/base64\"\n\n\t\"encoding/hex\"\n\n\t\"fmt\"\n\n\t\"io\"\n\n\t\"math/big\"\n\n)\n\n\n\nconst (\n\n\t// MrEnclaveSize is the size of an MrEnclave in bytes.\n\n\tMrEnclaveSize = sha256.Size\n\n\n\n\t// MrSignerSize is the size of an MrSigner in bytes.\n\n\tMrSignerSize = sha256.Size\n\n\n\n\t// enclaveIdentitySize is the total size of EnclaveIdentity in bytes.\n\n\tenclaveIdentitySize = MrSignerSize + MrEnclaveSize\n\n\n\n\t// ModulusSize is the required RSA modulus size in bits.\n\n\tModulusSize = 3072\n\n\n\n\tmodulusBytes = ModulusSize / 8\n\n)\n\n\n\n// AttributesFlags is attributes flags inside enclave report attributes.\n\ntype AttributesFlags uint64\n\n\n\n// Predefined enclave report attributes flags.\n\nconst (\n\n\tAttributeInit AttributesFlags = 0b0000_0001\n\n\tAttributeDebug AttributesFlags = 0b0000_0010\n\n\tAttributeMode64Bit AttributesFlags = 0b0000_0100\n\n\tAttributeProvisionKey AttributesFlags = 0b0001_0000\n\n\tAttributeEInitTokenKey AttributesFlags = 0b0010_0000\n\n)\n\n\n\n// Attributes is a SGX 
enclave attributes value inside report.\n\ntype Attributes struct {\n\n\tFlags AttributesFlags\n\n\tXfrm uint64\n\n}\n\n\n\n// GetFlagInit returns value of given flag attribute of the Report.\n\nfunc (a AttributesFlags) Contains(flag AttributesFlags) bool {\n\n\treturn (uint64(a) & uint64(flag)) != 0\n\n}\n\n\n\n// Mrenclave is a SGX enclave identity register value (MRENCLAVE).\n\ntype MrEnclave [MrEnclaveSize]byte\n\n\n\n// MarshalBinary encodes a Mrenclave into binary form.\n\nfunc (m *MrEnclave) MarshalBinary() (data []byte, err error) {\n\n\tdata = append([]byte{}, m[:]...)\n\n\treturn\n\n}\n\n\n\n// UnmarshalBinary decodes a binary marshaled Mrenclave.\n\nfunc (m *MrEnclave) UnmarshalBinary(data []byte) error {\n\n\tif len(data) != MrEnclaveSize {\n\n\t\treturn fmt.Errorf(\"sgx: malformed MRENCLAVE\")\n\n\t}\n\n\n\n\tcopy(m[:], data)\n\n\n\n\treturn nil\n\n}\n\n\n\n// UnmarshalHex decodes a hex marshaled MrEnclave.\n\nfunc (m *MrEnclave) UnmarshalHex(text string) error {\n\n\tb, err := hex.DecodeString(text)\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\treturn m.UnmarshalBinary(b)\n\n}\n\n\n\n// FromSgxs derives a MrEnclave from r, under the assumption that r will\n\n// provide the entire `.sgxs` file.\n\nfunc (m *MrEnclave) FromSgxs(r io.Reader) error {\n\n\t// A `.sgxs` file's SHA256 digest is conveniently the MRENCLAVE.\n\n\tvar buf [32768]byte\n\n\n\n\th := sha256.New()\n\nreadLoop:\n\n\tfor {\n\n\t\tl, err := r.Read(buf[:])\n\n\t\tif l > 0 {\n\n\t\t\t_, _ = h.Write(buf[:l])\n\n\t\t}\n\n\t\tswitch err {\n\n\t\tcase nil:\n\n\t\tcase io.EOF:\n\n\t\t\tbreak readLoop\n\n\t\tdefault:\n\n\t\t\treturn fmt.Errorf(\"sgx: failed to read .sgxs: %w\", err)\n\n\t\t}\n\n\t}\n\n\n\n\tsum := h.Sum(nil)\n\n\treturn m.UnmarshalBinary(sum)\n\n}\n\n\n\n// FromSgxsBytes dervies a MrEnclave from a byte slice containing a `.sgxs`\n\n// file.\n\nfunc (m *MrEnclave) FromSgxsBytes(data []byte) error {\n\n\tsum := sha256.Sum256(data)\n\n\treturn 
m.UnmarshalBinary(sum[:])\n\n}\n\n\n\n// String returns the string representation of a MrEnclave.\n\nfunc (m MrEnclave) String() string {\n\n\treturn hex.EncodeToString(m[:])\n\n}\n\n\n\n// MrSigner is a SGX enclave signer register value (MRSIGNER).\n\ntype MrSigner [MrSignerSize]byte\n\n\n\n// MarshalBinary encodes a MrSigner into binary form.\n\nfunc (m *MrSigner) MarshalBinary() (data []byte, err error) {\n\n\tdata = append([]byte{}, m[:]...)\n\n\treturn\n\n}\n\n\n\n// UnmarshalBinary decodes a binary marshaled MrSigner.\n\nfunc (m *MrSigner) UnmarshalBinary(data []byte) error {\n\n\tif len(data) != MrSignerSize {\n\n\t\treturn fmt.Errorf(\"sgx: malformed MRSIGNER\")\n\n\t}\n\n\n\n\tcopy(m[:], data)\n\n\n\n\treturn nil\n\n}\n\n\n\n// UnmarshalHex decodes a hex marshaled MrSigner.\n\nfunc (m *MrSigner) UnmarshalHex(text string) error {\n\n\tb, err := hex.DecodeString(text)\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\treturn m.UnmarshalBinary(b)\n\n}\n\n\n\n// FromPublicKey derives a MrSigner from a RSA public key.\n\nfunc (m *MrSigner) FromPublicKey(pk *rsa.PublicKey) error {\n\n\t// The MRSIGNER is the SHA256 digest of the little endian representation\n\n\t// of the RSA public key modulus.\n\n\tmodulus, err := To3072le(pk.N, false)\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\tsum := sha256.Sum256(modulus)\n\n\treturn m.UnmarshalBinary(sum[:])\n\n}\n\n\n\n// To3072le converts a big.Int to a 3072 bit little endian representation,\n\n// padding if allowed AND required.\n\nfunc To3072le(z *big.Int, mayPad bool) ([]byte, error) {\n\n\tbuf := z.Bytes()\n\n\n\n\tsz := len(buf)\n\n\tif sz != modulusBytes {\n\n\t\tpadLen := modulusBytes - sz\n\n\t\tif !mayPad || padLen < 0 {\n\n\t\t\treturn nil, fmt.Errorf(\"sgx: big int is not %v bits: %v\", ModulusSize, sz)\n\n\t\t}\n\n\n\n\t\t// Pad before reversing.\n\n\t\tpadded := make([]byte, padLen, modulusBytes)\n\n\t\tbuf = append(padded, buf...)\n\n\t}\n\n\n\n\tbuf = reverseBuffer(buf)\n\n\n\n\treturn buf, 
nil\n\n}\n\n\n\n// From3072le converts a 3072 bit buffer to the corresponding big.Int, assuming\n\n// that the buffer is in little endian representation.\n\nfunc From3072le(b []byte) (*big.Int, error) {\n\n\tif sz := len(b); sz != modulusBytes {\n\n\t\treturn nil, fmt.Errorf(\"sgx: buffer is not %v bits: %v\", modulusBytes, sz)\n\n\t}\n\n\n\n\tbuf := reverseBuffer(b)\n\n\tvar ret big.Int\n\n\treturn ret.SetBytes(buf), nil\n\n}\n\n\n\nfunc reverseBuffer(b []byte) []byte {\n\n\tbuf := append([]byte{}, b...)\n\n\tfor left, right := 0, len(buf)-1; left < right; left, right = left+1, right-1 {\n\n\t\tbuf[left], buf[right] = buf[right], buf[left]\n\n\t}\n\n\treturn buf\n\n}\n\n\n\n// String returns the string representation of a MrSigner.\n\nfunc (m MrSigner) String() string {\n\n\treturn hex.EncodeToString(m[:])\n\n}\n\n\n\n// EnclaveIdentity is a byte serialized MRSIGNER/MRENCLAVE pair.\n\ntype EnclaveIdentity struct {\n\n\tMrEnclave MrEnclave `json:\"mr_enclave\"`\n\n\tMrSigner MrSigner `json:\"mr_signer\"`\n\n}\n\n\n\n// MarshalText encodes an EnclaveIdentity into text form.\n\nfunc (id EnclaveIdentity) MarshalText() (data []byte, err error) {\n\n\treturn []byte(base64.StdEncoding.EncodeToString(append(id.MrEnclave[:], id.MrSigner[:]...))), nil\n\n}\n\n\n\n// UnmarshalText decodes a text marshaled EnclaveIdentity.\n\nfunc (id *EnclaveIdentity) UnmarshalText(text []byte) error {\n\n\tb, err := base64.StdEncoding.DecodeString(string(text))\n\n\tif err != nil {\n\n\t\treturn fmt.Errorf(\"sgx: malformed EnclaveIdentity: %w\", err)\n\n\t}\n\n\tif err := id.MrEnclave.UnmarshalBinary(b[:MrEnclaveSize]); err != nil {\n\n\t\treturn fmt.Errorf(\"sgx: malformed MrEnclave in EnclaveIdentity: %w\", err)\n\n\t}\n\n\tif err := id.MrSigner.UnmarshalBinary(b[MrEnclaveSize:]); err != nil {\n\n\t\treturn fmt.Errorf(\"sgx: malformed MrSigner in EnclaveIdentity: %w\", err)\n\n\t}\n\n\n\n\treturn nil\n\n}\n\n\n\n// UnmarshalHex decodes a hex marshaled EnclaveIdentity.\n\nfunc (id 
*EnclaveIdentity) UnmarshalHex(text string) error {\n\n\tb, err := hex.DecodeString(text)\n\n\tif err != nil || len(b) != enclaveIdentitySize {\n\n\t\treturn fmt.Errorf(\"sgx: malformed EnclaveIdentity: %w\", err)\n\n\t}\n\n\n\n\tcopy(id.MrEnclave[:], b[:MrEnclaveSize])\n\n\tcopy(id.MrSigner[:], b[MrEnclaveSize:])\n\n\n\n\treturn nil\n\n}\n\n\n\n// String returns the string representation of a EnclaveIdentity.\n\nfunc (id EnclaveIdentity) String() string {\n\n\treturn hex.EncodeToString(id.MrEnclave[:]) + hex.EncodeToString(id.MrSigner[:])\n\n}\n", "file_path": "go/common/sgx/common.go", "rank": 67, "score": 190634.94111087246 }, { "content": "//! Wrappers for sealing secrets to the enclave in cold storage.\n\nuse rand::{rngs::OsRng, Rng};\n\nuse sgx_isa::Keypolicy;\n\nuse zeroize::Zeroize;\n\n\n\nuse crate::common::{\n\n crypto::mrae::deoxysii::{DeoxysII, NONCE_SIZE, TAG_SIZE},\n\n sgx::egetkey::egetkey,\n\n};\n\n\n\n/// Seal a secret to the enclave.\n\n///\n\n/// The `context` field is a domain separation tag.\n", "file_path": "runtime/src/common/sgx/seal.rs", "rank": 68, "score": 189197.51002437252 }, { "content": " unseal(Keypolicy::MRENCLAVE, b\"MRENCLAVE\", &sealed_b[..2]);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_incorrect_ciphertext_b() {\n\n let mut sealed_b = seal(Keypolicy::MRENCLAVE, b\"MRENCLAVE\", b\"Mr. Enclave\");\n\n sealed_b[0] = sealed_b[0].wrapping_add(1);\n\n unseal(Keypolicy::MRENCLAVE, b\"MRENCLAVE\", &sealed_b);\n\n }\n\n}\n", "file_path": "runtime/src/common/sgx/seal.rs", "rank": 69, "score": 189195.5728890075 }, { "content": " let unsealed_b = unseal(Keypolicy::MRENCLAVE, b\"MRENCLAVE\", &sealed_b);\n\n assert_eq!(unsealed_b, Some(b\"Mr. 
Enclave\".to_vec()));\n\n\n\n // Test zero-length ciphertext.\n\n let unsealed_c = unseal(Keypolicy::MRENCLAVE, b\"MRENCLAVE\", b\"\");\n\n assert_eq!(unsealed_c, None);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_incorrect_context() {\n\n // Test incorrect context.\n\n let sealed_b = seal(Keypolicy::MRENCLAVE, b\"MRENCLAVE1\", b\"Mr. Enclave\");\n\n unseal(Keypolicy::MRENCLAVE, b\"MRENCLAVE2\", &sealed_b);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_incorrect_ciphertext_a() {\n\n let sealed_b = seal(Keypolicy::MRENCLAVE, b\"MRENCLAVE\", b\"Mr. Enclave\");\n", "file_path": "runtime/src/common/sgx/seal.rs", "rank": 70, "score": 189194.68927972743 }, { "content": "package sgx\n\n\n\nimport (\n\n\t\"crypto/rsa\"\n\n\t\"testing\"\n\n\n\n\t\"github.com/stretchr/testify/require\"\n\n)\n\n\n\nfunc TestMrSignerDerivation(t *testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\t// This could just use FortanixDummyMrSigner, since it's done in\n\n\t// the package init()...\n\n\tvar mrSigner MrSigner\n\n\terr := mrSigner.FromPublicKey(fortanixDummyKey.Public().(*rsa.PublicKey))\n\n\trequire.NoError(err, \"Derive MRSIGNER\")\n\n\n\n\trequire.Equal(mrSigner.String(), \"9affcfae47b848ec2caf1c49b4b283531e1cc425f93582b36806e52a43d78d1a\")\n\n}\n", "file_path": "go/common/sgx/common_test.go", "rank": 71, "score": 189172.2491953443 }, { "content": "\tPolicy PolicySGX `json:\"policy\"`\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 72, "score": 188757.95151376748 }, { "content": "func NewEncoder(w io.Writer) *cbor.Encoder {\n\n\treturn encMode.NewEncoder(w)\n", "file_path": "go/common/cbor/cbor.go", "rank": 73, "score": 187610.70616772966 }, { "content": "func NewDecoder(r io.Reader) *cbor.Decoder {\n\n\treturn decMode.NewDecoder(r)\n", "file_path": "go/common/cbor/cbor.go", "rank": 74, "score": 187610.70616772966 }, { "content": "\tAttributeDebug AttributesFlags = 0b0000_0010\n", "file_path": "go/common/sgx/common.go", "rank": 75, "score": 
186800.0174212927 }, { "content": "func NewDynamicRuntimePolicyChecker(service grpc.ServiceName, watcher api.PolicyWatcher) *DynamicRuntimePolicyChecker {\n\n\treturn &DynamicRuntimePolicyChecker{\n\n\t\taccessPolicies: make(map[common.Namespace]accessctl.Policy),\n\n\t\tservice: service,\n\n\t\twatcher: watcher,\n\n\t}\n", "file_path": "go/common/grpc/policy/policy.go", "rank": 76, "score": 184375.6226421207 }, { "content": "func (ss *ServiceStore) PutCBOR(key []byte, value interface{}) error {\n\n\treturn ss.store.db.Update(func(tx *badger.Txn) error {\n\n\t\treturn tx.Set(ss.dbKey(key), cbor.Marshal(value))\n\n\t})\n", "file_path": "go/common/persistent/persistent.go", "rank": 77, "score": 182701.73381492106 }, { "content": "func (ss *ServiceStore) GetCBOR(key []byte, value interface{}) error {\n\n\treturn ss.store.db.View(func(tx *badger.Txn) error {\n\n\t\titem, txErr := tx.Get(ss.dbKey(key))\n\n\t\tswitch txErr {\n\n\t\tcase nil:\n\n\t\tcase badger.ErrKeyNotFound:\n\n\t\t\treturn ErrNotFound\n\n\t\tdefault:\n\n\t\t\treturn txErr\n\n\t\t}\n\n\t\treturn item.Value(func(val []byte) error {\n\n\t\t\tif val == nil {\n\n\t\t\t\treturn ErrNotFound\n\n\t\t\t}\n\n\t\t\treturn cbor.Unmarshal(val, value)\n\n\t\t})\n\n\t})\n", "file_path": "go/common/persistent/persistent.go", "rank": 78, "score": 182701.73381492106 }, { "content": "package cbor\n\n\n\nimport (\n\n\t\"bytes\"\n\n\t\"testing\"\n\n\n\n\t\"github.com/stretchr/testify/require\"\n\n)\n\n\n\nfunc TestOutOfMem1(t *testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\tvar f []byte\n\n\terr := Unmarshal([]byte(\"\\x9b\\x00\\x00000000\"), f)\n\n\trequire.Error(err, \"Invalid CBOR input should fail\")\n\n}\n\n\n\nfunc TestOutOfMem2(t *testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\tvar f []byte\n\n\terr := Unmarshal([]byte(\"\\x9b\\x00\\x00\\x81112233\"), f)\n\n\trequire.Error(err, \"Invalid CBOR input should fail\")\n\n}\n\n\n\nfunc TestEncoderDecoder(t *testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\tvar 
buf bytes.Buffer\n\n\tenc := NewEncoder(&buf)\n\n\terr := enc.Encode(42)\n\n\trequire.NoError(err, \"Encode\")\n\n\n\n\tvar x int\n\n\tdec := NewDecoder(&buf)\n\n\terr = dec.Decode(&x)\n\n\trequire.NoError(err, \"Decode\")\n\n\trequire.EqualValues(42, x, \"decoded value should be correct\")\n\n}\n\n\n\nfunc TestDecodeUnknowField(t *testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\ttype a struct {\n\n\t\tA string\n\n\t}\n\n\ttype b struct {\n\n\t\ta\n\n\t\tB string\n\n\t}\n\n\traw := Marshal(&b{\n\n\t\ta: a{\n\n\t\t\tA: \"Verily, no cyclone or whirlwind is Zarathustra:\",\n\n\t\t},\n\n\t\tB: \"and if he be a dancer, he is not at all a tarantula-dancer!\",\n\n\t})\n\n\n\n\tvar dec a\n\n\terr := Unmarshal(raw, &dec)\n\n\trequire.Error(err, \"unknown fields should fail\")\n\n\n\n\terr = UnmarshalTrusted(raw, &dec)\n\n\trequire.NoError(err, \"unknown fields from trusted sources should pass\")\n\n}\n", "file_path": "go/common/cbor/cbor_test.go", "rank": 79, "score": 182339.76322589658 }, { "content": "\tCfgPolicySerial = \"keymanager.policy.serial\"\n", "file_path": "go/oasis-node/cmd/keymanager/keymanager.go", "rank": 80, "score": 181469.6517599034 }, { "content": "func unmarshalPolicyCBOR(pb []byte) (*kmApi.PolicySGX, error) {\n\n\tvar p *kmApi.PolicySGX = &kmApi.PolicySGX{}\n\n\tif err := cbor.Unmarshal(pb, p); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\t// Re-marshal to check the canonicity.\n\n\tpb2 := cbor.Marshal(p)\n\n\tif !bytes.Equal(pb, pb2) {\n\n\t\treturn nil, errors.New(\"policy file not in canonical form\")\n\n\t}\n\n\n\n\treturn p, nil\n", "file_path": "go/oasis-node/cmd/keymanager/keymanager.go", "rank": 81, "score": 181461.80508358215 }, { "content": "func NewQueryFactory(state abciAPI.ApplicationQueryState) *QueryFactory {\n\n\treturn &QueryFactory{state}\n", "file_path": "go/consensus/tendermint/apps/keymanager/query.go", "rank": 82, "score": 181280.7687319236 }, { "content": "func UnsafeDebugForEnclave(sgxs []byte) ([]byte, error) 
{\n\n\t// Note: The key is unavailable unless DontBlameOasis is enabled.\n\n\tsigningKey := sgx.UnsafeFortanixDummyKey()\n\n\tif signingKey == nil {\n\n\t\treturn nil, fmt.Errorf(\"sgx/sigstruct: debug signing key unavailable\")\n\n\t}\n\n\n\n\tvar enclaveHash sgx.MrEnclave\n\n\tif err := enclaveHash.FromSgxsBytes(sgxs); err != nil {\n\n\t\treturn nil, fmt.Errorf(\"sgx/sigstruct: failed to derive EnclaveHash: %w\", err)\n\n\t}\n\n\n\n\tbuilder := New(\n\n\t\tWithAttributes(sgx.Attributes{\n\n\t\t\tFlags: sgx.AttributeDebug | sgx.AttributeMode64Bit,\n\n\t\t\tXfrm: 3, // X87, SSE (\"XFRM[1:0] must be set to 0x3\")\n\n\t\t}),\n\n\t\tWithAttributesMask([2]uint64{^uint64(0), ^uint64(0)}),\n\n\t\tWithEnclaveHash(enclaveHash),\n\n\t)\n\n\n\n\tret, err := builder.Sign(signingKey)\n\n\tif err != nil {\n\n\t\treturn nil, fmt.Errorf(\"sgx/sigstruct: failed to sign with test key: %w\", err)\n\n\t}\n\n\n\n\treturn ret, nil\n", "file_path": "go/common/sgx/sigstruct/debug_builder.go", "rank": 83, "score": 179575.3335264836 }, { "content": "\tCfgPolicyMayQuery = \"keymanager.policy.may.query\"\n", "file_path": "go/oasis-node/cmd/keymanager/keymanager.go", "rank": 84, "score": 178970.70842713994 }, { "content": "func (net *Network) NewKeymanagerPolicy(cfg *KeymanagerPolicyCfg) (*KeymanagerPolicy, error) {\n\n\tpolicyName := fmt.Sprintf(\"keymanager-policy-%d\", cfg.Serial)\n\n\n\n\tpolicyDir, err := net.baseDir.NewSubDir(policyName)\n\n\tif err != nil {\n\n\t\tnet.logger.Error(\"failed to create keymanager policy subdir\",\n\n\t\t\t\"err\", err,\n\n\t\t)\n\n\t\treturn nil, fmt.Errorf(\"oasis/keymanager: failed to create keymanager policy subdir: %w\", err)\n\n\t}\n\n\n\n\tnewPol := &KeymanagerPolicy{\n\n\t\tnet: net,\n\n\t\tdir: policyDir,\n\n\t\truntime: cfg.Runtime,\n\n\t\tserial: cfg.Serial,\n\n\t}\n\n\tnet.keymanagerPolicies = append(net.keymanagerPolicies, newPol)\n\n\n\n\treturn newPol, nil\n", "file_path": "go/oasis-test-runner/oasis/keymanager.go", "rank": 85, "score": 
178868.89534100975 }, { "content": "package cbor\n\n\n\nimport (\n\n\t\"errors\"\n\n\t\"math\"\n\n\n\n\t\"github.com/fxamacker/cbor/v2\"\n\n)\n\n\n\nconst invalidVersion = math.MaxUint16\n\n\n\nvar (\n\n\t// ErrInvalidVersion is the error returned when a versioned\n\n\t// serialized blob is either missing, or has an invalid version.\n\n\tErrInvalidVersion = errors.New(\"cbor: missing or invalid version\")\n\n\n\n\tdecOptionsVersioned = decOptions\n\n\n\n\tdecModeVersioned cbor.DecMode\n\n)\n\n\n\n// Versioned is a generic versioned serializable data structure.\n\ntype Versioned struct {\n\n\tV uint16 `json:\"v\"`\n\n}\n\n\n\n// GetVersion returns the version of a versioned serializable data\n\n// structure, if any.\n\nfunc GetVersion(data []byte) (uint16, error) {\n\n\tvblob := Versioned{\n\n\t\tV: invalidVersion,\n\n\t}\n\n\tif err := decModeVersioned.Unmarshal(data, &vblob); err != nil {\n\n\t\treturn 0, err\n\n\t}\n\n\tif vblob.V == invalidVersion {\n\n\t\treturn 0, ErrInvalidVersion\n\n\t}\n\n\treturn vblob.V, nil\n\n}\n\n\n\n// NewVersioned creates a new Versioned structure with the specified version.\n\nfunc NewVersioned(v uint16) Versioned {\n\n\tif v == invalidVersion {\n\n\t\tpanic(\"cbor: invalid version specified\")\n\n\t}\n\n\treturn Versioned{V: v}\n\n}\n\n\n\nfunc init() {\n\n\t// Use the untrusted decode options, but ignore unknown fields.\n\n\t// FIXME: https://github.com/fxamacker/cbor/issues/240\n\n\tdecOptionsVersioned.ExtraReturnErrors = int(cbor.ExtraDecErrorNone)\n\n\n\n\tvar err error\n\n\tif decModeVersioned, err = decOptionsVersioned.DecMode(); err != nil {\n\n\t\tpanic(err)\n\n\t}\n\n}\n", "file_path": "go/common/cbor/versioned.go", "rank": 86, "score": 177841.0817335942 }, { "content": "package cbor\n\n\n\nimport (\n\n\t\"encoding/binary\"\n\n\t\"errors\"\n\n\t\"io\"\n\n\t\"sync\"\n\n\n\n\t\"github.com/prometheus/client_golang/prometheus\"\n\n)\n\n\n\n// Maximum message size.\n\nconst maxMessageSize = 16 * 1024 * 1024 // 16 MiB\n\n\n\nvar 
(\n\n\terrMessageTooLarge = errors.New(\"codec: message too large\")\n\n\terrMessageMalformed = errors.New(\"codec: message is malformed\")\n\n\n\n\tcodecValueSize = prometheus.NewSummaryVec(\n\n\t\tprometheus.SummaryOpts{\n\n\t\t\tName: \"oasis_codec_size\",\n\n\t\t\tHelp: \"CBOR codec message size (bytes).\",\n\n\t\t},\n\n\t\t[]string{\"call\", \"module\"},\n\n\t)\n\n\n\n\tcodecCollectors = []prometheus.Collector{\n\n\t\tcodecValueSize,\n\n\t}\n\n\n\n\tmetricsOnce sync.Once\n\n)\n\n\n\n// MessageReader is a reader wrapper that decodes CBOR-encoded Message structures.\n\ntype MessageReader struct {\n\n\treader io.Reader\n\n\n\n\t// module is the module name where the message is read to.\n\n\tmodule string\n\n}\n\n\n\n// Read deserializes a single CBOR-encoded Message from the underlying reader.\n\nfunc (c *MessageReader) Read(msg interface{}) error {\n\n\t// Read 32-bit length prefix.\n\n\trawLength := make([]byte, 4)\n\n\tif _, err := io.ReadAtLeast(c.reader, rawLength, 4); err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\tlabels := prometheus.Labels{\"module\": c.module, \"call\": \"read\"}\n\n\tlength := binary.BigEndian.Uint32(rawLength)\n\n\tcodecValueSize.With(labels).Observe(float64(length))\n\n\tif length > maxMessageSize {\n\n\t\treturn errMessageTooLarge\n\n\t}\n\n\n\n\t// Decode message bytes.\n\n\tr := io.LimitReader(c.reader, int64(length))\n\n\tdec := NewDecoder(r)\n\n\tif err := dec.Decode(msg); err != nil {\n\n\t\treturn err\n\n\t}\n\n\tif r.(*io.LimitedReader).N > 0 {\n\n\t\treturn errMessageMalformed\n\n\t}\n\n\n\n\treturn nil\n\n}\n\n\n\n// MessageWriter is a writer wrapper that encodes Messages structures to CBOR.\n\ntype MessageWriter struct {\n\n\twriter io.Writer\n\n\n\n\t// module is the module name where the message was created.\n\n\tmodule string\n\n}\n\n\n\n// Write serializes a single Message to CBOR and writes it to the underlying writer.\n\nfunc (c *MessageWriter) Write(msg interface{}) error {\n\n\t// Encode into CBOR.\n\n\tdata := 
Marshal(msg)\n\n\tlength := len(data)\n\n\tlabels := prometheus.Labels{\"module\": c.module, \"call\": \"write\"}\n\n\tcodecValueSize.With(labels).Observe(float64(length))\n\n\tif length > maxMessageSize {\n\n\t\treturn errMessageTooLarge\n\n\t}\n\n\n\n\t// Write 32-bit length prefix and encoded data.\n\n\trawLength := make([]byte, 4)\n\n\tbinary.BigEndian.PutUint32(rawLength, uint32(length))\n\n\tif _, err := c.writer.Write(rawLength); err != nil {\n\n\t\treturn err\n\n\t}\n\n\tif _, err := c.writer.Write(data); err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\treturn nil\n\n}\n\n\n\n// MessageCodec is a length-prefixed Message encoder/decoder.\n\ntype MessageCodec struct {\n\n\tMessageReader\n\n\tMessageWriter\n\n}\n\n\n\n// NewMessageCodec constructs a new Message encoder/decoder.\n\nfunc NewMessageCodec(rw io.ReadWriter, module string) *MessageCodec {\n\n\tmetricsOnce.Do(func() {\n\n\t\tprometheus.MustRegister(codecCollectors...)\n\n\t})\n\n\n\n\treturn &MessageCodec{\n\n\t\tMessageReader: MessageReader{module: module, reader: rw},\n\n\t\tMessageWriter: MessageWriter{module: module, writer: rw},\n\n\t}\n\n}\n", "file_path": "go/common/cbor/codec.go", "rank": 87, "score": 177841.0817335942 }, { "content": "func New(ctx context.Context, backend tmapi.Backend) (ServiceClient, error) {\n\n\ta := app.New()\n\n\tif err := backend.RegisterApplication(a); err != nil {\n\n\t\treturn nil, fmt.Errorf(\"keymanager/tendermint: failed to register app: %w\", err)\n\n\t}\n\n\n\n\tsc := &serviceClient{\n\n\t\tlogger: logging.GetLogger(\"keymanager/tendermint\"),\n\n\t\tquerier: a.QueryFactory().(*app.QueryFactory),\n\n\t}\n\n\tsc.notifier = pubsub.NewBrokerEx(func(ch channels.Channel) {\n\n\t\tstatuses, err := sc.GetStatuses(ctx, consensus.HeightLatest)\n\n\t\tif err != nil {\n\n\t\t\tsc.logger.Error(\"status notifier: unable to get a list of statuses\",\n\n\t\t\t\t\"err\", err,\n\n\t\t\t)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\twr := ch.In()\n\n\t\tfor _, v := range statuses 
{\n\n\t\t\twr <- v\n\n\t\t}\n\n\t})\n\n\n\n\treturn sc, nil\n", "file_path": "go/consensus/tendermint/keymanager/keymanager.go", "rank": 88, "score": 177290.91909651016 }, { "content": "func New(cfg Config) (host.Provisioner, error) {\n\n\t// Use a default RuntimeAttestInterval if none was provided.\n\n\tif cfg.RuntimeAttestInterval == 0 {\n\n\t\tcfg.RuntimeAttestInterval = defaultRuntimeAttestInterval\n\n\t}\n\n\n\n\ts := &sgxProvisioner{\n\n\t\tcfg: cfg,\n\n\t\tias: cfg.IAS,\n\n\t\taesm: aesm.NewClient(aesmdSocketPath),\n\n\t\tlogger: logging.GetLogger(\"runtime/host/sgx\"),\n\n\t}\n\n\tp, err := sandbox.New(sandbox.Config{\n\n\t\tGetSandboxConfig: s.getSandboxConfig,\n\n\t\tHostInitializer: s.hostInitializer,\n\n\t\tInsecureNoSandbox: cfg.InsecureNoSandbox,\n\n\t\tLogger: s.logger,\n\n\t})\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\ts.sandbox = p\n\n\n\n\treturn s, nil\n", "file_path": "go/runtime/host/sgx/sgx.go", "rank": 89, "score": 177097.42019586603 }, { "content": "package cbor\n\n\n\nimport (\n\n\t\"bytes\"\n\n\t\"encoding/binary\"\n\n\t\"testing\"\n\n\n\n\t\"github.com/stretchr/testify/require\"\n\n)\n\n\n\ntype message struct {\n\n\tNumber uint64\n\n}\n\n\n\nfunc TestCodecRoundTrip(t *testing.T) {\n\n\tmsg := message{\n\n\t\tNumber: 42,\n\n\t}\n\n\n\n\tvar buffer bytes.Buffer\n\n\tcodec := NewMessageCodec(&buffer, t.Name())\n\n\terr := codec.Write(&msg)\n\n\trequire.NoError(t, err, \"Write (1st)\")\n\n\n\n\terr = codec.Write(&msg)\n\n\trequire.NoError(t, err, \"Write (2nd)\")\n\n\n\n\tvar decodedMsg1 message\n\n\terr = codec.Read(&decodedMsg1)\n\n\trequire.NoError(t, err, \"Read (1st)\")\n\n\trequire.EqualValues(t, msg, decodedMsg1, \"Decoded message must be equal to source message\")\n\n\n\n\tvar decodedMsg2 message\n\n\terr = codec.Read(&decodedMsg2)\n\n\trequire.NoError(t, err, \"Read (2nd)\")\n\n\trequire.EqualValues(t, msg, decodedMsg2, \"Decoded message must be equal to source message\")\n\n}\n\n\n\nfunc TestCodecOversized(t 
*testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\tvar buffer bytes.Buffer\n\n\tcodec := NewMessageCodec(&buffer, t.Name())\n\n\n\n\terr := codec.Write(42)\n\n\trequire.NoError(err, \"Write\")\n\n\n\n\t// Corrupt the buffer to include a huge length.\n\n\tbinary.BigEndian.PutUint32(buffer.Bytes()[:4], maxMessageSize+1)\n\n\n\n\tvar x int\n\n\terr = codec.Read(&x)\n\n\trequire.Error(err, \"Read should fail with oversized message\")\n\n\trequire.EqualValues(errMessageTooLarge, err)\n\n}\n\n\n\nfunc TestCodecMalformed(t *testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\tvar buffer bytes.Buffer\n\n\tcodec := NewMessageCodec(&buffer, t.Name())\n\n\n\n\terr := codec.Write(42)\n\n\trequire.NoError(err, \"Write\")\n\n\n\n\t// Corrupt the buffer to include an incorrect length (larger than what is really there).\n\n\tbinary.BigEndian.PutUint32(buffer.Bytes()[:4], 1024)\n\n\n\n\tvar x int\n\n\terr = codec.Read(&x)\n\n\trequire.Error(err, \"Read should fail with malformed message\")\n\n\trequire.EqualValues(errMessageMalformed, err)\n\n}\n", "file_path": "go/common/cbor/codec_test.go", "rank": 90, "score": 176145.06192919542 }, { "content": "package cbor\n\n\n\nimport (\n\n\t\"testing\"\n\n\n\n\t\"github.com/stretchr/testify/require\"\n\n)\n\n\n\nfunc TestVersioned(t *testing.T) {\n\n\trequire := require.New(t)\n\n\n\n\ttype a struct {\n\n\t\tA string\n\n\t}\n\n\n\n\traw := Marshal(&a{\n\n\t\tA: \"Open still remaineth the earth for great souls.\",\n\n\t})\n\n\t_, err := GetVersion(raw)\n\n\trequire.Equal(ErrInvalidVersion, err, \"missing version should error\")\n\n\n\n\ttype b struct {\n\n\t\tVersioned\n\n\t\ta\n\n\t}\n\n\n\n\tconst testVersion uint16 = 451\n\n\traw = Marshal(&b{\n\n\t\tVersioned: NewVersioned(testVersion),\n\n\t\ta: a{\n\n\t\t\tA: \"Empty are still many sites for lone ones and twain ones\",\n\n\t\t},\n\n\t})\n\n\tversion, err := GetVersion(raw)\n\n\trequire.NoError(err, \"versioned blobs should deserialize\")\n\n\trequire.Equal(testVersion, version, 
\"the version should be correct\")\n\n}\n", "file_path": "go/common/cbor/versioned_test.go", "rank": 91, "score": 176145.06192919542 }, { "content": "pub fn main() {\n\n // Initializer.\n\n let init = |protocol: &Arc<Protocol>,\n\n rak: &Arc<RAK>,\n\n _rpc_demux: &mut RpcDemux,\n\n rpc: &mut RpcDispatcher|\n\n -> Option<Box<dyn TxnDispatcher>> {\n\n let mut txn = TxnMethDispatcher::new();\n\n with_api! { register_runtime_txn_methods!(txn, api); }\n\n\n\n // Create the key manager client.\n\n let rt_id = protocol.get_runtime_id();\n\n let km_client = Arc::new(oasis_core_keymanager_client::RemoteClient::new_runtime(\n\n rt_id,\n\n protocol.clone(),\n\n rak.clone(),\n\n 1024,\n\n trusted_policy_signers(),\n\n ));\n\n let initializer_km_client = km_client.clone();\n", "file_path": "tests/runtimes/simple-keyvalue/src/main.rs", "rank": 92, "score": 175919.87260953235 }, { "content": "package sgx\n\n\n\nimport (\n\n\t\"crypto/rsa\"\n\n\t\"crypto/x509\"\n\n\t\"encoding/pem\"\n\n\n\n\tcmdFlags \"github.com/oasisprotocol/oasis-core/go/oasis-node/cmd/common/flags\"\n\n)\n\n\n\nvar (\n\n\t// FortanixDummyMrSigner is the MRSIGNER value corresponding to the\n\n\t// dummy signing key that is used by the Fortanix Rust SGX SDK's\n\n\t// enclave-runner.\n\n\tFortanixDummyMrSigner MrSigner\n\n\n\n\tfortanixDummyKey *rsa.PrivateKey\n\n)\n\n\n\n// UnsafeFortanixDummyKey returns the Fortanix dummy signing key.\n\n//\n\n// This MUST only ever be used for launching test enclaves.\n\nfunc UnsafeFortanixDummyKey() *rsa.PrivateKey {\n\n\tif !cmdFlags.DebugDontBlameOasis() {\n\n\t\treturn nil\n\n\t}\n\n\treturn fortanixDummyKey\n\n}\n\n\n\n// This is the \"dummy\" enclave signing key extracted from the\n\n// Fortanix Rust SGX SDK's enclave-runner, converted to\n\n// PEM format from the DER representation via:\n\n//\n\n// openssl rsa -in dummy.priv.der -inform der -out /tmp/dummy.priv.pem\n\n//\n\n// Bug reports of any kind regarding the existence of this private\n\n// key in the git 
repository (especially those sent to our bug bounty\n\n// program) will be ignored and mercilessly mocked.\n\n//\n\n// Source: https://github.com/fortanix/rust-sgx/blob/master/enclave-runner/src/dummy.key\n\nconst fortanixDummyPrivateKeyPEM = `\n\n-----BEGIN RSA PRIVATE KEY-----\n\nMIIG4gIBAAKCAYEAsbAX4s+7kHIpH+ZVBKtdefCfMacpgQL72og5r4hKoj0l5tyD\n\npH3yp+Tp1z+7EQqJC5vbQuX0U6WCoNxs5/n9LJy/b750Kee6NEoM7F9iSDka92ov\n\nTSW7NYrkMUpRCLHRIMVKKR30sCfwPXVlrmMeVPjRe/6E+lbWfTztbL6HGr69yNvt\n\nqqFITS31e9eHkIy0csriCGgaRmptkeuyTHMruatccwu1IU+WWE/v/n8MhO5hLA2z\n\nJpja7aoWNdzL8Hv0XvQvg9/VHP/kbdSpX1s3Bhqhw5T2iPO1JWvw8QucaQXwJEPy\n\ngtCqWO0sYX6bj44S1LtAEBekBEWzah6jsMrWu1oDDfuEFLNyc7VCTTzpvPJWlG9K\n\nN3x4qCSPQEAduEl9zwB5WqedaoHujVWol+4iho50ZciY29MyxeASktbJlTDEaj1g\n\n5mpPC4QCqWWWIvaU0FtwAkrNUfrx3I79bs8+L8cQYy6+2o8ygVOn76plD0RtmDUA\n\nVMAfVEN16wkT+suPAgEDAoIBgHZ1ZUHf0mBMG2qZjgMc6Pv1v3ZvcQCsp+cFe8pa\n\n3Gwow+89rRhT9xqYm+TVJ2CxsLJn54HuouJuVxXoSJqmqMhof5/UTXFFJs2Gs0g/\n\nltrQvKTxdN4ZJ3kHQsuG4LB2i2suMXC+oyAaoCj47nRCFDil4P1UWKbkjv4onkh/\n\nBLx/KTCSnnHA2t4eo6flBQsIeEyHQVrwEYRG87adIYhMx9Ec6EyyeMDfuZA1Sqmq\n\nCFie63KzzMRl50kcDs6TMqBSocyHWxDGioGT5Q1tjkx7mOXn/qMlK74quSyURfyb\n\nA7Gb9nhfthW7HZoB3/mgB4Zqv8Qf9dqmBcxDxC71C5AdwexqaoavkHK0gHCLEGOQ\n\nO8pEGnckF3R7DuNWQfrkd9LYOQ4nw9FZuiZpDYwJ6IZJhG0z92UCPwXsxmOsNZLQ\n\ndNurU2jCyl0CWiAR19Quql6qR8LbGwI/lUV9+TiA3HEQoB9sQuv1dpeEDsvCZaaP\n\nqUCFXdsgrrHiDKoeCSqnIgj4qwKBwQDolQQ7CCIwTCiZ9t5ZEeDp3rTXLSj+oBHx\n\nN2VM6YHHPYSpFo+4+2uOt7jXdr1eCNhlauHcJp31qhj7diwWaH7KV1kBI/IfJBYw\n\nx5Cj2TfbBT9MqzyxDuKq6DVfZAAPSrEAcKLWcbFy5kP9mQlWm+NPGkTmmG+LZwr7\n\nqfeTYvoXjI+BTbdbRaEsl6pulzmrP2bDpuk9Zog14weCrsUkn9aSlaYku6Jx2V1x\n\nBPVnlvTevT1wIdeVZTelGcZoUdNBkYECgcEAw5Qir63jKlXkP7l1k4/ww1/u97AL\n\n7RONcVYiqTmVF155xp3RqTySYzKjk5fS5+UaySBta/f9XDX0KDjmQjW1DmMKQtA5\n\nSYCbmh0ZFAtYMobvlQ3qV7T/qDr26IVp7Lp3OVQwyi9Uvf4WPa3Cd+P4k6Y8Z6zK\n\nx4j+NLPKozsgNCM3y8t5/6EmrtGUfIhc6bfCaGveQYTlM9r0hR7toJZ3bg8F3ILq\n\nhW++3qsaDjvyT78jX2IitIfUr/yhwryNq8UPAoHBAJsOAtIFbCAyxbv56ZC2lfE/\n\nIzoeG1RqtqDPmN3xA
S9+WHC5tSX88l8lJeT505QF5Zjx6+gZvqPGu1JOyA7wVIbk\n\n5gDCoWoYDssvtcKQz+dY1N3HfctfQcdFeOpCqrTcdgBLFzmhIPdELVO7W48Sl4oR\n\ng0Rln7JEsf0b+mJB/A+zClYzz5Iua3MPxvRk0RzU7y0Z8NOZsCPsr6x0g22/5GG5\n\nGW3SbEvmPktYo5pkoz8o06AWj7juJRi72ZrhN4ELqwKBwQCCYsHKc+zG4+1/0PkN\n\nCqCCP/SlIAfzYl5LjsHGJmNk6aaEaTZw0wxCIcJiZTdFQ2cwwEjypVOSzqLFe0Qs\n\nI84Jl1wsitDbqxJmvhC4B5Ahr0pjXpw6eKpwJ09Frkad0aTQ4ssxdOMpVA7TySxP\n\n7VBibtLvyIcvsKl4d9xs0hV4F3qH3Pv/wMR0i7hTBZNGeoGa8pQrre4ikfhYv0kV\n\nuaT0CgPoV0cDn9SUchFe0qGKf2zqQWx4Wo3KqGvXKF5yg18CgcAzEW7LB7LDOsfY\n\nx8y0+8pD5HDuDeAP3sgRB4yTXFNL6GMHs6Q3YxxsVk0LoYOzTOpunoUlQdCxu9zR\n\nEeN9Mu9lfUB8df2MtfPzxmRZGJ393+AE9DP8qZBwtdQ5enVDXk1WkUgaF7evXDfL\n\nSAkQt9OUCAE2+5/QLQnPshNV51cP9pdc3ZyVlUPv4PgH2o8VzDzsLOKLZni6BP1z\n\nEEMnB7ZDPep0Ez7tuWlJTYVVdbVTq73hpc2UNGtehW6r57ct0gI=\n\n-----END RSA PRIVATE KEY-----`\n\n\n\nfunc init() {\n\n\tvar err error\n\n\tblk, _ := pem.Decode([]byte(fortanixDummyPrivateKeyPEM))\n\n\tfortanixDummyKey, err = x509.ParsePKCS1PrivateKey(blk.Bytes)\n\n\tif err != nil {\n\n\t\tpanic(\"failed to parse dummy key DER: \" + err.Error())\n\n\t}\n\n\n\n\tif err = FortanixDummyMrSigner.FromPublicKey(fortanixDummyKey.Public().(*rsa.PublicKey)); err != nil {\n\n\t\tpanic(\"failed to derive dummy key MrSigner: \" + err.Error())\n\n\t}\n\n}\n", "file_path": "go/common/sgx/fortanix_dummy.go", "rank": 93, "score": 175704.79316601763 }, { "content": "func New() tmapi.Application {\n\n\treturn &keymanagerApplication{}\n", "file_path": "go/consensus/tendermint/apps/keymanager/keymanager.go", "rank": 94, "score": 175632.29747183557 }, { "content": "\tPolicyChecksum []byte `json:\"policy_checksum\"`\n", "file_path": "go/keymanager/api/api.go", "rank": 95, "score": 175317.1193951762 }, { "content": "\tID common.Namespace `json:\"id\"`\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 96, "score": 174932.25357039305 }, { "content": "\tEnclaves map[sgx.EnclaveIdentity]*EnclavePolicySGX `json:\"enclaves\"`\n", "file_path": 
"go/keymanager/api/policy_sgx.go", "rank": 97, "score": 174931.67010761073 }, { "content": "package api\n\n\n\nimport (\n\n\t\"fmt\"\n\n\n\n\t\"github.com/oasisprotocol/oasis-core/go/common\"\n\n\t\"github.com/oasisprotocol/oasis-core/go/common/cbor\"\n\n\t\"github.com/oasisprotocol/oasis-core/go/common/crypto/signature\"\n\n\t\"github.com/oasisprotocol/oasis-core/go/common/sgx\"\n\n)\n\n\n\n// PolicySGXSignatureContext is the context used to sign PolicySGX documents.\n\nvar PolicySGXSignatureContext = signature.NewContext(\"oasis-core/keymanager: policy\")\n\n\n\n// PolicySGX is a key manager access control policy for the replicated\n\n// SGX key manager.\n\ntype PolicySGX struct {\n\n\t// Serial is the monotonically increasing policy serial number.\n\n\tSerial uint32 `json:\"serial\"`\n\n\n\n\t// ID is the runtime ID that this policy is valid for.\n\n\tID common.Namespace `json:\"id\"`\n\n\n\n\t// Enclaves is the per-key manager enclave ID access control policy.\n\n\tEnclaves map[sgx.EnclaveIdentity]*EnclavePolicySGX `json:\"enclaves\"`\n\n}\n\n\n\n// EnclavePolicySGX is the per-SGX key manager enclave ID access control policy.\n\ntype EnclavePolicySGX struct {\n\n\t// MayQuery is the map of runtime IDs to the vector of enclave IDs that\n\n\t// may query private key material.\n\n\t//\n\n\t// TODO: This could be made more sophisticated and seggregate based on\n\n\t// contract ID as well, but for now punt on the added complexity.\n\n\tMayQuery map[common.Namespace][]sgx.EnclaveIdentity `json:\"may_query\"`\n\n\n\n\t// MayReplicate is the vector of enclave IDs that may retrieve the master\n\n\t// secret (Note: Each enclave ID may always implicitly replicate from other\n\n\t// instances of itself).\n\n\tMayReplicate []sgx.EnclaveIdentity `json:\"may_replicate\"`\n\n}\n\n\n\n// SignedPolicySGX is a signed SGX key manager access control policy.\n\ntype SignedPolicySGX struct {\n\n\tPolicy PolicySGX `json:\"policy\"`\n\n\n\n\tSignatures []signature.Signature 
`json:\"signatures\"`\n\n}\n\n\n\n// SanityCheckSignedPolicySGX verifies a SignedPolicySGX.\n\nfunc SanityCheckSignedPolicySGX(currentSigPol, newSigPol *SignedPolicySGX) error {\n\n\tnewRawPol := cbor.Marshal(newSigPol.Policy)\n\n\tfor _, sig := range newSigPol.Signatures {\n\n\t\tif !sig.PublicKey.IsValid() {\n\n\t\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy signature's public key %s is invalid\", sig.PublicKey.String())\n\n\t\t}\n\n\t\tif !sig.Verify(PolicySGXSignatureContext, newRawPol) {\n\n\t\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy signature from %s is invalid\", sig.PublicKey.String())\n\n\t\t}\n\n\t}\n\n\n\n\t// If a prior version of the policy is not provided, then there is nothing\n\n\t// more to check. Even with a prior version of the document, since policy\n\n\t// updates can happen independently of a new version of the enclave, it's\n\n\t// basically impossible to generically validate the Enclaves portion.\n\n\tif currentSigPol == nil {\n\n\t\treturn nil\n\n\t}\n\n\n\n\tcurrentPol, newPol := currentSigPol.Policy, newSigPol.Policy\n\n\tif !newPol.ID.Equal(&currentPol.ID) {\n\n\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy runtime ID changed from %s to %s\", currentPol.ID, newPol.ID)\n\n\t}\n\n\n\n\tif currentPol.Serial >= newPol.Serial {\n\n\t\treturn fmt.Errorf(\"keymanager: sanity check failed: SGX policy serial number did not increase\")\n\n\t}\n\n\n\n\treturn nil\n\n}\n", "file_path": "go/keymanager/api/policy_sgx.go", "rank": 98, "score": 174926.65054551273 }, { "content": "\n\n Ok(k.to_vec())\n\n }\n\n\n\n fn get_checksum(&self) -> Result<Vec<u8>> {\n\n match self.checksum.as_ref() {\n\n Some(checksum) => Ok(checksum.clone()),\n\n None => Err(KeyManagerError::NotInitialized.into()),\n\n }\n\n }\n\n}\n\n\n\nimpl Kdf {\n\n fn new() -> Self {\n\n Self {\n\n inner: RwLock::new(Inner {\n\n master_secret: None,\n\n checksum: None,\n\n runtime_id: None,\n\n signer: None,\n", 
"file_path": "keymanager-lib/src/kdf.rs", "rank": 99, "score": 48.187313912350724 } ]
Rust
core_nodes/src/abc.rs
alec-deason/virtual_modular
77857488c4b573522807430855c83bfc3d588647
use generic_array::{arr, typenum::*}; use std::collections::HashMap; use virtual_modular_graph::{Node, Ports, BLOCK_SIZE}; #[derive(Clone, Debug)] pub struct ABCSequence { line: Vec<abc_parser::datatypes::MusicSymbol>, key: HashMap<char, abc_parser::datatypes::Accidental>, idx: usize, clock: u32, sounding: Option<f32>, current_duration: f32, triggered: bool, } impl ABCSequence { pub fn new(tune: &str) -> Option<Self> { let parsed = abc_parser::abc::tune(tune).ok()?; let key = parsed .header .info .iter() .find(|f| f.0 == 'K') .map(|f| f.1.clone()) .unwrap_or("C".to_string()); let key: HashMap<_, _> = match key.as_str() { "C" => vec![], "G" => vec![('F', abc_parser::datatypes::Accidental::Sharp)], _ => panic!(), } .into_iter() .collect(); let mut line: Vec<_> = parsed .body .unwrap() .music .into_iter() .map(|l| l.symbols.clone()) .flatten() .collect(); line.retain(|s| match s { abc_parser::datatypes::MusicSymbol::Rest(abc_parser::datatypes::Rest::Note(..)) => true, abc_parser::datatypes::MusicSymbol::Note { .. } => true, _ => false, }); let mut r = Self { line, key, idx: 0, clock: 0, current_duration: 0.0, sounding: None, triggered: false, }; let dur = r.duration(0); r.clock = dur; r.current_duration = dur as f32 / 24.0; r.sounding = r.freq(0); Some(r) } } fn accidental_to_freq_multiplier(accidental: &abc_parser::datatypes::Accidental) -> f32 { let semitones = match accidental { abc_parser::datatypes::Accidental::Sharp => 1, abc_parser::datatypes::Accidental::Flat => -1, abc_parser::datatypes::Accidental::Natural => 0, abc_parser::datatypes::Accidental::DoubleSharp => 2, abc_parser::datatypes::Accidental::DoubleFlat => -2, }; 2.0f32.powf((semitones * 100) as f32 / 1200.0) } impl ABCSequence { fn freq(&self, idx: usize) -> Option<f32> { if let abc_parser::datatypes::MusicSymbol::Note { note, octave, accidental, .. 
} = self.line[idx] { if accidental.is_some() { todo!() } let mut base = match note { abc_parser::datatypes::Note::C => 16.35, abc_parser::datatypes::Note::D => 18.35, abc_parser::datatypes::Note::E => 20.60, abc_parser::datatypes::Note::F => 21.83, abc_parser::datatypes::Note::G => 24.50, abc_parser::datatypes::Note::A => 27.50, abc_parser::datatypes::Note::B => 30.87, }; let accidental = match note { abc_parser::datatypes::Note::C => self.key.get(&'C'), abc_parser::datatypes::Note::D => self.key.get(&'D'), abc_parser::datatypes::Note::E => self.key.get(&'E'), abc_parser::datatypes::Note::F => self.key.get(&'F'), abc_parser::datatypes::Note::G => self.key.get(&'G'), abc_parser::datatypes::Note::A => self.key.get(&'A'), abc_parser::datatypes::Note::B => self.key.get(&'B'), }; if let Some(accidental) = accidental { base *= accidental_to_freq_multiplier(accidental); } Some(base * 2.0f32.powi(octave as i32 + 2)) } else { panic!() } } fn duration(&self, idx: usize) -> u32 { match self.line[idx] { abc_parser::datatypes::MusicSymbol::Rest(abc_parser::datatypes::Rest::Note( _length, )) => { unimplemented!() } abc_parser::datatypes::MusicSymbol::Note { length, .. 
} => (length * 24.0) as u32, _ => panic!("{:?}", self.line[idx]), } } } impl Node for ABCSequence { type Input = U1; type Output = U4; #[inline] fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> { let trigger = input[0]; let mut r_freq = [0.0f32; BLOCK_SIZE]; let mut r_gate = [0.0f32; BLOCK_SIZE]; let mut r_eoc = [0.0f32; BLOCK_SIZE]; let mut r_dur = [0.0f32; BLOCK_SIZE]; for i in 0..BLOCK_SIZE { if trigger[i] > 0.5 { if !self.triggered { self.triggered = true; self.clock -= 1; } } else { self.triggered = false; } if self.clock == 0 { self.idx = self.idx + 1; if self.idx >= self.line.len() { self.idx = 0; r_eoc[i] = 1.0; } self.clock = self.duration(self.idx); self.current_duration = self.clock as f32 / 24.0; self.sounding = self.freq(self.idx); r_gate[i] = 0.0; } else { r_gate[i] = if self.sounding.is_some() { 1.0 } else { 0.0 }; } r_freq[i] = self.sounding.unwrap_or(0.0); r_dur[i] = self.current_duration; } arr![[f32; BLOCK_SIZE]; r_freq, r_gate, r_eoc, r_dur] } }
use generic_array::{arr, typenum::*}; use std::collections::HashMap; use virtual_modular_graph::{Node, Ports, BLOCK_SIZE}; #[derive(Clone, Debug)] pub struct ABCSequence { line: Vec<abc_parser::datatypes::MusicSymbol>, key: HashMap<char, abc_parser::datatypes::Accidental>, idx: usize, clock: u32, sounding: Option<f32>, current_duration: f32, triggered: bool, } impl ABCSequence { pub fn new(tune: &str) -> Option<Self> { let parsed = abc_parser::abc::tune(tune).ok()?; let key = parsed .header .info .iter() .find(|f| f.0 == 'K') .map(|f| f.1.clone()) .unwrap_or("C".to_string()); let key: HashMap<_, _> = match key.as_str() { "C" => vec![], "G" => vec![('F', abc_parser::datatypes::Accidental::Sharp)], _ => panic!(), } .into_iter() .collect(); let mut line: Vec<_> = parsed .body .unwrap() .music .into_iter() .map(|l| l.symbols.clone()) .flatten() .collect(); line.retain(|s| match s { abc_parser::datatypes::MusicSymbol::Rest(abc_parser::datatypes::Rest::Note(..)) => true, abc_parser::datatypes::MusicSymbol::Note { .. } => true, _ => false, }); let mut r = Self { line, key, idx: 0, clock: 0, current_duration: 0.0, sounding: None, triggered: false, }; let dur = r.duration(0); r.clock = dur; r.current_duration = dur as f32 / 24.0; r.sounding = r.freq(0); Some(r) } } fn accidental_to_freq_multiplier(accidental: &abc_parser::datatypes::Accidental) -> f32 { let semitones = match accidental { abc_parser::datatypes::Accidental::Sharp => 1, abc_parser::datatypes::Accidental::Flat => -1, abc_parser::datatypes::Accidental::Natural => 0, abc_parser::datatypes::Accidental::DoubleSharp => 2, abc_parser::datatypes::Accidental::DoubleFlat => -2, }; 2.0f32.powf((semitones * 100) as f32 / 1200.0) } impl ABCSequence { fn freq(&self, idx: usize) -> Option<f32> { if let abc_parser::datatypes::MusicSymbol::Note { note, octave, accidental, .. 
} = self.line[idx] { if accidental.is_some() { todo!() } let mut base = match note { abc_parser::datatypes::Note::C => 16.35, abc_parser::datatypes::Note::D => 18.35, abc_parser::datatypes::Note::E => 20.60, abc_parser::datatypes::Note::F => 21.83, abc_parser::datatypes::Note::G => 24.50, abc_parser::datatypes::Note::A => 27.50, abc_parser::datatypes::Note::B => 30.87, }; let accidental = match note { abc_parser::datatypes::Note::C => self.key.get(&'C'), abc_parser::datatypes::Note::D => self.key.get(&'D'), abc_parser::datatypes::Note::E => self.key.get(&'E'), abc_parser::datatypes::Note::F => self.key.get(&'F'), abc_parser::datatypes::Note::G => self.key.get(&'G'), abc_parser::datatypes::Note::A => self.key.get(&'A'), abc_parser::datatypes::Note::B => self.key.get(&'B'), }; if let Some(accidental) = accidental { base *= accidental_to_freq_multiplier(accidental); } Some(base * 2.0f32.powi(octave as i32 + 2)) } else { panic!() } }
} impl Node for ABCSequence { type Input = U1; type Output = U4; #[inline] fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> { let trigger = input[0]; let mut r_freq = [0.0f32; BLOCK_SIZE]; let mut r_gate = [0.0f32; BLOCK_SIZE]; let mut r_eoc = [0.0f32; BLOCK_SIZE]; let mut r_dur = [0.0f32; BLOCK_SIZE]; for i in 0..BLOCK_SIZE { if trigger[i] > 0.5 { if !self.triggered { self.triggered = true; self.clock -= 1; } } else { self.triggered = false; } if self.clock == 0 { self.idx = self.idx + 1; if self.idx >= self.line.len() { self.idx = 0; r_eoc[i] = 1.0; } self.clock = self.duration(self.idx); self.current_duration = self.clock as f32 / 24.0; self.sounding = self.freq(self.idx); r_gate[i] = 0.0; } else { r_gate[i] = if self.sounding.is_some() { 1.0 } else { 0.0 }; } r_freq[i] = self.sounding.unwrap_or(0.0); r_dur[i] = self.current_duration; } arr![[f32; BLOCK_SIZE]; r_freq, r_gate, r_eoc, r_dur] } }
fn duration(&self, idx: usize) -> u32 { match self.line[idx] { abc_parser::datatypes::MusicSymbol::Rest(abc_parser::datatypes::Rest::Note( _length, )) => { unimplemented!() } abc_parser::datatypes::MusicSymbol::Note { length, .. } => (length * 24.0) as u32, _ => panic!("{:?}", self.line[idx]), } }
function_block-full_function
[ { "content": "fn make_euclidian_rhythm(pulses: u32, len: u32, steps: &mut Vec<bool>) {\n\n steps.resize(len as usize, false);\n\n steps.fill(false);\n\n let mut bucket = 0;\n\n for step in steps.iter_mut() {\n\n bucket += pulses;\n\n if bucket >= len {\n\n bucket -= len;\n\n *step = true;\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Brownian {\n\n current: f32,\n\n triggered: bool,\n\n}\n\n\n\nimpl Default for Brownian {\n", "file_path": "core_nodes/src/randomization.rs", "rank": 0, "score": 181369.3424251524 }, { "content": "pub fn parse(data: &str) -> Result<Vec<Line>, String> {\n\n use pom::parser::Parser;\n\n use pom::parser::*;\n\n use std::str::{self, FromStr};\n\n\n\n fn node_name<'a>() -> Parser<'a, u8, String> {\n\n let number = one_of(b\"0123456789\");\n\n (one_of(b\"abcdefghijklmnopqrstuvwxyzxy\").repeat(1)\n\n + (one_of(b\"abcdefghijklmnopqrstuvwxyzxy\") | number | sym(b'_')).repeat(0..))\n\n .collect()\n\n .convert(str::from_utf8)\n\n .map(|s| s.to_string())\n\n .name(\"node_name\")\n\n }\n\n fn node_constructor_name<'a>() -> Parser<'a, u8, String> {\n\n let lowercase = one_of(b\"abcdefghijklmnopqrstuvwxyzxy\");\n\n let uppercase = one_of(b\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\");\n\n let number = one_of(b\"0123456789\");\n\n (uppercase.repeat(1)\n\n + (lowercase | one_of(b\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\") | number | sym(b'_')).repeat(0..))\n", "file_path": "definition_language/src/lib.rs", "rank": 1, "score": 179437.73647003167 }, { "content": "pub fn make_coprime(numbers: &mut [u32]) {\n\n for n in numbers.iter_mut() {\n\n if *n <= 1 {\n\n *n = 2;\n\n }\n\n }\n\n for i in 1..numbers.len() {\n\n let mut done = false;\n\n while !done {\n\n done = true;\n\n for j in 0..i {\n\n while gcd(numbers[i], numbers[j]) != 1 {\n\n numbers[i] += 1;\n\n done = false;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/utils.rs", "rank": 2, "score": 158534.68937486052 }, { "content": "fn write_data<T>(output: &mut [T], channels: usize, ring_buffer: 
&mut Consumer<(f32, f32)>)\n\nwhere\n\n T: cpal::Sample,\n\n{\n\n let mut underran = false;\n\n for frame in output.chunks_mut(channels) {\n\n let (value_left, value_right) = ring_buffer.pop().unwrap_or_else(|| {\n\n underran = true;\n\n (0.0, 0.0)\n\n });\n\n frame[0] = cpal::Sample::from::<f32>(&(value_left * 0.5));\n\n frame[1] = cpal::Sample::from::<f32>(&(value_right * 0.5));\n\n }\n\n if underran {\n\n println!(\"buffer underrun\");\n\n }\n\n}\n", "file_path": "dynamic_environment/src/main.rs", "rank": 3, "score": 138730.97868986186 }, { "content": "pub trait PortCount: ArrayLength<[f32; BLOCK_SIZE]> + ToInt<usize> {}\n\nimpl<T: ArrayLength<[f32; BLOCK_SIZE]> + ToInt<usize>> PortCount for T {}\n\npub type Ports<N> = GenericArray<[f32; BLOCK_SIZE], N>;\n\n\n", "file_path": "graph/src/lib.rs", "rank": 4, "score": 120654.28836670081 }, { "content": "fn parse_choices(data: &str) -> Result<Vec<ChoiceItem>, String> {\n\n use pom::parser::Parser;\n\n use pom::parser::*;\n\n use std::str::{self, FromStr};\n\n fn number<'a>() -> Parser<'a, u8, f32> {\n\n let integer = one_of(b\"0123456789\").repeat(0..);\n\n let frac = sym(b'.') + one_of(b\"0123456789\").repeat(1..);\n\n let exp = one_of(b\"eE\") + one_of(b\"+-\").opt() + one_of(b\"0123456789\").repeat(1..);\n\n let number = sym(b'-').opt() + integer + frac.opt() + exp.opt();\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(f32::from_str)\n\n .name(\"number\")\n\n }\n\n fn item<'a>() -> Parser<'a, u8, ChoiceItem> {\n\n (number() - sym(b'|') + number() + (sym(b'|') * number()).opt()).map(\n\n |((value, selection_probability), firing_probability)| ChoiceItem {\n\n value,\n\n selection_probability,\n\n firing_probability: firing_probability.unwrap_or(1.0),\n\n },\n\n )\n\n }\n\n let parsed = list(item(), sym(b' ').repeat(1..))\n\n .parse(data.as_bytes())\n\n .map_err(|e| format!(\"{:?}\", e));\n\n parsed\n\n}\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 5, "score": 105594.53536875342 }, { 
"content": "fn parse_allpass(data: &str) -> Result<Vec<(f64, AllPass)>, String> {\n\n use pom::parser::Parser;\n\n use pom::parser::*;\n\n use std::str::{self, FromStr};\n\n fn number<'a>() -> Parser<'a, u8, f64> {\n\n let integer = one_of(b\"0123456789\").repeat(0..);\n\n let frac = sym(b'.') + one_of(b\"0123456789\").repeat(1..);\n\n let exp = one_of(b\"eE\") + one_of(b\"+-\").opt() + one_of(b\"0123456789\").repeat(1..);\n\n let number = sym(b'-').opt() + integer + frac.opt() + exp.opt();\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(f64::from_str)\n\n .name(\"number\")\n\n }\n\n fn whitespace<'a>() -> Parser<'a, u8, Vec<u8>> {\n\n sym(b' ').repeat(0..).name(\"whitespace\")\n\n }\n\n fn item<'a>() -> Parser<'a, u8, (f64, AllPass)> {\n\n (sym(b'(') * whitespace() * number() - whitespace() - sym(b',') - whitespace() + number() - whitespace() - sym(b')')).map(|(gain, delay)| (delay, AllPass::new(gain, delay)))\n", "file_path": "core_nodes/src/filter.rs", "rank": 6, "score": 103595.58218748454 }, { "content": "fn parse_combs(data: &str) -> Result<Vec<(f64, Comb)>, String> {\n\n use pom::parser::Parser;\n\n use pom::parser::*;\n\n use std::str::{self, FromStr};\n\n fn number<'a>() -> Parser<'a, u8, f64> {\n\n let integer = one_of(b\"0123456789\").repeat(0..);\n\n let frac = sym(b'.') + one_of(b\"0123456789\").repeat(1..);\n\n let exp = one_of(b\"eE\") + one_of(b\"+-\").opt() + one_of(b\"0123456789\").repeat(1..);\n\n let number = sym(b'-').opt() + integer + frac.opt() + exp.opt();\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(f64::from_str)\n\n .name(\"number\")\n\n }\n\n fn whitespace<'a>() -> Parser<'a, u8, Vec<u8>> {\n\n sym(b' ').repeat(0..).name(\"whitespace\")\n\n }\n\n fn item<'a>() -> Parser<'a, u8, (f64, Comb)> {\n\n (sym(b'(') * whitespace() * number() - whitespace() - sym(b',') - whitespace() + number() - whitespace() - sym(b')')).map(|(gain, delay)| (delay, Comb::new(gain, delay)))\n", "file_path": 
"core_nodes/src/filter.rs", "rank": 7, "score": 103595.58218748454 }, { "content": "fn gcd(a: u32, b: u32) -> u32 {\n\n if a == 0 || b == 0 {\n\n 0\n\n } else if a == b {\n\n a\n\n } else if a > b {\n\n gcd(a - b, b)\n\n } else {\n\n gcd(a, b - a)\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/utils.rs", "rank": 8, "score": 103242.72844278463 }, { "content": "pub fn to_rust(\n\n node_templates: &HashMap<String, NodeTemplate>,\n\n input_code: &[Line],\n\n) -> (String, u32) {\n\n let mut nodes = HashMap::new();\n\n nodes.insert(\"output\".to_string(), (\"Output()\".to_string(), 2, 0));\n\n\n\n let mut edges = HashMap::new();\n\n let mut consumed_nodes = HashSet::new();\n\n let mut bridge_map = HashMap::new();\n\n let mut input_count: HashMap<String, i32> = HashMap::new();\n\n for line in input_code {\n\n if let Line::Node {\n\n name,\n\n ty,\n\n static_parameters,\n\n } = line\n\n {\n\n nodes.insert(\n\n name.clone(),\n", "file_path": "definition_language/src/code_generation.rs", "rank": 9, "score": 89394.31270494434 }, { "content": "// Based on: https://gist.github.com/geraintluff/663e42e2519465e8b94df47793076f23\n\nfn householder(data: &mut [f64]) {\n\n\tlet factor = -2.0/data.len() as f64;\n\n\n\n\tlet mut sum = data.iter().sum::<f64>();\n\n\n\n\tsum *= factor;\n\n\n\n\tdata.iter_mut().for_each(|v| *v += sum);\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Diffusor {\n\n delays: Vec<(f64, DelayLine)>,\n\n shuffles: Vec<(usize, f64)>,\n\n}\n\n\n\nimpl Diffusor {\n\n pub fn new<R: Rng+Sized>(len: f64, rng: &mut R) -> Self {\n\n let mut idxs: Vec<usize> = (0..8).collect();\n\n idxs.shuffle(rng);\n", "file_path": "core_nodes/src/delay_and_reverb.rs", "rank": 10, "score": 80551.9105736982 }, { "content": "// Based on: https://gist.github.com/geraintluff/c55f9482dce0db6e6a1f4509129e9a2a\n\nfn hadamard(data: &mut [f64]) {\n\n if data.len() <= 1 {\n\n return\n\n }\n\n\n\n let h_size = data.len()/2;\n\n\n\n let (a,b) = data.split_at_mut(h_size);\n\n hadamard(a);\n\n 
hadamard(b);\n\n\n\n for i in 0..h_size {\n\n let a = data[i];\n\n let b = data[i + h_size];\n\n data[i] = a + b;\n\n data[i + h_size] = a - b;\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/delay_and_reverb.rs", "rank": 11, "score": 80551.9105736982 }, { "content": "fn parse_sequence(data: &str) -> Result<Subsequence, String> {\n\n use pom::parser::Parser;\n\n use pom::parser::*;\n\n use std::str::{self, FromStr};\n\n fn modified_event<'a>() -> Parser<'a, u8, Subsequence> {\n\n (call(raw_event) + one_of(b\"*/!\") + number()).convert(|((e, o), n)| match o {\n\n b'*' => Ok(Subsequence::ClockMultiplier(Box::new(e), 1.0 / n)),\n\n b'/' => Ok(Subsequence::ClockMultiplier(Box::new(e), n)),\n\n b'!' => Ok(Subsequence::ClockMultiplier(\n\n Box::new(Subsequence::Tuplet(\n\n (0..n.max(1.0) as usize).map(|_| e.clone()).collect(),\n\n 0,\n\n )),\n\n 1.0 / n,\n\n )),\n\n _ => Err(()),\n\n })\n\n }\n\n\n\n fn raw_event<'a>() -> Parser<'a, u8, Subsequence> {\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 12, "score": 73655.94892331262 }, { "content": "// Based on: http://www.martin-finke.de/blog/articles/audio-plugins-018-polyblep-oscillator/\n\nfn poly_blep(mut t: f64, dt: f64) -> f64 {\n\n if t < dt {\n\n t /= dt;\n\n 2.0 * t - t.powi(2) - 1.0\n\n } else if t > 1.0 - dt {\n\n t = (t - 1.0) / dt;\n\n t.powi(2) + 2.0 * t + 1.0\n\n } else {\n\n 0.0\n\n }\n\n}\n\n\n\nmacro_rules! 
oscillator {\n\n ($name:ident, $aux_inputs:ty, $body:expr) => {\n\n #[derive(Clone)]\n\n pub struct $name {\n\n phase: f64,\n\n per_sample: f64,\n\n previous: f64,\n\n }\n", "file_path": "core_nodes/src/oscillator.rs", "rank": 13, "score": 72066.07191564015 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct ChoiceItem {\n\n value: f32,\n\n selection_probability: f32,\n\n firing_probability: f32,\n\n}\n\n#[derive(Clone, Default)]\n\npub struct Choice {\n\n options: Vec<ChoiceItem>,\n\n current: f32,\n\n firing: f32,\n\n triggered: bool,\n\n}\n\n\n\nimpl Node for Choice {\n\n type Input = U1;\n\n type Output = U2;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let trigger = input[0];\n\n let mut r = <Ports<Self::Output>>::default();\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 15, "score": 51353.59875980003 }, { "content": "#[derive(Clone)]\n\nstruct ToneHole {\n\n line: DelayLine,\n\n filter: Simper,\n\n}\n\n\n\nimpl Default for ToneHole {\n\n fn default() -> Self {\n\n let mut filter = Simper::default();\n\n filter.set_parameters(10000.0, 0.0);\n\n Self {\n\n line: DelayLine::default(),\n\n filter,\n\n }\n\n }\n\n}\n\n\n\nimpl ToneHole {\n\n fn tick(&mut self, pa_plus: f64, pb_minus: f64, r0: f64, hole_reflectivity: f64) -> (f64, f64) {\n\n let pth_minus = self.line.current();\n\n\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 16, "score": 51349.482622153104 }, { "content": "fn main() {\n\n let output_path = std::env::args()\n\n .nth(1)\n\n .expect(\"Must supply a path for the output WAV file\");\n\n\n\n // Make a sine oscillator running at 220 hz\n\n let osc = Pipe(Branch(Constant(220.0), Constant(0.0)), Sine::default());\n\n\n\n // Duplicate it across two channels to make a stereo signal\n\n let sound = Pipe(osc, Stereo);\n\n\n\n // Construct the synthesizer from that stereo generator\n\n let builder = InstrumentSynth::builder();\n\n let mut synth = builder.build_with_synth(sound);\n\n\n\n 
synth.set_sample_rate(44100.0);\n\n\n\n // Render one second of sound to buffers\n\n let mut left_buffer = vec![0.0; 44100];\n\n let mut right_buffer = vec![0.0; 44100];\n", "file_path": "examples/simple_host.rs", "rank": 17, "score": 47034.5669613152 }, { "content": "pub trait Node: DynClone {\n\n type Input: PortCount;\n\n type Output: PortCount;\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output>;\n\n fn set_static_parameters(&mut self, _parameters: &str) -> Result<(), String> {\n\n Ok(())\n\n }\n\n fn set_sample_rate(&mut self, _rate: f32) {}\n\n fn input_len(&self) -> usize {\n\n Self::Input::to_int()\n\n }\n\n fn output_len(&self) -> usize {\n\n Self::Output::to_int()\n\n }\n\n}\n\ndyn_clone::clone_trait_object!(<A,B> Node<Input=A, Output=B>);\n\n\n", "file_path": "graph/src/lib.rs", "rank": 18, "score": 45935.561015907275 }, { "content": "fn main() {\n\n let synth_path = std::env::args().nth(1);\n\n\n\n let builder = InstrumentSynth::builder();\n\n\n\n let graph = if let Some(synth_data) = synth_path\n\n .as_ref()\n\n .and_then(|p| std::fs::read_to_string(p).ok())\n\n {\n\n DynamicGraphBuilder::default().parse(&synth_data).unwrap()\n\n } else {\n\n panic!(\"Could not read synth definition file {:?}\", synth_path)\n\n };\n\n\n\n #[cfg(feauture = \"midi\")]\n\n launch_midi_listener(&graph);\n\n\n\n let reload_data = Arc::clone(&graph.reload_data);\n\n let watch_list = Arc::clone(&graph.watch_list);\n\n\n", "file_path": "dynamic_environment/src/main.rs", "rank": 19, "score": 45664.663359040795 }, { "content": "fn main() {\n\n let input_path = std::env::args().nth(1).expect(\"Must supply input path\");\n\n let output_path = std::env::args().nth(2).expect(\"Must supply output path\");\n\n\n\n let parsed = parse(&std::fs::read_to_string(&input_path).expect(\"Couldn't read inputs\")).unwrap();\n\n let (rust, _input_count) = to_rust(&std_nodes(), &parsed);\n\n std::fs::write(\n\n &output_path,\n\n rust\n\n )\n\n .unwrap();\n\n}\n", 
"file_path": "definition_language/src/main.rs", "rank": 20, "score": 45664.663359040795 }, { "content": "pub trait WrappedNode: DynClone {\n\n fn process(&mut self, input: &[[f32; BLOCK_SIZE]], output: &mut [[f32; BLOCK_SIZE]]);\n\n fn set_static_parameters(&mut self, _parameters: &str) -> Result<(), String>;\n\n fn set_sample_rate(&mut self, _rate: f32);\n\n fn input_len(&self) -> usize;\n\n fn output_len(&self) -> usize;\n\n}\n\ndyn_clone::clone_trait_object!(WrappedNode);\n\n\n\n#[derive(Clone)]\n\npub struct NodeWrapper<N>(N);\n\n\n\nimpl<A: PortCount, B: PortCount, N: Node<Input = A, Output = B> + Clone> WrappedNode\n\n for NodeWrapper<N>\n\n{\n\n fn process(&mut self, input: &[[f32; BLOCK_SIZE]], output: &mut [[f32; BLOCK_SIZE]]) {\n\n assert_eq!(input.len(), A::to_int());\n\n assert_eq!(output.len(), B::to_int());\n\n let inputs = <Ports<A>>::clone_from_slice(input);\n\n let outputs = self.0.process(inputs);\n", "file_path": "graph/src/lib.rs", "rank": 21, "score": 44850.95663695829 }, { "content": "fn main() {\n\n let parsed = parse(\n\n r##\"\n\nosc=Sine(220)\n\n(output, 0, osc)\n\n(output, 1, osc)\n\n\"##,\n\n )\n\n .unwrap();\n\n\n\n let (rust, _input_count) = to_rust(&std_nodes(), &parsed);\n\n println!(\"{}\", rust);\n\n}\n", "file_path": "definition_language/examples/code_generation.rs", "rank": 22, "score": 44404.85434474007 }, { "content": "fn code_for_node(\n\n node_type: String,\n\n static_parameters: &Option<String>,\n\n node_templates: &HashMap<String, NodeTemplate>,\n\n) -> (String, u32, u32) {\n\n let (input_port_count, output_port_count) = {\n\n match node_type.as_str() {\n\n \"Output\" => (2, 0),\n\n _ => {\n\n let template = node_templates\n\n .get(node_type.as_str())\n\n .unwrap_or_else(|| panic!(\"{}\", node_type))\n\n .clone();\n\n (\n\n template.node.input_len() as u32,\n\n template.node.output_len() as u32,\n\n )\n\n }\n\n }\n\n };\n", "file_path": "definition_language/src/code_generation.rs", "rank": 23, "score": 43242.38072264052 }, 
{ "content": "fn run<T>(\n\n device: &cpal::Device,\n\n config: &cpal::StreamConfig,\n\n mut ring_buffer: Consumer<(f32, f32)>,\n\n) -> cpal::Stream\n\nwhere\n\n T: cpal::Sample,\n\n{\n\n let channels = config.channels as usize;\n\n\n\n let err_fn = |err| eprintln!(\"an error occurred on stream: {}\", err);\n\n\n\n let stream = device\n\n .build_output_stream(\n\n config,\n\n move |data: &mut [T], _: &cpal::OutputCallbackInfo| {\n\n write_data(data, channels, &mut ring_buffer)\n\n },\n\n err_fn,\n\n )\n\n .unwrap();\n\n stream.play().unwrap();\n\n stream\n\n}\n\n\n", "file_path": "dynamic_environment/src/main.rs", "rank": 24, "score": 43146.86413687852 }, { "content": "#[cfg(feauture = \"midi\")]\n\nfn launch_midi_listener(graph: &DynamicGraph) {\n\n let inputs = Arc::clone(graph.external_inputs);\n\n std::thread::spawn(move || {\n\n let mut voices = std::collections::HashMap::new();\n\n for c in 0..10 {\n\n voices.insert(c, (0..4).map(|i| (i, None)).collect::<Vec<_>>());\n\n }\n\n let mut current_voice = 0;\n\n\n\n let (client, _status) =\n\n jack::Client::new(\"virtual_modular_midi\", jack::ClientOptions::NO_START_SERVER)\n\n .unwrap();\n\n let shower = client\n\n .register_port(\"midi_in\", jack::MidiIn::default())\n\n .unwrap();\n\n let cback = move |_: &jack::Client, ps: &jack::ProcessScope| -> jack::Control {\n\n {\n\n let mut inputs = inputs.lock().unwrap();\n\n for e in shower.iter(ps) {\n\n let message = wmidi::MidiMessage::try_from(e.bytes).unwrap();\n", "file_path": "dynamic_environment/src/main.rs", "rank": 25, "score": 37925.1683327042 }, { "content": " fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0f32; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let v = input[0][i];\n\n if v != self.0 {\n\n self.0 = v;\n\n *r = 1.0;\n\n }\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct BurstTrigger {\n\n triggered: bool,\n\n spacing: f64,\n\n clock: 
f64,\n\n per_sample: f64,\n\n remaining: u32,\n", "file_path": "core_nodes/src/clock.rs", "rank": 26, "score": 32977.930203323835 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Clone, Default)]\n\npub struct PulseOnLoad(bool);\n\nimpl Node for PulseOnLoad {\n\n type Input = U0;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, _input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n if self.0 {\n\n arr![[f32; BLOCK_SIZE]; [0.0f32; BLOCK_SIZE]]\n\n } else {\n\n self.0 = true;\n\n let mut r = [0.0f32; BLOCK_SIZE];\n\n r[0] = 1.0;\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n }\n\n}\n", "file_path": "core_nodes/src/clock.rs", "rank": 27, "score": 32973.26555352915 }, { "content": "}\n\n\n\nimpl Node for BurstTrigger {\n\n type Input = U3;\n\n type Output = U2;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let trigger = input[0];\n\n let count = input[1];\n\n let spacing = input[2];\n\n let mut r = <Ports<Self::Output>>::default();\n\n for i in 0..BLOCK_SIZE {\n\n if self.remaining > 0 {\n\n self.clock += self.per_sample;\n\n }\n\n if trigger[i] > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n self.remaining = count[i] as u32;\n\n self.spacing = spacing[i] as f64;\n", "file_path": "core_nodes/src/clock.rs", "rank": 28, "score": 32972.712320479506 }, { "content": "\n\n#[derive(Copy, Clone)]\n\npub struct Impulse(f32, bool);\n\nimpl Default for Impulse {\n\n fn default() -> Self {\n\n Self::new(0.5)\n\n }\n\n}\n\nimpl Impulse {\n\n pub fn new(threshold: f32) -> Self {\n\n Self(threshold, false)\n\n }\n\n}\n\nimpl Node for Impulse {\n\n type Input = U1;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let input = input[0];\n", "file_path": "core_nodes/src/clock.rs", "rank": 29, "score": 32970.786936869416 }, { "content": " let mut r = [0.0f32; 
BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let gate = input[i];\n\n let mut switched = false;\n\n if !self.1 && gate > self.0 {\n\n self.1 = true;\n\n switched = true;\n\n } else if gate < self.0 {\n\n self.1 = false;\n\n }\n\n if switched {\n\n *r = 1.0;\n\n }\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct PulseDivider(u64, bool);\n", "file_path": "core_nodes/src/clock.rs", "rank": 30, "score": 32967.28627789387 }, { "content": "\n\nimpl Node for PulseDivider {\n\n type Input = U2;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (division, gate) = (input[0], input[1]);\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let gate = gate[i];\n\n let division = division[i].round() as u64;\n\n if gate > 0.5 {\n\n if !self.1 {\n\n self.0 += 1;\n\n self.1 = true;\n\n }\n\n } else if self.1 {\n\n self.1 = false;\n\n }\n", "file_path": "core_nodes/src/clock.rs", "rank": 31, "score": 32966.37714592656 }, { "content": " if self.1 && division > 0 && self.0 % division == 0 {\n\n *r = gate;\n\n }\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct PulseOnChange(f32);\n\nimpl Default for PulseOnChange {\n\n fn default() -> Self {\n\n Self(f32::NAN)\n\n }\n\n}\n\n\n\nimpl Node for PulseOnChange {\n\n type Input = U1;\n\n type Output = U1;\n\n #[inline]\n", "file_path": "core_nodes/src/clock.rs", "rank": 32, "score": 32962.05871505924 }, { "content": " self.clock = self.spacing;\n\n }\n\n } else {\n\n self.triggered = false;\n\n }\n\n if self.remaining > 0 && self.clock >= self.spacing {\n\n self.remaining -= 1;\n\n self.clock = 0.0;\n\n r[0][i] = 1.0;\n\n if self.remaining == 0 {\n\n r[1][i] = 1.0;\n\n }\n\n }\n\n }\n\n r\n\n }\n\n\n\n fn set_sample_rate(&mut self, rate: f32) {\n\n self.per_sample = 1.0 / rate as f64;\n\n }\n\n}\n", "file_path": 
"core_nodes/src/clock.rs", "rank": 33, "score": 32961.93696716905 }, { "content": "pub struct EuclidianPulse {\n\n pulses: u32,\n\n len: u32,\n\n steps: Vec<bool>,\n\n idx: usize,\n\n triggered: bool,\n\n}\n\n\n\nimpl Node for EuclidianPulse {\n\n type Input = U3;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let pulses = input[0];\n\n let len = input[1];\n\n let gate = input[2];\n\n let mut r = [0.0f32; BLOCK_SIZE];\n\n\n\n for (i, r) in r.iter_mut().enumerate() {\n", "file_path": "core_nodes/src/randomization.rs", "rank": 38, "score": 34.930444423455285 }, { "content": " r.iter_mut().zip(&self.cache).for_each(|(r, v)| r[i] = *v);\n\n }\n\n r\n\n }\n\n\n\n fn set_static_parameters(&mut self, _parameters: &str) -> Result<(), String> {\n\n //TODO: actual code\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct StepSequencer {\n\n steps: Vec<f32>,\n\n idx: usize,\n\n triggered: bool,\n\n}\n\n\n\nimpl Node for StepSequencer {\n\n type Input = U1;\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 39, "score": 31.693718974476162 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse rand::prelude::*;\n\nuse std::{\n\n collections::hash_map::DefaultHasher,\n\n hash::{Hash, Hasher},\n\n};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Clone, Debug)]\n\npub enum Subsequence {\n\n Item(f32, f32),\n\n Rest(f32),\n\n Tuplet(Vec<Subsequence>, usize),\n\n Iter(Vec<Subsequence>, usize),\n\n Choice(Vec<Subsequence>, usize),\n\n ClockMultiplier(Box<Subsequence>, f32),\n\n}\n\n\n\nimpl Default for Subsequence {\n\n fn default() -> Self {\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 40, "score": 31.379402001790087 }, { "content": "pub struct Markov {\n\n transitions: Vec<(f32, Vec<(usize, f32)>)>,\n\n current_state: usize,\n\n trigger: bool,\n\n}\n\n\n\nimpl Markov {\n\n pub fn new(transitions: &[(f32, Vec<(usize, f32)>)]) -> Self {\n\n Self 
{\n\n transitions: transitions.to_vec(),\n\n current_state: 0,\n\n trigger: false,\n\n }\n\n }\n\n\n\n pub fn major_key_chords() -> Self {\n\n let transitions = vec![\n\n (3.0, vec![(1, 1.0)]),\n\n (6.0, vec![(2, 0.5), (3, 0.5)]),\n\n (4.0, vec![(4, 0.5), (3, 0.5)]),\n", "file_path": "core_nodes/src/randomization.rs", "rank": 41, "score": 30.951156440417375 }, { "content": "\n\n#[derive(Clone, Default)]\n\npub struct Toggle {\n\n value: bool,\n\n triggered: bool,\n\n}\n\nimpl Node for Toggle {\n\n type Input = U1;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0f32; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n if input[0][i] > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n self.value = !self.value;\n\n }\n\n } else {\n", "file_path": "core_nodes/src/computation.rs", "rank": 42, "score": 29.568394685012922 }, { "content": " type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let trigger = input[0];\n\n let mut r = <Ports<Self::Output>>::default();\n\n for (i, r) in r[0].iter_mut().enumerate() {\n\n if trigger[i] > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n self.idx = (self.idx + 1) % self.steps.len();\n\n }\n\n } else {\n\n self.triggered = false;\n\n }\n\n *r = self.steps[self.idx];\n\n }\n\n r\n\n }\n\n\n\n fn set_static_parameters(&mut self, parameters: &str) -> Result<(), String> {\n\n self.steps = parameters\n\n .split_terminator(' ')\n\n .filter_map(|v| v.parse::<f32>().ok())\n\n .collect();\n\n self.idx %= self.steps.len();\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 43, "score": 29.096955956957448 }, { "content": "\n\nimpl GlobalClockSequencer {\n\n fn tick(&mut self) {\n\n let result = self.sequence.get(clock, &mut rng);\n\n }\n\n}\n\n*/\n\n\n\n#[derive(Clone)]\n\npub struct NCube {\n\n data: Vec<f32>,\n\n width: 
usize,\n\n cache: [f32; 16],\n\n a: f32,\n\n r: f32,\n\n a_per_trigger: f32,\n\n triggered: bool,\n\n}\n\n\n\nimpl Default for NCube {\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 44, "score": 28.257628616706263 }, { "content": " }\n\n\n\n fn set_sample_rate(&mut self, rate: f32) {\n\n self.rate = rate;\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct DelayLine {\n\n line: Vec<f64>,\n\n in_index: usize,\n\n out_index: f64,\n\n delay: f64,\n\n}\n\n\n\nimpl Default for DelayLine {\n\n fn default() -> Self {\n\n let mut s = Self {\n\n line: vec![0.0; 41],\n\n in_index: 0,\n", "file_path": "core_nodes/src/delay_and_reverb.rs", "rank": 46, "score": 27.297902413445062 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse rand::prelude::*;\n\nuse std::f64::consts::{TAU, PI};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Clone)]\n\npub struct WaveTable {\n\n sample_rate: f32,\n\n table: Vec<f32>,\n\n len: f32,\n\n pub idx: f32,\n\n}\n\nimpl WaveTable {\n\n pub fn noise() -> Self {\n\n let mut rng = StdRng::seed_from_u64(2);\n\n let table: Vec<f32> = (0..1024 * 1000).map(|_| rng.gen_range(-1.0..1.0)).collect();\n\n Self {\n\n len: table.len() as f32,\n\n idx: thread_rng().gen_range(0.0..table.len() as f32),\n\n table,\n", "file_path": "core_nodes/src/oscillator.rs", "rank": 48, "score": 26.475931894944985 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\nconst A0: f32 = 27.50;\n\nconst SEMITONE: f32 = 1.05946;\n\n\n\n#[derive(Clone)]\n\npub struct DegreeQuantizer {\n\n pitches: Vec<f32>,\n\n cache_key: (f32, f32),\n\n cached_value: f32,\n\n}\n\n\n\nimpl Default for DegreeQuantizer {\n\n fn default() -> Self {\n\n Self {\n\n pitches: Vec::new(),\n\n cache_key: (f32::NAN, f32::NAN),\n\n cached_value: 0.0,\n\n }\n", "file_path": "core_nodes/src/quantizer.rs", "rank": 49, "score": 25.96798362345328 }, { "content": "#[derive(Clone, Default)]\n\npub struct 
Accumulator {\n\n value: f32,\n\n sum_triggered: bool,\n\n reset_triggered: bool,\n\n}\n\nimpl Node for Accumulator {\n\n type Input = U4;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let value = input[0];\n\n let sum_trigger = input[1];\n\n let reset_value = input[2];\n\n let reset_trigger = input[3];\n\n\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n if sum_trigger[i] > 0.5 {\n\n if !self.sum_triggered {\n", "file_path": "core_nodes/src/computation.rs", "rank": 50, "score": 25.40674272397746 }, { "content": " type Input = U3;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (min, max, mut v) = (input[0], input[1], input[2]);\n\n for i in 0..BLOCK_SIZE {\n\n v[i] *= max[i] - min[i];\n\n v[i] += min[i];\n\n }\n\n arr![[f32; BLOCK_SIZE]; v]\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct QuadSwitch {\n\n triggered: bool,\n\n pidx: usize,\n\n slew: f32,\n\n idx: usize,\n\n per_sample: f32,\n", "file_path": "core_nodes/src/computation.rs", "rank": 51, "score": 25.300017887979692 }, { "content": " }\n\n}\n\n\n\nimpl Node for DegreeQuantizer {\n\n type Input = U2;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let degree = input[0];\n\n let root = input[1];\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let degree = degree[i];\n\n let root = root[i];\n\n if (degree, root) != self.cache_key {\n\n self.cache_key = (degree, root);\n\n let degree = (degree + self.pitches.len() as f32 * 4.0).max(0.0).round() as usize;\n\n\n\n let octave = degree / self.pitches.len();\n\n let idx = degree % self.pitches.len();\n", "file_path": "core_nodes/src/quantizer.rs", "rank": 52, "score": 24.760708589226947 }, { "content": "use generic_array::{\n\n arr,\n\n sequence::{Concat, Split},\n\n 
typenum::*,\n\n};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\nuse pitch_detection::detector::mcleod::McLeodDetector;\n\nuse pitch_detection::detector::PitchDetector;\n\n#[derive(Clone)]\n\npub struct InstrumentTuner {\n\n rate: usize,\n\n buffer: [f32; 1024 * 10],\n\n corrections: Vec<(f32, f32)>,\n\n sweep_frequencies: Vec<(f32, f32)>,\n\n fill_idx: usize,\n\n test_idx: usize,\n\n sweep_idx: usize,\n\n sweep_clock: f32,\n\n}\n", "file_path": "core_nodes/src/tuning.rs", "rank": 53, "score": 24.634071227389885 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse rand::prelude::*;\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Clone, Default)]\n\npub struct TapsAndStrikes {\n\n current_freq: f32,\n\n old_freq: f32,\n\n current_freq_modified: f32,\n\n crossover: f32,\n\n can_roll: bool,\n\n triggered: bool,\n\n per_sample: f32,\n\n}\n\n\n\nimpl Node for TapsAndStrikes {\n\n type Input = U5;\n\n type Output = U2;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n", "file_path": "core_nodes/src/performance.rs", "rank": 54, "score": 24.485783324604423 }, { "content": " fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let crossover = 0.005;\n\n\n\n let mut diffusion = 0.0;\n\n for (s, _, _, _) in &mut self.strings {\n\n let v = s.current();\n\n diffusion += v;\n\n }\n\n diffusion /= self.strings.len() as f64;\n\n for f in self.body_filters.iter_mut() {\n\n diffusion = f.tick(diffusion);\n\n }\n\n *r = diffusion as f32;\n\n for (j, (s, base_freq, f, triggered)) in self.strings.iter_mut().enumerate() {\n\n let fret = input[j * 2][i];\n\n s.set_delay(((1.0 / (*base_freq)) / self.per_sample) * (1.0 - fret as f64));\n\n let trigger = input[j * 2 + 1][i];\n\n if trigger > 0.5 {\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 55, "score": 24.329640593419597 }, { 
"content": " Term(Term),\n\n Operation(Box<Expression>, Operator, Box<Expression>),\n\n}\n\nimpl Expression {\n\n fn as_lines(&self, target_node: String, target_port: usize) -> Vec<Line> {\n\n let mut lines = vec![];\n\n match self {\n\n Expression::Term(t) => match t {\n\n Term::Node(n, c) => {\n\n lines.push(Line::Edge(\n\n n.clone(),\n\n *c as u32,\n\n target_node,\n\n target_port as u32,\n\n ));\n\n }\n\n Term::NodeConstructor {\n\n name,\n\n inputs,\n\n static_parameters,\n", "file_path": "definition_language/src/lib.rs", "rank": 56, "score": 24.13180685629232 }, { "content": "#[derive(Copy, Clone, Debug, Default)]\n\npub struct SchmittTrigger {\n\n triggered: bool\n\n}\n\n\n\nimpl SchmittTrigger {\n\n #[inline]\n\n pub fn tick(&mut self, trigger: f64) -> bool {\n\n if trigger > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n true\n\n } else {\n\n false\n\n }\n\n } else {\n\n self.triggered = false;\n\n false\n\n }\n\n }\n\n}\n", "file_path": "core_nodes/src/utils.rs", "rank": 57, "score": 24.033591148882586 }, { "content": " }\n\n}\n\n\n\nimpl<F: FnMut() -> f32 + Clone> Node for FnNode<(), F> {\n\n type Input = U0;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, _input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for r in r.iter_mut() {\n\n *r = self.0();\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\nimpl<F: FnMut((f32,)) -> f32 + Clone> Node for FnNode<(f32,), F> {\n\n type Input = U1;\n\n type Output = U1;\n", "file_path": "core_nodes/src/lib.rs", "rank": 58, "score": 23.28826388755185 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Copy, Clone)]\n\npub struct ADEnvelope {\n\n time: f32,\n\n triggered: bool,\n\n current: f32,\n\n per_sample: f32,\n\n}\n\n\n\nimpl Default for ADEnvelope {\n\n fn default() -> Self {\n\n Self {\n\n time: f32::INFINITY,\n\n triggered: false,\n\n current: 0.0,\n\n 
per_sample: 0.0,\n\n }\n\n }\n", "file_path": "core_nodes/src/envelope.rs", "rank": 59, "score": 23.244854850387366 }, { "content": "\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let a = input[0];\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n *r = self.0((a[i],));\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\nimpl<F: FnMut((f32, f32)) -> f32 + Clone> Node for FnNode<(f32, f32), F> {\n\n type Input = U2;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let a = input[0];\n\n let b = input[1];\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n *r = self.0((a[i], b[i]));\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n", "file_path": "core_nodes/src/lib.rs", "rank": 60, "score": 23.238188503156454 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse std::f32::consts::PI;\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Clone)]\n\npub struct Folder;\n\nimpl Node for Folder {\n\n type Input = U1;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = input[0];\n\n for r in r.iter_mut() {\n\n while r.abs() > 1.0 {\n\n *r = r.signum() - (*r - r.signum());\n\n }\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n", "file_path": "core_nodes/src/distortion.rs", "rank": 61, "score": 23.188851244754485 }, { "content": " .map(|(node, _count)| node)\n\n .collect();\n\n if to_process.is_empty() {\n\n let processed: HashSet<_> = processed.into_iter().collect();\n\n let all: HashSet<_> = nodes.keys().cloned().collect();\n\n panic!(\"{:?}\", all.difference(&processed));\n\n }\n\n for node_name in to_process {\n\n let in_edges = edges.remove(&node_name).unwrap_or_else(Vec::new);\n\n let (code, in_ports, out_ports) = &nodes\n\n .get(&node_name)\n\n .unwrap_or_else(|| panic!(\"Node 
not found: {}\", node_name));\n\n\n\n let mut needed_ports: HashSet<_> = (0..*in_ports).collect();\n\n let mut in_code: Vec<_> = in_edges\n\n .into_iter()\n\n .map(|(src_node, src_port, dst_port)| {\n\n needed_ports.remove(dst_port);\n\n let (code, _in_ports, _out_ports) = &nodes[&src_node];\n\n let mut code = code.clone();\n", "file_path": "definition_language/src/code_generation.rs", "rank": 62, "score": 23.157050858397916 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Node for Markov {\n\n type Input = U1;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n if input[0][i] > 0.5 {\n\n if !self.trigger {\n\n self.trigger = true;\n\n let transitions = &self.transitions[self.current_state].1;\n\n let mut rng = thread_rng();\n\n let new_state = transitions\n\n .choose_weighted(&mut rng, |(_, w)| *w)\n\n .unwrap()\n\n .0;\n", "file_path": "core_nodes/src/randomization.rs", "rank": 63, "score": 23.067539726213745 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Log;\n\n\n\nimpl Node for Log {\n\n type Input = U1;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n println!(\"{:?}\", input[0]);\n\n input\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct LogTrigger {\n\n triggered: bool,\n\n}\n", "file_path": "core_nodes/src/misc.rs", "rank": 64, "score": 22.995771807517155 }, { "content": "\n\n#[derive(Clone, Default)]\n\npub struct BlockDelayLine {\n\n lines: Vec<f64>,\n\n pub width: usize,\n\n pub len: f64,\n\n in_index: usize,\n\n out_index: f64,\n\n output_buffer: Vec<f64>,\n\n delay: f64,\n\n}\n\n\n\nimpl BlockDelayLine {\n\n pub fn new(width: usize, delay: f64) -> Self {\n\n let mut s = Self {\n\n lines: vec![0.0; width],\n\n len: 1.0,\n\n 
output_buffer: vec![0.0; width],\n\n width,\n\n in_index: 0,\n", "file_path": "core_nodes/src/delay_and_reverb.rs", "rank": 65, "score": 22.980681207528836 }, { "content": " self.triggered = false;\n\n }\n\n *r = if self.value { 1.0 } else { 0.0 };\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct Comparator;\n\nimpl Node for Comparator {\n\n type Input = U2;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (a, b) = (input[0], input[1]);\n\n\n\n let mut r = [0.0f32; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n", "file_path": "core_nodes/src/computation.rs", "rank": 66, "score": 22.907102155417952 }, { "content": " match message {\n\n wmidi::MidiMessage::NoteOn(c, n, v) => {\n\n if let Some(voices) = voices.get_mut(&c.index()) {\n\n let mut consumed = None;\n\n let mut aval = None;\n\n for (i, (j, f)) in voices.iter_mut().enumerate() {\n\n if Some(n) == *f {\n\n consumed = Some(i);\n\n break;\n\n } else if f.is_none() && aval.is_none() {\n\n aval = Some((i, *j));\n\n }\n\n }\n\n let velocity = u8::try_from(v).unwrap();\n\n if let Some(i) = consumed {\n\n inputs\n\n .entry(format!(\"midi_{}_voice_{}_velocity\", c.index(), i))\n\n .or_insert_with(Vec::new)\n\n .push(u8::try_from(v).unwrap() as f32 / 127.0);\n\n } else {\n", "file_path": "dynamic_environment/src/main.rs", "rank": 67, "score": 22.89574973770695 }, { "content": "\n\n#[derive(Clone)]\n\npub struct ToneHoleFlute {\n\n tone_holes: Vec<ToneHole>,\n\n plus_lines: Vec<DelayLine>,\n\n plus_lines_buffer: Vec<f64>,\n\n minus_lines: Vec<DelayLine>,\n\n minus_lines_buffer: Vec<f64>,\n\n body_filter: Simper,\n\n}\n\n\n\nimpl Default for ToneHoleFlute {\n\n fn default() -> Self {\n\n let mut body_filter = Simper::default();\n\n body_filter.set_parameters(10000.0, 0.0);\n\n Self {\n\n tone_holes: (0..4).map(|_| ToneHole::default()).collect(),\n\n minus_lines: (0..4).map(|_| 
DelayLine::default()).collect(),\n\n minus_lines_buffer: vec![0.0; 4],\n\n plus_lines: (0..4).map(|_| DelayLine::default()).collect(),\n\n plus_lines_buffer: vec![0.0; 4],\n\n body_filter,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 68, "score": 22.88416747857227 }, { "content": "impl Node for PluckedString {\n\n type Input = U3;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let freq = input[0][i];\n\n let trigger = input[1][i];\n\n let mut slap_threshold = input[2][i];\n\n if slap_threshold == 0.0 {\n\n slap_threshold = 1.0;\n\n }\n\n self.line.set_delay((1.0 / (freq as f64)) / self.per_sample);\n\n let pluck = if trigger > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n trigger as f64\n\n } else {\n\n 0.0\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 69, "score": 22.712864399342735 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Clone, Default)]\n\npub struct Constant(pub f32);\n\nimpl Node for Constant {\n\n type Input = U0;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, _input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n arr![[f32; BLOCK_SIZE]; [self.0; BLOCK_SIZE]]\n\n }\n\n\n\n fn set_static_parameters(&mut self, parameters: &str) -> Result<(), String> {\n\n let n: f32 = parameters.parse().map_err(|e| format!(\"{}\", e))?;\n\n self.0 = n;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/computation.rs", "rank": 71, "score": 22.330203536571705 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Node for StringBodyFilter {\n\n type Input = U1;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n 
let mut v = input[0][i] as f64;\n\n for f in self.filters.iter_mut() {\n\n v = f.tick(v);\n\n }\n\n *r = v as f32;\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 72, "score": 22.263911245527005 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse std::f32::consts::PI;\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\nuse crate::{DelayLine, utils::make_coprime};\n\n\n\n#[derive(Clone, Default)]\n\npub struct AllPass {\n\n delay: DelayLine,\n\n pub gain: f64,\n\n rate: f64,\n\n}\n\n\n\nimpl AllPass {\n\n pub fn new(gain: f64, delay: f64) -> Self {\n\n let mut line = DelayLine::default();\n\n line.set_delay(delay);\n\n Self {\n\n delay: line,\n\n gain,\n\n rate: 48000.0,\n", "file_path": "core_nodes/src/filter.rs", "rank": 73, "score": 22.207826446036226 }, { "content": " self.cached_value =\n\n self.pitches[idx] * 2.0f32.powi(octave as i32) * SEMITONE.powi(root as i32);\n\n }\n\n *r = self.cached_value;\n\n }\n\n\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n\n\n fn set_static_parameters(&mut self, parameters: &str) -> Result<(), String> {\n\n let mut new_pitches = vec![A0];\n\n for n in parameters.split(' ') {\n\n let next_value = match n {\n\n \"W\" => new_pitches[new_pitches.len() - 1] * SEMITONE.powi(2),\n\n \"H\" => new_pitches[new_pitches.len() - 1] * SEMITONE,\n\n _ => return Err(format!(\"Unknown interval {}\", n)),\n\n };\n\n new_pitches.push(next_value);\n\n }\n\n self.pitches = new_pitches;\n", "file_path": "core_nodes/src/quantizer.rs", "rank": 74, "score": 22.13779337166123 }, { "content": " #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n input\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct IndexPort<A: PortCount>(usize, PhantomData<A>);\n\nimpl<A: PortCount> IndexPort<A> {\n\n pub fn new(port: usize) -> Self {\n\n IndexPort(port, Default::default())\n\n }\n\n}\n\n\n\nimpl<A: PortCount> Node for IndexPort<A> {\n\n type Input = A;\n\n type 
Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n arr![[f32; BLOCK_SIZE]; input[self.0]]\n\n }\n\n}\n", "file_path": "core_nodes/src/topological.rs", "rank": 75, "score": 21.85552725007706 }, { "content": "}\n\nimpl Noise {\n\n pub fn positive() -> Self {\n\n Self {\n\n positive: true,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl Node for Noise {\n\n type Input = U1;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let freq = input[0];\n\n\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let period = 1.0 / freq[i];\n", "file_path": "core_nodes/src/oscillator.rs", "rank": 76, "score": 21.742544201513468 }, { "content": "pub struct Sink<A: PortCount>(PhantomData<A>);\n\nimpl<A: PortCount> Default for Sink<A> {\n\n fn default() -> Self {\n\n Self(Default::default())\n\n }\n\n}\n\nimpl<A: PortCount> Node for Sink<A> {\n\n type Input = A;\n\n type Output = U0;\n\n #[inline]\n\n fn process(&mut self, _input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n arr![[f32; BLOCK_SIZE]; ]\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Stack<A: Clone, B: Clone, C: Clone>(A, B, PhantomData<C>);\n\nimpl<A, B, C> Stack<A, B, C>\n\nwhere\n\n A: Node + Clone,\n", "file_path": "core_nodes/src/topological.rs", "rank": 77, "score": 21.70026140922835 }, { "content": " let mut r = <Ports<Self::Output> >::default();\n\n for (i, r) in r[0].iter_mut().enumerate() {\n\n let mut signal = signal[i] as f64;\n\n for (_, all_pass) in &mut self.all_passes {\n\n signal = all_pass.tick(signal);\n\n }\n\n *r = signal as f32;\n\n }\n\n r\n\n }\n\n\n\n fn set_sample_rate(&mut self, rate: f32) {\n\n self.rate = rate as f64;\n\n let mut delays:Vec<_> = self.all_passes.iter().map(|(d, _)| (d*self.rate) as u32).collect()\n\n ;\n\n make_coprime(&mut delays);\n\n for (d, (_, all_pass)) in delays.iter().zip(&mut self.all_passes) {\n\n 
all_pass.set_delay(*d as f64);\n\n }\n\n }\n\n\n\n fn set_static_parameters(&mut self, parameters: &str) -> Result<(), String> {\n\n self.all_passes = parse_allpass(parameters)?;\n\n self.set_sample_rate(self.rate as f32);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/filter.rs", "rank": 78, "score": 21.619189046147945 }, { "content": "use generic_array::{arr, typenum::*};\n\nuse rand::prelude::*;\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\n#[derive(Clone, Default)]\n\npub struct BernoulliGate {\n\n trigger: bool,\n\n active_gate: bool,\n\n}\n\n\n\nimpl Node for BernoulliGate {\n\n type Input = U2;\n\n type Output = U2;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (prob, sig) = (input[0], input[1]);\n\n let mut r = [[0.0; BLOCK_SIZE], [0.0; BLOCK_SIZE]];\n\n for i in 0..BLOCK_SIZE {\n\n let prob = prob[i];\n\n let sig = sig[i];\n", "file_path": "core_nodes/src/randomization.rs", "rank": 79, "score": 21.442951978058005 }, { "content": " let pulses = pulses[i] as u32;\n\n let len = len[i] as u32;\n\n if pulses != self.pulses || len != self.len {\n\n make_euclidian_rhythm(pulses, len, &mut self.steps);\n\n self.pulses = pulses;\n\n self.len = len;\n\n }\n\n let gate = gate[i];\n\n if gate > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n self.idx = (self.idx + 1) % self.len as usize;\n\n if self.steps[self.idx] {\n\n *r = 1.0;\n\n }\n\n }\n\n } else {\n\n self.triggered = false;\n\n }\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/randomization.rs", "rank": 80, "score": 21.285129433331157 }, { "content": " corrections,\n\n sweep_clock: 1.0,\n\n }\n\n }\n\n}\n\n\n\nimpl Node for InstrumentTuner {\n\n type Input = U1;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for i in 0..BLOCK_SIZE {\n\n self.sweep_clock -= 1.0 / self.rate 
as f32;\n\n if self.sweep_clock <= 0.0 {\n\n if self.fill_idx == self.buffer.len() {\n\n const POWER_THRESHOLD: f32 = 5.0;\n\n const CLARITY_THRESHOLD: f32 = 0.7;\n\n let mut detector = McLeodDetector::new(1024 * 10, (1024 * 10) / 2);\n\n\n", "file_path": "core_nodes/src/tuning.rs", "rank": 81, "score": 21.25245940859152 }, { "content": " *r = output as f32;\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n\n\n fn set_sample_rate(&mut self, rate: f32) {\n\n self.per_sample = 1.0 / rate as f64;\n\n self.string_filter = OnePole::new(0.75 - (0.2 * 22050.0 / rate as f64), 0.9);\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ImaginaryGuitar {\n\n strings: Vec<(DelayLine, f64, OnePole, bool)>,\n\n per_sample: f64,\n\n body_filters: [Biquad; 6],\n\n}\n\n\n\nimpl Default for ImaginaryGuitar {\n\n fn default() -> Self {\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 82, "score": 21.252127059325268 }, { "content": " if a[i] > b[i] {\n\n *r = 1.0\n\n } else {\n\n *r = 0.0\n\n }\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct CXor;\n\nimpl Node for CXor {\n\n type Input = U2;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (a, b) = (input[0], input[1]);\n\n\n", "file_path": "core_nodes/src/computation.rs", "rank": 83, "score": 20.895428316406228 }, { "content": " if self.idx >= self.len {\n\n self.idx -= self.len;\n\n }\n\n *r = self.table[self.idx as usize % self.table.len()];\n\n self.idx += input[i] * d;\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n\n\n fn set_sample_rate(&mut self, rate: f32) {\n\n self.sample_rate = rate;\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct Noise {\n\n clock: f32,\n\n value: f32,\n\n positive: bool,\n\n per_sample: f32,\n", "file_path": "core_nodes/src/oscillator.rs", "rank": 84, "score": 20.872051865348855 }, { "content": " fn set_sample_rate(&mut self, rate: f32) {\n\n self.rate = rate as 
f64;\n\n self.blocks.set_sample_rate(self.delay_mul, self.gain_mul, self.rate);\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct ModableDelay {\n\n line: DelayLine,\n\n rate: f32,\n\n}\n\nimpl Node for ModableDelay {\n\n type Input = U3;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (len, sig, feedback) = (input[0], input[1], input[2]);\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n", "file_path": "core_nodes/src/delay_and_reverb.rs", "rank": 85, "score": 20.85601704813319 }, { "content": " Subsequence::ClockMultiplier(sub_sequence, ..) => sub_sequence.reset(),\n\n }\n\n }\n\n\n\n fn current(\n\n &mut self,\n\n pulse: bool,\n\n clock_division: f32,\n\n ) -> (Option<f32>, bool, bool, bool, bool, f32) {\n\n match self {\n\n Subsequence::Rest(clock) => {\n\n if pulse {\n\n *clock += 1.0;\n\n }\n\n let do_tick = if *clock >= clock_division {\n\n *clock = 0.0;\n\n true\n\n } else {\n\n false\n\n };\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 86, "score": 20.84079204023505 }, { "content": " r\n\n }\n\n\n\n fn set_sample_rate(&mut self, rate: f32) {\n\n for (len, d) in &mut self.delays {\n\n d.set_delay(*len*rate as f64);\n\n }\n\n }\n\n\n\n}\n\n\n\n// Based on: https://signalsmith-audio.co.uk/writing/2021/lets-write-a-reverb/\n\n#[derive(Clone)]\n\npub struct Reverb2 {\n\n diffusors: Vec<Diffusor>,\n\n delays: Vec<(f64, DelayLine)>,\n\n}\n\n\n\nimpl Default for Reverb2 {\n\n fn default() -> Self {\n", "file_path": "core_nodes/src/delay_and_reverb.rs", "rank": 87, "score": 20.75844150266111 }, { "content": " #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r_trig = [0.0f32; BLOCK_SIZE];\n\n let mut r_gate = [0.0f32; BLOCK_SIZE];\n\n let mut r_eoc = [0.0f32; BLOCK_SIZE];\n\n let mut r_value = [0.0f32; BLOCK_SIZE];\n\n let mut r_len = [0.0f32; BLOCK_SIZE];\n\n for i in 0..BLOCK_SIZE {\n\n 
if input[1][i] > 0.5 {\n\n if !self.burst_triggered {\n\n self.burst_triggered = true;\n\n self.firing = true;\n\n self.seq.reset();\n\n }\n\n } else {\n\n self.burst_triggered = false;\n\n }\n\n if self.firing {\n\n let trigger = input[0][i];\n\n let result = self.seq.tick(trigger);\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 88, "score": 20.67935559190331 }, { "content": " fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let freq = input[0];\n\n let pressure = input[1];\n\n let note_trigger = input[2];\n\n let feedback = input[3];\n\n\n\n let mut r = [0.0f32; BLOCK_SIZE];\n\n let mut rng = thread_rng();\n\n\n\n for (i, r) in r.iter_mut().enumerate() {\n\n let mut freq = freq[i].max(1.0);\n\n let mut pressure = pressure[i];\n\n if pressure == 0.0 {\n\n self.breath = (self.breath + self.per_sample * 10.0 * 2.0).min(12.0);\n\n }\n\n let mut excitation_cutoff = 6000.0;\n\n let mut body_cutoff = 8.5;\n\n let mut noise_scale = 0.01;\n\n let mut feedback = feedback[i];\n\n if freq >= 1046.50 {\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 89, "score": 20.63738415055738 }, { "content": " let mut r = [0.0; BLOCK_SIZE];\n\n for i in 0..BLOCK_SIZE {\n\n let a = a[i];\n\n let b = b[i];\n\n let v = a.max(b).min(-a.min(b));\n\n r[i] = v;\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct SampleAndHold(f32, bool, bool);\n\nimpl Node for SampleAndHold {\n\n type Input = U2;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (gate, signal) = (input[0], input[1]);\n", "file_path": "core_nodes/src/computation.rs", "rank": 90, "score": 20.233350214295317 }, { "content": " line: DelayLine,\n\n per_sample: f64,\n\n}\n\n\n\nimpl Default for SympatheticString {\n\n fn default() -> Self {\n\n Self {\n\n line: DelayLine::default(),\n\n per_sample: 1.0 / 44100.0,\n\n }\n\n }\n\n}\n\n\n\nimpl Node for 
SympatheticString {\n\n type Input = U3;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0; BLOCK_SIZE];\n\n for (i, r) in r.iter_mut().enumerate() {\n", "file_path": "core_nodes/src/waveguide.rs", "rank": 91, "score": 20.190788275620623 }, { "content": " #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let src_imp = input[0];\n\n let clock = input[1];\n\n let src_aux = input[2];\n\n\n\n let mut imp = [0.0; BLOCK_SIZE];\n\n let mut aux = [0.0; BLOCK_SIZE];\n\n for i in 0..BLOCK_SIZE {\n\n let src_imp = src_imp[i];\n\n let clock = clock[i];\n\n if src_imp > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n self.aux_next = src_aux[i];\n\n self.pending = true;\n\n self.next_imp = 1.0;\n\n }\n\n } else {\n\n self.triggered = false;\n", "file_path": "core_nodes/src/quantizer.rs", "rank": 92, "score": 20.18201057442123 }, { "content": " self.per_sample = 1.0 / rate;\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct Slew {\n\n current: f32,\n\n target: f32,\n\n remaining: u32,\n\n delta: f32,\n\n per_sample: f32,\n\n}\n\nimpl Node for Slew {\n\n type Input = U2;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let (transition_time, sig) = (input[0], input[1]);\n\n let mut r = sig;\n", "file_path": "core_nodes/src/computation.rs", "rank": 93, "score": 20.0448765990487 }, { "content": "#![feature(hash_drain_filter)]\n\nuse generic_array::{\n\n arr,\n\n typenum::{U0, U2},\n\n};\n\nuse virtual_modular_graph::{Node, Ports, BLOCK_SIZE};\n\n\n\nuse std::{cell::RefCell, collections::HashMap};\n\n\n\npub struct InstrumentSynth {\n\n synth: RefCell<Box<dyn Node<Input = U0, Output = U2>>>,\n\n synth_sample: (usize, Ports<U2>),\n\n float_parameters: HashMap<String, Box<dyn FnMut(f64)>>,\n\n float_float_parameters: HashMap<String, Box<dyn FnMut(f64, f64)>>,\n\n 
sample_rate: f32,\n\n}\n\nunsafe impl Send for InstrumentSynth {}\n\nunsafe impl Sync for InstrumentSynth {}\n\n\n\n#[derive(Default)]\n", "file_path": "src/lib.rs", "rank": 94, "score": 20.02090150876452 }, { "content": "\n\nimpl Node for LogTrigger {\n\n type Input = U1;\n\n type Output = U1;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n for r in &input[0] {\n\n if *r > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n println!(\"Trigger\");\n\n }\n\n } else {\n\n self.triggered = false;\n\n }\n\n }\n\n input\n\n }\n\n}\n\n\n", "file_path": "core_nodes/src/misc.rs", "rank": 95, "score": 19.999609631869298 }, { "content": "}\n\n\n\nimpl RMS {\n\n pub fn tick(&mut self, sample: f64) -> f64 {\n\n self.mean_squared = self.mean_squared * self.decay + (1.0 - self.decay) * sample.powi(2);\n\n self.mean_squared.sqrt()\n\n }\n\n}\n\n\n\nimpl Node for RMS {\n\n type Input = U1;\n\n type Output = U1;\n\n\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let mut r = [0.0f32; BLOCK_SIZE];\n\n for (sample, r) in input[0].iter().zip(&mut r) {\n\n *r = self.tick(*sample as f64) as f32;\n\n }\n\n arr![[f32; BLOCK_SIZE]; r]\n", "file_path": "core_nodes/src/misc.rs", "rank": 96, "score": 19.994608663523202 }, { "content": " (None, do_tick, false, false, false, clock_division)\n\n }\n\n Subsequence::Item(v, clock) => {\n\n if pulse {\n\n *clock += 1.0;\n\n }\n\n let (do_tick, do_trigger, gate) = if *clock >= clock_division {\n\n *clock = 0.0;\n\n (true, true, true)\n\n } else {\n\n (false, false, true)\n\n };\n\n (Some(*v), do_tick, do_trigger, gate, false, clock_division)\n\n }\n\n Subsequence::Tuplet(sub_sequence, sub_idx) => {\n\n let clock_division = clock_division / sub_sequence.len() as f32;\n\n let (v, do_tick, do_trigger, gate, _, len) =\n\n sub_sequence[*sub_idx].current(pulse, clock_division);\n\n let do_tick = if do_tick {\n\n *sub_idx += 1;\n", "file_path": 
"core_nodes/src/sequencer.rs", "rank": 97, "score": 19.932414450767585 }, { "content": "\n\nimpl Node for NCube {\n\n type Input = U2;\n\n type Output = U16;\n\n #[inline]\n\n fn process(&mut self, input: Ports<Self::Input>) -> Ports<Self::Output> {\n\n let trigger = input[0];\n\n let radius = input[1];\n\n let mut r = <Ports<Self::Output>>::default();\n\n for i in 0..BLOCK_SIZE {\n\n if trigger[i] > 0.5 {\n\n if !self.triggered {\n\n self.triggered = true;\n\n self.a += self.a_per_trigger;\n\n self.r = radius[i] * 9.0;\n\n self.rebuild_cache();\n\n }\n\n } else {\n\n self.triggered = false;\n\n }\n", "file_path": "core_nodes/src/sequencer.rs", "rank": 98, "score": 19.9001907699329 }, { "content": " .push(u8::try_from(v).unwrap() as f32 / 127.0);\n\n }\n\n wmidi::MidiMessage::NoteOff(c, n, ..) => {\n\n if let Some(voices) = voices.get_mut(&c.index()) {\n\n for (j, f) in voices.iter_mut() {\n\n if Some(n) == *f {\n\n *f = None;\n\n inputs\n\n .entry(format!(\n\n \"midi_{}_voice_{}_velocity\",\n\n c.index(),\n\n j\n\n ))\n\n .or_insert_with(Vec::new)\n\n .push(0.0);\n\n }\n\n }\n\n }\n\n if u8::try_from(n).unwrap() == current_voice {\n\n current_voice = 0;\n", "file_path": "dynamic_environment/src/main.rs", "rank": 99, "score": 19.879968277581693 } ]
Rust
imgui/src/widget/tab.rs
eiz/imgui-rs
1ef9393f3253e4318e50219cd3c93fc98beeb578
use crate::sys; use crate::Ui; use bitflags::bitflags; use std::ptr; bitflags! { #[repr(transparent)] pub struct TabBarFlags: u32 { const REORDERABLE = sys::ImGuiTabBarFlags_Reorderable; const AUTO_SELECT_NEW_TABS = sys::ImGuiTabBarFlags_AutoSelectNewTabs; const TAB_LIST_POPUP_BUTTON = sys::ImGuiTabBarFlags_TabListPopupButton; const NO_CLOSE_WITH_MIDDLE_MOUSE_BUTTON = sys::ImGuiTabBarFlags_NoCloseWithMiddleMouseButton; const NO_TAB_LIST_SCROLLING_BUTTONS = sys::ImGuiTabBarFlags_NoTabListScrollingButtons; const NO_TOOLTIP = sys::ImGuiTabBarFlags_NoTooltip; const FITTING_POLICY_RESIZE_DOWN = sys::ImGuiTabBarFlags_FittingPolicyResizeDown; const FITTING_POLICY_SCROLL = sys::ImGuiTabBarFlags_FittingPolicyScroll; const FITTING_POLICY_MASK = sys::ImGuiTabBarFlags_FittingPolicyMask_; const FITTING_POLICY_DEFAULT = sys::ImGuiTabBarFlags_FittingPolicyDefault_; } } bitflags! { #[repr(transparent)] pub struct TabItemFlags: u32 { const UNSAVED_DOCUMENT = sys::ImGuiTabItemFlags_UnsavedDocument; const SET_SELECTED = sys::ImGuiTabItemFlags_SetSelected; const NO_CLOSE_WITH_MIDDLE_MOUSE_BUTTON = sys::ImGuiTabItemFlags_NoCloseWithMiddleMouseButton; const NO_PUSH_ID = sys::ImGuiTabItemFlags_NoPushId; const NO_TOOLTIP = sys::ImGuiTabItemFlags_NoTooltip; const NO_REORDER = sys::ImGuiTabItemFlags_NoReorder; const LEADING = sys::ImGuiTabItemFlags_Leading; const TRAILING = sys::ImGuiTabItemFlags_Trailing; } } pub struct TabBar<T> { id: T, flags: TabBarFlags, } impl<T: AsRef<str>> TabBar<T> { #[inline] #[doc(alias = "BeginTabBar")] pub fn new(id: T) -> Self { Self { id, flags: TabBarFlags::empty(), } } #[inline] pub fn reorderable(mut self, value: bool) -> Self { self.flags.set(TabBarFlags::REORDERABLE, value); self } #[inline] pub fn flags(mut self, flags: TabBarFlags) -> Self { self.flags = flags; self } #[must_use] pub fn begin(self, ui: &Ui) -> Option<TabBarToken<'_>> { ui.tab_bar_with_flags(self.id, self.flags) } pub fn build<R, F: FnOnce() -> R>(self, ui: &Ui, f: F) -> Option<R> { 
self.begin(ui).map(|_tab| f()) } } create_token!( pub struct TabBarToken<'ui>; drop { sys::igEndTabBar() } ); pub struct TabItem<'a, T> { label: T, opened: Option<&'a mut bool>, flags: TabItemFlags, } impl<'a, T: AsRef<str>> TabItem<'a, T> { #[doc(alias = "BeginTabItem")] pub fn new(name: T) -> Self { Self { label: name, opened: None, flags: TabItemFlags::empty(), } } #[inline] pub fn opened(mut self, opened: &'a mut bool) -> Self { self.opened = Some(opened); self } #[inline] pub fn flags(mut self, flags: TabItemFlags) -> Self { self.flags = flags; self } #[must_use] pub fn begin(self, ui: &Ui) -> Option<TabItemToken<'_>> { ui.tab_item_with_flags(self.label, self.opened, self.flags) } pub fn build<R, F: FnOnce() -> R>(self, ui: &Ui, f: F) -> Option<R> { self.begin(ui).map(|_tab| f()) } } create_token!( pub struct TabItemToken<'ui>; drop { sys::igEndTabItem() } ); impl Ui { pub fn tab_bar(&self, id: impl AsRef<str>) -> Option<TabBarToken<'_>> { self.tab_bar_with_flags(id, TabBarFlags::empty()) } pub fn tab_bar_with_flags( &self, id: impl AsRef<str>, flags: TabBarFlags, ) -> Option<TabBarToken<'_>> { let should_render = unsafe { sys::igBeginTabBar(self.scratch_txt(id), flags.bits() as i32) }; if should_render { Some(TabBarToken::new(self)) } else { unsafe { sys::igEndTabBar() }; None } } pub fn tab_item(&self, label: impl AsRef<str>) -> Option<TabItemToken<'_>> { self.tab_item_with_flags(label, None, TabItemFlags::empty()) } pub fn tab_item_with_opened( &self, label: impl AsRef<str>, opened: &mut bool, ) -> Option<TabItemToken<'_>> { self.tab_item_with_flags(label, Some(opened), TabItemFlags::empty()) } pub fn tab_item_button_with_flags(&self, label: impl AsRef<str>, flags: TabItemFlags) -> bool { unsafe { sys::igTabItemButton(self.scratch_txt(label), flags.bits() as i32) } } pub fn tab_item_with_flags( &self, label: impl AsRef<str>, opened: Option<&mut bool>, flags: TabItemFlags, ) -> Option<TabItemToken<'_>> { let should_render = unsafe { sys::igBeginTabItem( 
self.scratch_txt(label), opened.map(|x| x as *mut bool).unwrap_or(ptr::null_mut()), flags.bits() as i32, ) }; if should_render { Some(TabItemToken::new(self)) } else { None } } }
use crate::sys; use crate::Ui; use bitflags::bitflags; use std::ptr; bitflags! { #[repr(transparent)] pub struct TabBarFlags: u32 { const REORDERABLE = sys::ImGuiTabBarFlags_Reorderable; const AUTO_SELECT_NEW_TABS = sys::ImGuiTabBarFlags_AutoSelectNewTabs; const TAB_LIST_POPUP_BUTTON = sys::ImGuiTabBarFlags_TabListPopupButton; const NO_CLOSE_WITH_MIDDLE_MOUSE_BUTTON = sys::ImGuiTabBarFlags_NoCloseWithMiddleMouseButton; const NO_TAB_LIST_SCROLLING_BUTTONS = sys::ImGuiTabBarFlags_NoTabListScrollingButtons; const NO_TOOLTIP = sys::ImGuiTabBarFlags_NoTooltip; const FITTING_POLICY_RESIZE_DOWN = sys::ImGuiTabBarFlags_FittingPolicyResizeDown; const FITTING_POLICY_SCROLL = sys::ImGuiTabBarFlags_FittingPolicyScroll; const FITTING_POLICY_MASK = sys::ImGuiTabBarFlags_FittingPolicyMask_; const FITTING_POLICY_DEFAULT = sys::ImGuiTabBarFlags_FittingPolicyDefault_; } } bitflags! { #[repr(transparent)] pub struct TabItemFlags: u32 { const UNSAVED_DOCUMENT = sys::ImGuiTabItemFlags_UnsavedDocument; const SET_SELECTED = sys::ImGuiTabItemFlags_SetSelected; const NO_CLOSE_WITH_MIDDLE_MOUSE_BUTTON = sys::ImGuiTabItemFlags_NoCloseWithMiddleMouseButton; const NO_PUSH_ID = sys::ImGuiTabItemFlags_NoPushId; const NO_TOOLTIP = sys::ImGuiTabItemFlags_NoTooltip; const NO_REORDER = sys::ImGuiTabItemFlags_NoReorder; const LEADING = sys::ImGuiTabItemFlags_Leading; const TRAILING = sys::ImGuiTabItemFlags_Trailing; } } pub struct TabBar<T> { id: T, flags: TabBarFlags, } impl<T: AsRef<str>> TabBar<T> { #[inline] #[doc(alias = "BeginTabBar")] pub fn new(id: T) -> Self { Self { id, flags: TabBarFlags::empty(), } } #[inline] pub fn reorderable(mut self, value: bool) -> Self { self.flags.set(TabBarFlags::REORDERABLE, value); self } #[inline] pub fn flags(mut self, flags: TabBarFlags) -> Self { self.flags = flags; self } #[must_use] pub fn begin(self, ui: &Ui) -> Option<TabBarToken<'_>> { ui.tab_bar_with_flags(self.id, self.flags) } pub fn build<R, F: FnOnce() -> R>(self, ui: &Ui, f: F) -> Option<R> { 
self.begin(ui).map(|_tab| f()) } } create_token!( pub struct TabBarToken<'ui>; drop { sys::igEndTabBar() } ); pub struct TabItem<'a, T> { label: T, opened: Option<&'a mut bool>, flags: TabItemFlags, } impl<'a, T: AsRef<str>> TabItem<'a, T> { #[doc(alias = "BeginTabItem")] pub fn new(name: T) -> Self { Self { label: name, opened: None, flags: TabItemFlags::empty(), } } #[inline] pub fn opened(mut self, opened: &'a mut bool) -> Self { self.opened = Some(opened); self } #[inline] pub fn flags(mut self, flags: TabItemFlags) -> Self { self.flags = flags; self } #[must_use] pub fn begin(self, ui: &Ui) -> Option<TabItemToken<'_>> { ui.tab_item_with_flags(self.label, self.opened, self.flags) } pub fn build<R, F: FnOnce() -> R>(self, ui: &Ui, f: F) -> Option<R> { self.begin(ui).map(|_tab| f()) } } create_token!( pub struct TabItemToken<'ui>; drop { sys::igEndTabItem() } ); impl Ui { pub fn tab_bar(&self, id: impl AsRef<str>) -> Option<TabBarToken<'_>> { self.tab_bar_with_flags(id, TabBarFlags::empty()) } pub fn tab_bar_with_flags( &self, id: impl AsRef<str>, flags: TabBarFlags, ) -> Option<TabBarToken<'_>> { let should_render = unsafe { sys::igBeginTabBar(self.scratch_txt(id), flags.bits() as i32) }; if should_render { Some(TabBarToken::new(self)) } else { unsafe { sys::igEndTabBar() }; None } } pub fn tab_item(&self, label: impl AsRef<str>) -> Option<TabItemToken<'_>> { self.tab_item_with_flags(label, None, TabItemFlags::empty()) } pub fn tab_item_with_opened( &self, label: impl AsRef<str>, opened: &mut bool, ) -> Option<TabItemToken<'_>> { self.tab_item_with_flags(label, Some(opened), TabItemFlags::empty()) } pub fn tab_item_button_with_flags(&self, label: impl AsRef<str>, flags: TabItemFlags) -> bool { unsafe { sys::igTabItemButton(self.scratch_txt(label), flags.bits() as i32) } } pub fn tab_item_with_flags( &self, label: impl AsRef<str>, opened: Option<&mut bool>, flags: TabItemFlags, ) -> Option<TabItemToken<'_>> {
if should_render { Some(TabItemToken::new(self)) } else { None } } }
let should_render = unsafe { sys::igBeginTabItem( self.scratch_txt(label), opened.map(|x| x as *mut bool).unwrap_or(ptr::null_mut()), flags.bits() as i32, ) };
assignment_statement
[ { "content": "fn show_test_window(ui: &Ui, state: &mut State, opened: &mut bool) {\n\n if state.show_app_main_menu_bar {\n\n show_example_app_main_menu_bar(ui, state)\n\n }\n\n if state.show_app_auto_resize {\n\n show_example_app_auto_resize(\n\n ui,\n\n &mut state.auto_resize_state,\n\n &mut state.show_app_auto_resize,\n\n );\n\n }\n\n if state.show_app_fixed_overlay {\n\n show_example_app_fixed_overlay(ui, &mut state.show_app_fixed_overlay);\n\n }\n\n if state.show_app_manipulating_window_title {\n\n show_example_app_manipulating_window_title(ui);\n\n }\n\n if state.show_app_metrics {\n\n ui.show_metrics_window(&mut state.show_app_metrics);\n\n }\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 0, "score": 382422.10062244715 }, { "content": "fn show_example_app_fixed_overlay(ui: &Ui, opened: &mut bool) {\n\n const DISTANCE: f32 = 10.0;\n\n let window_pos = [DISTANCE, DISTANCE];\n\n let style = ui.push_style_color(StyleColor::WindowBg, [0.0, 0.0, 0.0, 0.3]);\n\n ui.window(\"Example: Fixed Overlay\")\n\n .opened(opened)\n\n .position(window_pos, Condition::Always)\n\n .title_bar(false)\n\n .resizable(false)\n\n .always_auto_resize(true)\n\n .movable(false)\n\n .save_settings(false)\n\n .build(|| {\n\n ui.text(\n\n \"Simple overlay\\nin the corner of the screen.\\n(right-click to change position)\",\n\n );\n\n ui.separator();\n\n let mouse_pos = ui.io().mouse_pos;\n\n ui.text(format!(\n\n \"Mouse Position: ({:.1},{:.1})\",\n\n mouse_pos[0], mouse_pos[1]\n\n ));\n\n });\n\n style.pop();\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 1, "score": 377975.8591936201 }, { "content": "fn show_example_app_auto_resize(ui: &Ui, state: &mut AutoResizeState, opened: &mut bool) {\n\n ui.window(\"Example: Auto-resizing window\")\n\n .opened(opened)\n\n .always_auto_resize(true)\n\n .build(|| {\n\n ui.text(\n\n \"Window will resize every-ui to the size of its content.\n\nNote that you probably don't want to query the 
window size to\n\noutput your content because that would create a feedback loop.\",\n\n );\n\n ui.slider(\"Number of lines\", 1, 20, &mut state.lines);\n\n for i in 0..state.lines {\n\n ui.text(format!(\"{:2$}This is line {}\", \"\", i, i as usize * 4));\n\n }\n\n });\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 2, "score": 363909.27278703224 }, { "content": "fn show_example_app_custom_rendering(ui: &Ui, state: &mut CustomRenderingState, opened: &mut bool) {\n\n ui.window(\"Example: Custom rendering\")\n\n .size([350.0, 560.0], Condition::FirstUseEver)\n\n .opened(opened)\n\n .build(|| {\n\n ui.text(\"Primitives\");\n\n // TODO: Add DragFloat to change value of sz\n\n ui.color_edit3(\"Color\", &mut state.col);\n\n let draw_list = ui.get_window_draw_list();\n\n let p = ui.cursor_screen_pos();\n\n let spacing = 8.0;\n\n let mut y = p[1] + 4.0;\n\n for n in 0..2 {\n\n let mut x = p[0] + 4.0;\n\n let thickness = if n == 0 { 1.0 } else { 4.0 };\n\n draw_list\n\n .add_circle(\n\n [x + state.sz * 0.5, y + state.sz * 0.5],\n\n state.sz * 0.5,\n\n state.col,\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 3, "score": 363909.2727870323 }, { "content": "fn example_selector(run: &mut bool, ui: &mut Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Slider examples\")\n\n .opened(run)\n\n .position([20.0, 20.0], Condition::Appearing)\n\n .size([700.0, 80.0], Condition::Appearing)\n\n .resizable(false);\n\n w.build(|| {\n\n let mut clicked = false;\n\n clicked |= ui.radio_button(\"Example 1: Basic sliders\", &mut state.example, 1);\n\n clicked |= ui.radio_button(\"Example 2: Slider arrays\", &mut state.example, 2);\n\n if clicked {\n\n state.reset();\n\n }\n\n });\n\n}\n\n\n", "file_path": "imgui-examples/examples/slider.rs", "rank": 4, "score": 295865.2238310493 }, { "content": "fn example_selector(run: &mut bool, ui: &mut Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Color button examples\")\n\n 
.opened(run)\n\n .position([20.0, 20.0], Condition::Appearing)\n\n .size([700.0, 100.0], Condition::Appearing)\n\n .resizable(false);\n\n w.build(|| {\n\n let ex1 = ui.radio_button(\"Example 1: Basics\", &mut state.example, 1);\n\n let ex2 = ui.radio_button(\"Example 2: Alpha component\", &mut state.example, 2);\n\n let ex3 = ui.radio_button(\"Example 3: Input format\", &mut state.example, 3);\n\n if ex1 || ex2 || ex3 {\n\n state.reset();\n\n }\n\n });\n\n}\n\n\n", "file_path": "imgui-examples/examples/color_button.rs", "rank": 5, "score": 292669.5882467358 }, { "content": "fn example_selector(run: &mut bool, ui: &mut Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Radio button examples\")\n\n .opened(run)\n\n .position([20.0, 20.0], Condition::Appearing)\n\n .size([700.0, 80.0], Condition::Appearing)\n\n .resizable(false);\n\n w.build(|| {\n\n let mut clicked = false;\n\n clicked |= ui.radio_button(\"Example 1: Boolean radio buttons\", &mut state.example, 1);\n\n clicked |= ui.radio_button(\"Example 2: Radio buttons\", &mut state.example, 2);\n\n if clicked {\n\n state.reset();\n\n }\n\n });\n\n}\n\n\n", "file_path": "imgui-examples/examples/radio_button.rs", "rank": 6, "score": 292669.5882467358 }, { "content": "fn show_example_menu_file(ui: &Ui, state: &mut FileMenuState) {\n\n ui.menu_item_config(\"(dummy_menu)\").enabled(false).build();\n\n ui.menu_item(\"New\");\n\n ui.menu_item_config(\"Open\").shortcut(\"Ctrl+O\").build();\n\n if let Some(_menu) = ui.begin_menu(\"Open Recent\") {\n\n ui.menu_item(\"fish_hat.c\");\n\n ui.menu_item(\"fish_hat.inl\");\n\n ui.menu_item(\"fish_hat.h\");\n\n if let Some(menu) = ui.begin_menu(\"More..\") {\n\n ui.menu_item(\"Hello\");\n\n ui.menu_item(\"Sailor\");\n\n\n\n if let Some(_menu) = ui.begin_menu(\"Recurse..\") {\n\n show_example_menu_file(ui, state);\n\n menu.end();\n\n }\n\n }\n\n }\n\n ui.menu_item_config(\"Save\").shortcut(\"Ctrl+S\").build();\n\n ui.menu_item(\"Save As..\");\n", "file_path": 
"imgui-examples/examples/test_window_impl.rs", "rank": 7, "score": 260042.04409120555 }, { "content": "fn show_example_app_main_menu_bar(ui: &Ui, state: &mut State) {\n\n if let Some(menu_bar) = ui.begin_main_menu_bar() {\n\n if let Some(menu) = ui.begin_menu(\"File\") {\n\n show_example_menu_file(ui, &mut state.file_menu);\n\n menu.end();\n\n }\n\n if let Some(menu) = ui.begin_menu(\"Edit\") {\n\n ui.menu_item_config(\"Undo\").shortcut(\"CTRL+Z\").build();\n\n ui.menu_item_config(\"Redo\")\n\n .shortcut(\"CTRL+Y\")\n\n .enabled(false)\n\n .build();\n\n ui.separator();\n\n ui.menu_item_config(\"Cut\").shortcut(\"CTRL+X\").build();\n\n ui.menu_item_config(\"Copy\").shortcut(\"CTRL+C\").build();\n\n ui.menu_item_config(\"Paste\").shortcut(\"CTRL+V\").build();\n\n menu.end();\n\n }\n\n menu_bar.end();\n\n }\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 8, "score": 260042.04409120555 }, { "content": "fn show_app_log(ui: &Ui, app_log: &mut Vec<String>) {\n\n ui.window(\"Example: Log\")\n\n .size([500.0, 400.0], Condition::FirstUseEver)\n\n .build(|| {\n\n if ui.small_button(\"[Debug] Add 5 entries\") {\n\n let categories = [\"info\", \"warn\", \"error\"];\n\n let words = [\n\n \"Bumfuzzled\",\n\n \"Cattywampus\",\n\n \"Snickersnee\",\n\n \"Abibliophobia\",\n\n \"Absquatulate\",\n\n \"Nincompoop\",\n\n \"Pauciloquent\",\n\n ];\n\n for _ in 0..5 {\n\n let category = categories[app_log.len() % categories.len()];\n\n let word = words[app_log.len() % words.len()];\n\n let frame = ui.frame_count();\n\n let time = ui.time();\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 9, "score": 259356.34652724984 }, { "content": "pub fn verbose() -> bool {\n\n VERBOSE.load(std::sync::atomic::Ordering::Relaxed)\n\n}\n\n\n", "file_path": "xtask/src/main.rs", "rank": 10, "score": 237089.256516471 }, { "content": "fn show_user_guide(ui: &Ui) {\n\n ui.bullet_text(\"Double-click on title bar to collapse window.\");\n\n 
ui.bullet_text(\"Click and drag on lower right corner to resize window.\");\n\n ui.bullet_text(\"Click and drag on any empty space to move window.\");\n\n ui.bullet_text(\"Mouse Wheel to scroll.\");\n\n // TODO: check font_allow_user_scaling\n\n ui.bullet_text(\"TAB/SHIFT+TAB to cycle through keyboard editable fields.\");\n\n ui.bullet_text(\"CTRL+Click on a slider or drag box to input text.\");\n\n ui.bullet_text(\n\n \"While editing text:\n\n- Hold SHIFT or use mouse to select text\n\n- CTRL+Left/Right to word jump\n\n- CTRL+A or double-click to select all\n\n- CTRL+X,CTRL+C,CTRL+V clipboard\n\n- CTRL+Z,CTRL+Y undo/redo\n\n- ESCAPE to revert\n\n- You can apply arithmetic operators +,*,/ on numerical values.\n\n Use +- to subtract.\\n\",\n\n );\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 11, "score": 229511.4359888325 }, { "content": "fn example_1(ui: &Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Example 1: Basic sliders\")\n\n .size([700.0, 340.0], Condition::Appearing)\n\n .position([20.0, 120.0], Condition::Appearing);\n\n w.build(|| {\n\n ui.text(\"All of the following data types are supported:\");\n\n ui.text(\"Signed: i8 i16 i32 i64\");\n\n ui.text(\"Unsigned: u8 u16 u32 u64\");\n\n ui.text(\"Floats: f32 f64\");\n\n\n\n // Full ranges can be specified with Rust's `::MIN/MAX` constants\n\n ui.slider(\"u8 value\", u8::MIN, u8::MAX, &mut state.u8_value);\n\n\n\n // However for larger data-types, it's usually best to specify\n\n // a much smaller range. The following slider is hard to use.\n\n ui.slider(\"Full range f32 value\", f32::MIN/2.0, f32::MAX/2.0, &mut state.f32_value);\n\n // Note the `... 
/ 2.0` - anything larger is not supported by\n\n // the upstream C++ library\n\n ui.text(\"Note that for 32-bit/64-bit types, sliders are always limited to half of the natural type range!\");\n", "file_path": "imgui-examples/examples/slider.rs", "rank": 12, "score": 228033.03090201068 }, { "content": "fn example_2(ui: &Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Example 2: Slider arrays\")\n\n .size([700.0, 260.0], Condition::Appearing)\n\n .position([20.0, 120.0], Condition::Appearing);\n\n w.build(|| {\n\n ui.text(\"You can easily build a slider group from an array of values:\");\n\n ui.slider_config(\"[u8; 4]\", 0, u8::MAX)\n\n .build_array(&mut state.array);\n\n\n\n ui.text(\"You don't need to use arrays with known length; arbitrary slices can be used:\");\n\n let slice: &mut [u8] = &mut state.array[1..=2];\n\n ui.slider_config(\"subslice\", 0, u8::MAX).build_array(slice);\n\n });\n\n}\n\n\n", "file_path": "imgui-examples/examples/slider.rs", "rank": 13, "score": 228033.03090201068 }, { "content": "fn example_1(ui: &Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Example 1: Boolean radio buttons\")\n\n .size([700.0, 200.0], Condition::Appearing)\n\n .position([20.0, 120.0], Condition::Appearing);\n\n w.build(|| {\n\n ui.text_wrapped(\n\n \"Boolean radio buttons accept a boolean active state, which is passed as a value and \\\n\n not as a mutable reference. This means that it's not updated automatically, so you \\\n\n can implement any click behaviour you want. 
The return value is true if the button \\\n\n was clicked.\",\n\n );\n\n ui.text(state.notify_text);\n\n\n\n if ui.radio_button_bool(\"I'm permanently active\", true) {\n\n state.notify_text = \"*** Permanently active radio button was clicked\";\n\n }\n\n if ui.radio_button_bool(\"I'm permanently inactive\", false) {\n\n state.notify_text = \"*** Permanently inactive radio button was clicked\";\n\n }\n\n if ui.radio_button_bool(\"I toggle my state on click\", state.simple_bool) {\n\n state.simple_bool = !state.simple_bool; // flip state on click\n\n state.notify_text = \"*** Toggling radio button was clicked\";\n\n }\n\n });\n\n}\n\n\n", "file_path": "imgui-examples/examples/radio_button.rs", "rank": 14, "score": 225017.86373523623 }, { "content": "fn example_2(ui: &Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Example 2: Radio buttons\")\n\n .size([700.0, 300.0], Condition::Appearing)\n\n .position([20.0, 120.0], Condition::Appearing);\n\n w.build(|| {\n\n ui.text_wrapped(\n\n \"Normal radio buttons accept a mutable reference to state, and the value \\\n\n corresponding to this button. They are very flexible, because the value can be any \\\n\n type that is both Copy and PartialEq. 
This is especially useful with Rust enums\",\n\n );\n\n ui.text(state.notify_text);\n\n\n\n ui.separator();\n\n if ui.radio_button(\"I'm number 1\", &mut state.number, 1) {\n\n state.notify_text = \"*** Number 1 was clicked\";\n\n }\n\n if ui.radio_button(\"I'm number 2\", &mut state.number, 2) {\n\n state.notify_text = \"*** Number 2 was clicked\";\n\n }\n", "file_path": "imgui-examples/examples/radio_button.rs", "rank": 15, "score": 225017.86373523623 }, { "content": "fn example_1(ui: &Ui, state: &mut State) {\n\n let w = ui\n\n .window(\"Example 1: Basics\")\n\n .size([700.0, 300.0], Condition::Appearing)\n\n .position([20.0, 140.0], Condition::Appearing);\n\n w.build(|| {\n\n ui.text_wrapped(\n\n \"Color button is a widget that displays a color value as a clickable rectangle. \\\n\n It also supports a tooltip with detailed information about the color value. \\\n\n Try hovering over and clicking these buttons!\",\n\n );\n\n ui.text(state.notify_text);\n\n\n\n ui.text(\"This button is black:\");\n\n if ui.color_button(\"Black color\", [0.0, 0.0, 0.0, 1.0]) {\n\n state.notify_text = \"*** Black button was clicked\";\n\n }\n\n\n\n ui.text(\"This button is red:\");\n\n if ui.color_button(\"Red color\", [1.0, 0.0, 0.0, 1.0]) {\n", "file_path": "imgui-examples/examples/color_button.rs", "rank": 16, "score": 225017.86373523623 }, { "content": "fn show_example_app_manipulating_window_title(ui: &Ui) {\n\n ui.window(\"Same title as another window##1\")\n\n .position([100.0, 100.0], Condition::FirstUseEver)\n\n .build(|| {\n\n ui.text(\n\n \"This is window 1.\n\nMy title is the same as window 2, but my identifier is unique.\",\n\n );\n\n });\n\n ui.window(\"Same title as another window##2\")\n\n .position([100.0, 200.0], Condition::FirstUseEver)\n\n .build(|| {\n\n ui.text(\n\n \"This is window 2.\n\nMy title is the same as window 1, but my identifier is unique.\",\n\n );\n\n });\n\n let chars = ['|', '/', '-', '\\\\'];\n\n let ch_idx = (ui.time() / 0.25) as usize & 3;\n\n 
let num = ui.frame_count(); // The C++ version uses rand() here\n\n let title = format!(\"Animated title {} {}###AnimatedTitle\", chars[ch_idx], num);\n\n ui.window(title)\n\n .position([100.0, 300.0], Condition::FirstUseEver)\n\n .build(|| ui.text(\"This window has a changing title\"));\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 17, "score": 221042.69269381667 }, { "content": "fn show_help_marker(ui: &Ui, desc: &str) {\n\n ui.text_disabled(\"(?)\");\n\n if ui.is_item_hovered() {\n\n ui.tooltip(|| {\n\n ui.text(desc);\n\n });\n\n }\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 18, "score": 212583.68266550833 }, { "content": "/// Returns `true` if the provided event is associated with the provided window.\n\n///\n\n/// # Example\n\n/// ```rust,no_run\n\n/// # let mut event_pump: sdl2::EventPump = unimplemented!();\n\n/// # let window: sdl2::video::Window = unimplemented!();\n\n/// # let mut imgui = imgui::Context::create();\n\n/// # let mut platform = SdlPlatform::init(&mut imgui);\n\n/// use imgui_sdl2_support::{SdlPlatform, filter_event};\n\n/// // Assuming there are multiple windows, we only want to provide the events\n\n/// // of the window where we are rendering to imgui-rs\n\n/// for event in event_pump.poll_iter().filter(|event| filter_event(&window, event)) {\n\n/// platform.handle_event(&mut imgui, &event);\n\n/// }\n\n/// ```\n\npub fn filter_event(window: &Window, event: &Event) -> bool {\n\n Some(window.id()) == event.get_window_id()\n\n}\n\n\n\n/// SDL 2 backend platform state.\n\n///\n\n/// A backend platform handles window/input device events and manages their\n\n/// state.\n\n///\n\n/// There are three things you need to do to use this library correctly:\n\n///\n\n/// 1. Initialize a `SdlPlatform` instance\n\n/// 2. Pass events to the platform (every frame)\n\n/// 3. 
Call frame preparation callback (every frame)\n\npub struct SdlPlatform {\n\n cursor_instance: Option<Cursor>, /* to avoid dropping cursor instances */\n\n last_frame: Instant,\n\n mouse_buttons: [Button; 5],\n\n}\n\n\n", "file_path": "imgui-sdl2-support/src/lib.rs", "rank": 19, "score": 192381.8942552608 }, { "content": "fn example_3(ui: &Ui) {\n\n let w = ui\n\n .window(\"Example 3: Input format\")\n\n .size([700.0, 320.0], Condition::Appearing)\n\n .position([20.0, 140.0], Condition::Appearing);\n\n w.build(|| {\n\n ui.text(\"This button interprets the input value [1.0, 0.0, 0.0, 1.0] as RGB(A) (default):\");\n\n ui.color_button(\"RGBA red\", [1.0, 0.0, 0.0, 1.0]);\n\n\n\n ui.separator();\n\n ui.text(\"This button interprets the input value [1.0, 0.0, 0.0, 1.0] as HSV(A):\");\n\n ui.color_button_config(\"HSVA black\", [1.0, 0.0, 0.0, 1.0])\n\n .input_mode(ColorEditInputMode::HSV)\n\n .build();\n\n });\n\n}\n\n\n", "file_path": "imgui-examples/examples/color_button.rs", "rank": 20, "score": 178589.42520968162 }, { "content": "fn example_2(ui: &Ui) {\n\n let w = ui\n\n .window(\"Example 2: Alpha component\")\n\n .size([700.0, 320.0], Condition::Appearing)\n\n .position([20.0, 140.0], Condition::Appearing);\n\n w.build(|| {\n\n ui.text_wrapped(\n\n \"The displayed color is passed to the button as four float values between \\\n\n 0.0 - 1.0 (RGBA). 
If you don't care about the alpha component, it can be \\\n\n disabled and it won't show up in the tooltip\",\n\n );\n\n\n\n ui.text(\"This button ignores the alpha component:\");\n\n ui.color_button_config(\"Red color\", [1.0, 0.0, 0.0, 0.5])\n\n .alpha(false)\n\n .build();\n\n\n\n ui.spacing();\n\n ui.spacing();\n\n ui.spacing();\n", "file_path": "imgui-examples/examples/color_button.rs", "rank": 21, "score": 178589.42520968162 }, { "content": "fn no_current_context() -> bool {\n\n let ctx = unsafe { sys::igGetCurrentContext() };\n\n ctx.is_null()\n\n}\n\n\n\nimpl Context {\n\n /// Creates a new active imgui-rs context.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if an active context already exists\n\n #[doc(alias = \"CreateContext\")]\n\n pub fn create() -> Self {\n\n Self::create_internal(None)\n\n }\n\n /// Creates a new active imgui-rs context with a shared font atlas.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if an active context already exists\n", "file_path": "imgui/src/context.rs", "rank": 22, "score": 165694.57499843353 }, { "content": "#[inline]\n\nfn fmt_ptr() -> *const c_char {\n\n FMT.as_ptr() as *const c_char\n\n}\n\n\n\n/// Unique ID used by tree nodes\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum TreeNodeId<T> {\n\n Str(T),\n\n Ptr(*const c_void),\n\n}\n\n\n\nimpl<T: AsRef<str>> From<T> for TreeNodeId<T> {\n\n fn from(s: T) -> Self {\n\n TreeNodeId::Str(s)\n\n }\n\n}\n\n\n\n// this is a bit wonky here using the T param...\n\nimpl<T> From<*const T> for TreeNodeId<T> {\n\n fn from(p: *const T) -> Self {\n", "file_path": "imgui/src/widget/tree.rs", "rank": 23, "score": 153088.44154431103 }, { "content": "#[inline]\n\nfn fmt_ptr() -> *const c_char {\n\n FMT.as_ptr() as *const c_char\n\n}\n\n\n\n/// # Widgets: Text\n\nimpl Ui {\n\n /// Renders simple text\n\n #[doc(alias = \"TextUnformatted\")]\n\n pub fn text<T: AsRef<str>>(&self, text: T) {\n\n let s = text.as_ref();\n\n unsafe {\n\n let start = s.as_ptr();\n\n let end = 
start.add(s.len());\n\n sys::igTextUnformatted(start as *const c_char, end as *const c_char);\n\n }\n\n }\n\n /// Renders simple text using the given text color\n\n pub fn text_colored<T: AsRef<str>>(&self, color: impl Into<MintVec4>, text: T) {\n\n let style = self.push_style_color(StyleColor::Text, color);\n\n self.text(text);\n", "file_path": "imgui/src/widget/text.rs", "rank": 24, "score": 153088.44154431103 }, { "content": "pub fn project_root() -> PathBuf {\n\n Path::new(\n\n &std::env::var(\"CARGO_MANIFEST_DIR\")\n\n .unwrap_or_else(|_| env!(\"CARGO_MANIFEST_DIR\").to_owned()),\n\n )\n\n .ancestors()\n\n .nth(1)\n\n .unwrap()\n\n .to_path_buf()\n\n}\n", "file_path": "xtask/src/main.rs", "rank": 25, "score": 153087.11242149005 }, { "content": "#[test]\n\nfn test_drop_suspended() {\n\n let _guard = crate::test::TEST_MUTEX.lock();\n\n let suspended = Context::create().suspend();\n\n assert!(no_current_context());\n\n let ctx2 = Context::create();\n\n ::std::mem::drop(suspended);\n\n assert!(ctx2.is_current_context());\n\n}\n\n\n", "file_path": "imgui/src/context.rs", "rank": 26, "score": 151798.53268655683 }, { "content": "fn main() {\n\n let system = support::init(file!());\n\n system.main_loop(move |_, ui| {\n\n let items = vec![\"a\", \"b\", \"c\", \"d\"];\n\n\n\n ui.window(\"Broken Example\")\n\n .position([0.0, 0.0], imgui::Condition::FirstUseEver)\n\n .size([390.0, 200.0], imgui::Condition::FirstUseEver)\n\n .build(|| {\n\n ui.text(\"Broken! 
Only first button responds to clicks\");\n\n\n\n // Because all our buttons have the same label (and thus ID),\n\n // only the first button responds to clicks!\n\n for it in &items {\n\n ui.text(it);\n\n for num in 0..5 {\n\n ui.same_line();\n\n if ui.button(\"Example\") {\n\n println!(\"{}: {}\", it, num);\n\n }\n", "file_path": "imgui-examples/examples/id_wrangling.rs", "rank": 27, "score": 151688.36681598946 }, { "content": " DragDropAcceptFlags = ImGuiDragDropFlags_None;\n", "file_path": "imgui-sys/third-party/imgui-docking/imgui/imgui_internal.h", "rank": 28, "score": 151191.3676768944 }, { "content": " DragDropAcceptFlags = ImGuiDragDropFlags_None;\n", "file_path": "imgui-sys/third-party/imgui-master/imgui/imgui_internal.h", "rank": 29, "score": 151191.3676768944 }, { "content": "fn main() {\n\n let mut state = State::default();\n\n\n\n let system = support::init(file!());\n\n system.main_loop(move |run, ui| show_test_window(ui, &mut state, run));\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 30, "score": 149232.42527300882 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]\n\n#[repr(C)]\n\nstruct TypedPayloadHeader {\n\n type_id: any::TypeId,\n\n #[cfg(debug_assertions)]\n\n type_name: &'static str,\n\n}\n\n\n\nimpl TypedPayloadHeader {\n\n #[cfg(debug_assertions)]\n\n fn new<T: 'static>() -> Self {\n\n Self {\n\n type_id: any::TypeId::of::<T>(),\n\n type_name: any::type_name::<T>(),\n\n }\n\n }\n\n\n\n #[cfg(not(debug_assertions))]\n\n fn new<T: 'static>() -> Self {\n\n Self {\n\n type_id: any::TypeId::of::<T>(),\n\n }\n", "file_path": "imgui/src/drag_drop.rs", "rank": 31, "score": 149051.78929252343 }, { "content": "struct State {\n\n show_app_main_menu_bar: bool,\n\n show_app_console: bool,\n\n show_app_log: bool,\n\n show_app_layout: bool,\n\n show_app_property_editor: bool,\n\n show_app_long_text: bool,\n\n show_app_auto_resize: bool,\n\n show_app_constrained_resize: bool,\n\n 
show_app_fixed_overlay: bool,\n\n show_app_manipulating_window_title: bool,\n\n show_app_custom_rendering: bool,\n\n show_app_style_editor: bool,\n\n show_app_metrics: bool,\n\n show_app_about: bool,\n\n no_titlebar: bool,\n\n no_resize: bool,\n\n no_move: bool,\n\n no_scrollbar: bool,\n\n no_collapse: bool,\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 32, "score": 149009.33943633604 }, { "content": "#[test]\n\nfn test_style_color_variant_names() {\n\n for idx in StyleColor::VARIANTS.iter() {\n\n let our_name = idx.name();\n\n let their_name = unsafe {\n\n let ptr = sys::igGetStyleColorName(*idx as i32);\n\n std::ffi::CStr::from_ptr(ptr as *const _).to_str().unwrap()\n\n };\n\n\n\n assert_eq!(our_name, their_name);\n\n }\n\n}\n", "file_path": "imgui/src/style.rs", "rank": 33, "score": 146874.70510755279 }, { "content": "#[test]\n\nfn test_drop_clears_current_context() {\n\n let _guard = crate::test::TEST_MUTEX.lock();\n\n {\n\n let _ctx1 = Context::create();\n\n assert!(!no_current_context());\n\n }\n\n assert!(no_current_context());\n\n {\n\n let _ctx2 = Context::create();\n\n assert!(!no_current_context());\n\n }\n\n assert!(no_current_context());\n\n}\n\n\n", "file_path": "imgui/src/context.rs", "rank": 34, "score": 146850.64544518772 }, { "content": "struct TabState {\n\n // flags for the advanced tab example\n\n reorderable: bool,\n\n autoselect: bool,\n\n listbutton: bool,\n\n noclose_middlebutton: bool,\n\n fitting_resizedown: bool,\n\n fitting_scroll: bool,\n\n\n\n // opened state for tab items\n\n artichoke_tab: bool,\n\n beetroot_tab: bool,\n\n celery_tab: bool,\n\n daikon_tab: bool,\n\n}\n\n\n\nimpl Default for TabState {\n\n fn default() -> Self {\n\n Self {\n\n reorderable: true,\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 35, "score": 146590.82574863973 }, { "content": "#[doc(alias = \"GetVersion\")]\n\npub fn dear_imgui_version() -> &'static str {\n\n unsafe {\n\n let bytes = 
std::ffi::CStr::from_ptr(sys::igGetVersion()).to_bytes();\n\n std::str::from_utf8_unchecked(bytes)\n\n }\n\n}\n\n\n\nimpl Context {\n\n /// Returns the global imgui-rs time.\n\n ///\n\n /// Incremented by Io::delta_time every frame.\n\n #[doc(alias = \"GetTime\")]\n\n pub fn time(&self) -> f64 {\n\n unsafe { sys::igGetTime() }\n\n }\n\n /// Returns the global imgui-rs frame count.\n\n ///\n\n /// Incremented by 1 every frame.\n\n #[doc(alias = \"GetFrameCount\")]\n\n pub fn frame_count(&self) -> i32 {\n", "file_path": "imgui/src/lib.rs", "rank": 36, "score": 145555.29158792185 }, { "content": "#[test]\n\nfn test_texture_id_memory_layout() {\n\n use std::mem;\n\n assert_eq!(\n\n mem::size_of::<TextureId>(),\n\n mem::size_of::<sys::ImTextureID>()\n\n );\n\n assert_eq!(\n\n mem::align_of::<TextureId>(),\n\n mem::align_of::<sys::ImTextureID>()\n\n );\n\n}\n\n\n\n/// Generic texture mapping for use by renderers.\n\n#[derive(Debug, Default)]\n\npub struct Textures<T> {\n\n textures: HashMap<usize, T>,\n\n next: usize,\n\n}\n\n\n\nimpl<T> Textures<T> {\n", "file_path": "imgui/src/render/renderer.rs", "rank": 37, "score": 144416.30009320972 }, { "content": "struct TexturesUi {\n\n generated_texture: imgui::TextureId,\n\n lenna: Lenna,\n\n}\n\n\n\nimpl TexturesUi {\n\n fn new(gl: &glow::Context, textures: &mut imgui::Textures<glow::Texture>) -> Self {\n\n Self {\n\n generated_texture: Self::generate(gl, textures),\n\n lenna: Lenna::load(gl, textures),\n\n }\n\n }\n\n\n\n /// Generate dummy texture\n\n fn generate(\n\n gl: &glow::Context,\n\n textures: &mut imgui::Textures<glow::Texture>,\n\n ) -> imgui::TextureId {\n\n const WIDTH: usize = 100;\n\n const HEIGHT: usize = 100;\n", "file_path": "imgui-glow-renderer/examples/glow_04_custom_textures.rs", "rank": 38, "score": 144337.08656466834 }, { "content": "struct ColorEditState {\n\n color: [f32; 4],\n\n hdr: bool,\n\n alpha_preview: bool,\n\n alpha_half_preview: bool,\n\n options_menu: bool,\n\n alpha: bool,\n\n alpha_bar: 
bool,\n\n side_preview: bool,\n\n ref_color: bool,\n\n ref_color_v: [f32; 4],\n\n}\n\n\n\nimpl Default for ColorEditState {\n\n fn default() -> Self {\n\n ColorEditState {\n\n color: [114.0 / 255.0, 144.0 / 255.0, 154.0 / 255.0, 200.0 / 255.0],\n\n hdr: false,\n\n alpha_preview: true,\n\n alpha_half_preview: false,\n\n options_menu: true,\n\n alpha: true,\n\n alpha_bar: true,\n\n side_preview: true,\n\n ref_color: false,\n\n ref_color_v: [1.0, 0.0, 1.0, 0.5],\n\n }\n\n }\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 39, "score": 144265.75666200396 }, { "content": "struct FileMenuState {\n\n enabled: bool,\n\n f: f32,\n\n n: usize,\n\n b: bool,\n\n}\n\n\n\nimpl Default for FileMenuState {\n\n fn default() -> Self {\n\n FileMenuState {\n\n enabled: true,\n\n f: 0.5,\n\n n: 0,\n\n b: true,\n\n }\n\n }\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 40, "score": 144265.75666200396 }, { "content": "struct AutoResizeState {\n\n lines: i32,\n\n}\n\n\n\nimpl Default for AutoResizeState {\n\n fn default() -> Self {\n\n AutoResizeState { lines: 10 }\n\n }\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 41, "score": 144265.75666200396 }, { "content": "struct CustomRenderingState {\n\n sz: f32,\n\n col: [f32; 3],\n\n points: Vec<[f32; 2]>,\n\n adding_line: bool,\n\n}\n\n\n\nimpl Default for CustomRenderingState {\n\n fn default() -> Self {\n\n CustomRenderingState {\n\n sz: 36.0,\n\n col: [1.0, 1.0, 0.4],\n\n points: vec![],\n\n adding_line: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "imgui-examples/examples/test_window_impl.rs", "rank": 42, "score": 144265.75666200396 }, { "content": "pub fn init() -> Option<ClipboardSupport> {\n\n ClipboardContext::new().ok().map(ClipboardSupport)\n\n}\n\n\n\nimpl ClipboardBackend for ClipboardSupport {\n\n fn get(&mut self) -> Option<String> {\n\n self.0.get_contents().ok()\n\n }\n\n fn set(&mut self, text: &str) {\n\n // ignore 
errors?\n\n let _ = self.0.set_contents(text.to_owned());\n\n }\n\n}\n", "file_path": "imgui-examples/examples/support/clipboard.rs", "rank": 43, "score": 143312.44941630666 }, { "content": "#[derive(Debug, Clone, Copy)]\n\n#[repr(C)]\n\nstruct TypedPayload<T> {\n\n header: TypedPayloadHeader,\n\n data: T,\n\n}\n\n\n\nimpl<T: Copy + 'static> TypedPayload<T> {\n\n /// Creates a new typed payload which contains this data.\n\n fn new(data: T) -> Self {\n\n Self {\n\n header: TypedPayloadHeader::new::<T>(),\n\n data,\n\n }\n\n }\n\n}\n\n\n\n/// A header for a typed payload.\n", "file_path": "imgui/src/drag_drop.rs", "rank": 44, "score": 142853.62834052136 }, { "content": "fn compile_default_program<F: Facade>(facade: &F) -> Result<Program, ProgramChooserCreationError> {\n\n program!(\n\n facade,\n\n 400 => {\n\n vertex: include_str!(\"shader/glsl_400.vert\"),\n\n fragment: include_str!(\"shader/glsl_400.frag\"),\n\n outputs_srgb: true,\n\n },\n\n 150 => {\n\n vertex: include_str!(\"shader/glsl_150.vert\"),\n\n fragment: include_str!(\"shader/glsl_150.frag\"),\n\n outputs_srgb: true,\n\n },\n\n 130 => {\n\n vertex: include_str!(\"shader/glsl_130.vert\"),\n\n fragment: include_str!(\"shader/glsl_130.frag\"),\n\n outputs_srgb: true,\n\n },\n\n 110 => {\n\n vertex: include_str!(\"shader/glsl_110.vert\"),\n", "file_path": "imgui-glium-renderer/src/lib.rs", "rank": 45, "score": 140382.1900560853 }, { "content": "pub fn init(title: &str) -> System {\n\n let title = match Path::new(&title).file_name() {\n\n Some(file_name) => file_name.to_str().unwrap(),\n\n None => title,\n\n };\n\n let event_loop = EventLoop::new();\n\n let context = glutin::ContextBuilder::new().with_vsync(true);\n\n let builder = WindowBuilder::new()\n\n .with_title(title.to_owned())\n\n .with_inner_size(glutin::dpi::LogicalSize::new(1024f64, 768f64));\n\n let display =\n\n Display::new(builder, context, &event_loop).expect(\"Failed to initialize display\");\n\n\n\n let mut imgui = Context::create();\n\n 
imgui.set_ini_filename(None);\n\n\n\n if let Some(backend) = clipboard::init() {\n\n imgui.set_clipboard_backend(backend);\n\n } else {\n\n eprintln!(\"Failed to initialize clipboard\");\n", "file_path": "imgui-examples/examples/support/mod.rs", "rank": 46, "score": 139549.8558324703 }, { "content": "pub fn test_ctx() -> (ReentrantMutexGuard<'static, ()>, Context) {\n\n let guard = TEST_MUTEX.lock();\n\n let mut ctx = Context::create();\n\n ctx.io_mut().ini_filename = ptr::null();\n\n (guard, ctx)\n\n}\n\n\n", "file_path": "imgui/src/test.rs", "rank": 47, "score": 137390.44716885017 }, { "content": "pub fn test_ctx_initialized() -> (ReentrantMutexGuard<'static, ()>, Context) {\n\n let (guard, mut ctx) = test_ctx();\n\n let io = ctx.io_mut();\n\n io.display_size = [1024.0, 768.0];\n\n io.delta_time = 1.0 / 60.0;\n\n io.mouse_pos = [0.0, 0.0];\n\n ctx.fonts().build_rgba32_texture();\n\n (guard, ctx)\n\n}\n", "file_path": "imgui/src/test.rs", "rank": 48, "score": 135309.90144808558 }, { "content": "pub fn glow_context(window: &Window) -> glow::Context {\n\n unsafe { glow::Context::from_loader_function(|s| window.get_proc_address(s).cast()) }\n\n}\n\n\n", "file_path": "imgui-glow-renderer/examples/utils/mod.rs", "rank": 49, "score": 130455.40787012911 }, { "content": "/// Handle changes in the key modifier states.\n\nfn handle_key_modifier(io: &mut Io, keymod: &Mod) {\n\n io.key_shift = keymod.intersects(Mod::LSHIFTMOD | Mod::RSHIFTMOD);\n\n io.key_ctrl = keymod.intersects(Mod::LCTRLMOD | Mod::RCTRLMOD);\n\n io.key_alt = keymod.intersects(Mod::LALTMOD | Mod::RALTMOD);\n\n io.key_super = keymod.intersects(Mod::LGUIMOD | Mod::RGUIMOD);\n\n}\n\n\n", "file_path": "imgui-sdl2-support/src/lib.rs", "rank": 50, "score": 130454.93647883652 }, { "content": "pub fn imgui_init(window: &Window) -> (WinitPlatform, imgui::Context) {\n\n let mut imgui_context = imgui::Context::create();\n\n imgui_context.set_ini_filename(None);\n\n\n\n let mut winit_platform = WinitPlatform::init(&mut 
imgui_context);\n\n winit_platform.attach_window(\n\n imgui_context.io_mut(),\n\n window.window(),\n\n imgui_winit_support::HiDpiMode::Rounded,\n\n );\n\n\n\n imgui_context\n\n .fonts()\n\n .add_font(&[imgui::FontSource::DefaultFontData { config: None }]);\n\n\n\n imgui_context.io_mut().font_global_scale = (1.0 / winit_platform.hidpi_factor()) as f32;\n\n\n\n (winit_platform, imgui_context)\n\n}\n\n\n", "file_path": "imgui-glow-renderer/examples/utils/mod.rs", "rank": 51, "score": 124420.38715656754 }, { "content": "fn get_types(structs_and_enums: &Path) -> Result<Vec<String>> {\n\n let types_txt = std::fs::read_to_string(structs_and_enums)?;\n\n let types_val = types_txt\n\n .parse::<smoljson::ValOwn>()\n\n .map_err(|e| anyhow!(\"Failed to parse {}: {:?}\", structs_and_enums.display(), e))?;\n\n let mut types: Vec<String> = types_val[\"enums\"]\n\n .as_object()\n\n .ok_or_else(|| anyhow!(\"No `enums` in bindings file\"))?\n\n .keys()\n\n .map(|k| format!(\"^{}\", k))\n\n .collect();\n\n types.extend(\n\n types_val[\"structs\"]\n\n .as_object()\n\n .ok_or_else(|| anyhow!(\"No `structs` in bindings file\"))?\n\n .keys()\n\n .map(|k| format!(\"^{}\", k)),\n\n );\n\n Ok(types)\n\n}\n\n\n", "file_path": "xtask/src/bindgen.rs", "rank": 52, "score": 123964.0534933666 }, { "content": "fn calculate_matrix(draw_data: &DrawData, clip_origin_is_lower_left: bool) -> [f32; 16] {\n\n #![allow(clippy::deprecated_cfg_attr)]\n\n\n\n let left = draw_data.display_pos[0];\n\n let right = draw_data.display_pos[0] + draw_data.display_size[0];\n\n let top = draw_data.display_pos[1];\n\n let bottom = draw_data.display_pos[1] + draw_data.display_size[1];\n\n\n\n #[cfg(feature = \"clip_origin_support\")]\n\n let (top, bottom) = if clip_origin_is_lower_left {\n\n (top, bottom)\n\n } else {\n\n (bottom, top)\n\n };\n\n\n\n #[cfg_attr(rustfmt, rustfmt::skip)]\n\n {\n\n [\n\n 2.0 / (right - left) , 0.0 , 0.0 , 0.0,\n\n 0.0 , (2.0 / (top - bottom)) , 0.0 , 0.0,\n", "file_path": 
"imgui-glow-renderer/src/lib.rs", "rank": 53, "score": 120871.39872801113 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_Shutdown(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 54, "score": 118416.16376376028 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL2_Init(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 55, "score": 118416.16376376028 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL3_Init(const char* glsl_version);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 56, "score": 118416.16376376028 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL2_Init(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 57, "score": 118416.16376376028 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_Shutdown(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 58, "score": 118416.16376376028 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_Shutdown(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 59, "score": 118416.16376376028 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL3_Init(const char* glsl_version);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 60, "score": 118416.16376376028 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_Shutdown(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 61, "score": 118416.16376376028 }, { "content": "pub fn create_window(title: &str, gl_request: GlRequest) -> (EventLoop<()>, Window) {\n\n let event_loop = glutin::event_loop::EventLoop::new();\n\n let window = glutin::window::WindowBuilder::new()\n\n .with_title(title)\n\n .with_inner_size(glutin::dpi::LogicalSize::new(1024, 768));\n\n let window = glutin::ContextBuilder::new()\n\n .with_gl(gl_request)\n\n .with_vsync(true)\n\n .build_windowed(window, &event_loop)\n\n .expect(\"could not create window\");\n\n let 
window = unsafe {\n\n window\n\n .make_current()\n\n .expect(\"could not make window context current\")\n\n };\n\n (event_loop, window)\n\n}\n\n\n", "file_path": "imgui-glow-renderer/examples/utils/mod.rs", "rank": 62, "score": 117389.22519217264 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_NewFrame(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 63, "score": 116843.61503046485 }, { "content": "CIMGUI_API bool ImGui_ImplSDL2_InitForOpenGL(SDL_Window* window,void* sdl_gl_context);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 64, "score": 116843.61503046485 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_NewFrame(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 65, "score": 116843.61503046485 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_NewFrame(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 66, "score": 116843.61503046485 }, { "content": "typedef union SDL_Event SDL_Event;CIMGUI_API bool ImGui_ImplGlfw_InitForOpenGL(GLFWwindow* window,bool install_callbacks);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 67, "score": 116843.61503046485 }, { "content": "CIMGUI_API bool ImGui_ImplSDL2_InitForOpenGL(SDL_Window* window,void* sdl_gl_context);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 68, "score": 116843.61503046485 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_NewFrame(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 69, "score": 116843.61503046485 }, { "content": "typedef union SDL_Event SDL_Event;CIMGUI_API bool ImGui_ImplGlfw_InitForOpenGL(GLFWwindow* window,bool install_callbacks);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 70, "score": 116843.61503046485 }, { "content": "#[cfg(any(target_vendor = \"apple\", not(feature = \"debug_message_insert_support\")))]\n\nfn 
gl_debug_message<G: glow::HasContext>(_gl: &G, _message: impl AsRef<str>) {}\n\n\n", "file_path": "imgui-glow-renderer/src/lib.rs", "rank": 71, "score": 116303.64280366685 }, { "content": "#[cfg(all(not(target_vendor = \"apple\"), feature = \"debug_message_insert_support\"))]\n\nfn gl_debug_message<G: glow::HasContext>(gl: &G, message: impl AsRef<str>) {\n\n unsafe {\n\n gl.debug_message_insert(\n\n glow::DEBUG_SOURCE_APPLICATION,\n\n glow::DEBUG_TYPE_MARKER,\n\n 0,\n\n glow::DEBUG_SEVERITY_NOTIFICATION,\n\n message,\n\n )\n\n };\n\n}\n\n\n", "file_path": "imgui-glow-renderer/src/lib.rs", "rank": 72, "score": 116303.64280366685 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL2_CreateDeviceObjects(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 73, "score": 115314.03767156787 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL2_CreateFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 74, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_RenderDrawData(ImDrawData* draw_data);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 75, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_DestroyFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 76, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_DestroyDeviceObjects(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 77, "score": 115314.03767156787 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL3_CreateFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 78, "score": 115314.03767156787 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL3_CreateFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 79, "score": 115314.03767156787 }, { "content": "CIMGUI_API bool 
ImGui_ImplOpenGL3_CreateDeviceObjects(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 80, "score": 115314.03767156787 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL3_CreateDeviceObjects(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 81, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_RenderDrawData(ImDrawData* draw_data);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 82, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_RenderDrawData(ImDrawData* draw_data);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 83, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_DestroyDeviceObjects(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 84, "score": 115314.03767156787 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL2_CreateFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 85, "score": 115314.03767156787 }, { "content": "CIMGUI_API bool ImGui_ImplOpenGL2_CreateDeviceObjects(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 86, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_DestroyFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 87, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_DestroyFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 88, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_RenderDrawData(ImDrawData* draw_data);\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 89, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_DestroyDeviceObjects(void);\n", "file_path": 
"imgui-sys/third-party/imgui-docking/cimgui_impl.h", "rank": 90, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL3_DestroyFontsTexture(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 91, "score": 115314.03767156787 }, { "content": "CIMGUI_API void ImGui_ImplOpenGL2_DestroyDeviceObjects(void);\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui_impl.h", "rank": 92, "score": 115314.03767156787 }, { "content": " ImGuiID OpenParentId;\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui.h", "rank": 93, "score": 109744.0998733625 }, { "content": " ImGuiID OpenParentId;\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui.h", "rank": 94, "score": 109744.0998733625 }, { "content": " ImGuiDragDropFlags DragDropSourceFlags;\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui.h", "rank": 95, "score": 107872.39126921022 }, { "content": " ImGuiDragDropFlags DragDropSourceFlags;\n", "file_path": "imgui-sys/third-party/imgui-master/cimgui.h", "rank": 96, "score": 107872.39126921022 }, { "content": " ImGuiDragDropFlags DragDropAcceptFlags;\n", "file_path": "imgui-sys/third-party/imgui-docking/cimgui.h", "rank": 97, "score": 107872.39126921022 }, { "content": " }\n\n }\n\n /// Builds the collapsing header, and adds an additional close button that changes the value of\n\n /// the given mutable reference when clicked.\n\n ///\n\n /// Returns true if the collapsing header is open and content should be rendered.\n\n #[must_use]\n\n #[inline]\n\n pub fn build_with_close_button(self, ui: &Ui, opened: &mut bool) -> bool {\n\n unsafe {\n\n sys::igCollapsingHeader_BoolPtr(\n\n ui.scratch_txt(self.label),\n\n opened as *mut bool,\n\n self.flags.bits() as i32,\n\n )\n\n }\n\n }\n\n}\n\n\n\nimpl Ui {\n", "file_path": "imgui/src/widget/tree.rs", "rank": 98, "score": 77.24148790823443 }, { "content": " /// Returns true if the collapsing header is open and content should be rendered.\n\n ///\n\n /// 
This is the same as [build_with_close_button](Self::build_with_close_button)\n\n /// but is provided for consistent naming.\n\n #[must_use]\n\n pub fn begin_with_close_button(self, ui: &Ui, opened: &mut bool) -> bool {\n\n self.build_with_close_button(ui, opened)\n\n }\n\n\n\n /// Builds the collapsing header.\n\n ///\n\n /// Returns true if the collapsing header is open and content should be rendered.\n\n #[must_use]\n\n #[inline]\n\n pub fn build(self, ui: &Ui) -> bool {\n\n unsafe {\n\n sys::igCollapsingHeader_TreeNodeFlags(\n\n ui.scratch_txt(self.label),\n\n self.flags.bits() as i32,\n\n )\n", "file_path": "imgui/src/widget/tree.rs", "rank": 99, "score": 76.21151648692765 } ]
Rust
bolero-generator/src/uniform.rs
zhassan-aws/bolero
04a73b946da7241c188816524aaaec60d60658a3
use crate::{bounded::BoundExt, driver::DriverMode}; use core::ops::{Bound, RangeBounds}; pub trait Uniform: Sized { fn sample<F: FillBytes>(fill: &mut F, min: Bound<&Self>, max: Bound<&Self>) -> Option<Self>; } pub trait FillBytes { fn mode(&self) -> DriverMode; fn fill_bytes(&mut self, bytes: &mut [u8]) -> Option<()>; } macro_rules! uniform_int { ($ty:ident, $unsigned:ident $(, $smaller:ident)?) => { impl Uniform for $ty { #[inline] fn sample<F: FillBytes>(fill: &mut F, min: Bound<&$ty>, max: Bound<&$ty>) -> Option<$ty> { match (min, max) { (Bound::Unbounded, Bound::Unbounded) | (Bound::Unbounded, Bound::Included(&$ty::MAX)) | (Bound::Included(&$ty::MIN), Bound::Unbounded) | (Bound::Included(&$ty::MIN), Bound::Included(&$ty::MAX)) => { let mut bytes = [0u8; core::mem::size_of::<$ty>()]; fill.fill_bytes(&mut bytes)?; return Some(<$ty>::from_le_bytes(bytes)); } (Bound::Included(&x), Bound::Included(&y)) if x == y => { return Some(x); } (Bound::Included(&x), Bound::Excluded(&y)) if x + 1 == y => { return Some(x); } _ => {} } if fill.mode() == DriverMode::Direct { return Self::sample(fill, Bound::Unbounded, Bound::Unbounded) .filter(|value| (min, max).contains(value)); } let lower = match min { Bound::Included(&v) => v, Bound::Excluded(v) => v.saturating_add(1), Bound::Unbounded => $ty::MIN, }; let upper = match max { Bound::Included(&v) => v, Bound::Excluded(v) => v.saturating_sub(1), Bound::Unbounded => $ty::MAX, }; let (lower, upper) = if upper > lower { (lower, upper) } else { (upper, lower) }; let range = upper.wrapping_sub(lower) as $unsigned; if range == 0 { return Some(lower); } $({ use core::convert::TryInto; if let Ok(range) = range.try_into() { let value: $smaller = Uniform::sample(fill, Bound::Unbounded, Bound::Included(&range))?; let value = value as $ty; let value = lower.wrapping_add(value); if cfg!(test) { assert!((min, max).contains(&value), "{:?} < {} < {:?}", min, value, max); } return Some(value); } })? 
let value: $unsigned = Uniform::sample(fill, Bound::Unbounded, Bound::Unbounded)?; let value = value % range; let value = value as $ty; let value = lower.wrapping_add(value); if cfg!(test) { assert!((min, max).contains(&value), "{:?} < {} < {:?}", min, value, max); } Some(value) } } }; } uniform_int!(u8, u8); uniform_int!(i8, u8); uniform_int!(u16, u16, u8); uniform_int!(i16, u16, u8); uniform_int!(u32, u32, u16); uniform_int!(i32, u32, u16); uniform_int!(u64, u64, u32); uniform_int!(i64, u64, u32); uniform_int!(u128, u128, u64); uniform_int!(i128, u128, u64); uniform_int!(usize, usize, u64); uniform_int!(isize, usize, u64); impl Uniform for char { #[inline] fn sample<F: FillBytes>(fill: &mut F, min: Bound<&Self>, max: Bound<&Self>) -> Option<Self> { if fill.mode() == DriverMode::Direct { let value = u32::sample(fill, Bound::Unbounded, Bound::Unbounded)?; return char::from_u32(value); } const START: u32 = 0xD800; const LEN: u32 = 0xE000 - START; fn map_to_u32(c: &char) -> u32 { match *c as u32 { c if c >= START => c - LEN, c => c, } } let lower = BoundExt::map(min, map_to_u32); let upper = match max { Bound::Excluded(v) => Bound::Excluded(map_to_u32(v)), Bound::Included(v) => Bound::Included(map_to_u32(v)), Bound::Unbounded => Bound::Included(map_to_u32(&char::MAX)), }; let mut value = u32::sample(fill, BoundExt::as_ref(&lower), BoundExt::as_ref(&upper))?; if value >= START { value += LEN; } char::from_u32(value) } }
use crate::{bounded::BoundExt, driver::DriverMode}; use core::ops::{Bound, RangeBounds}; pub trait Uniform: Sized { fn sample<F: FillBytes>(fill: &mut F, min: Bound<&Self>, max: Bound<&Self>) -> Option<Self>; } pub trait FillBytes { fn mode(&self) -> DriverMode; fn fill_bytes(&mut self, bytes: &mut [u8]) -> Option<()>; } macro_rules! uniform_int { ($ty:ident, $unsigned:ident $(, $smaller:ident)?) => { impl Uniform for $ty { #[inline] fn sample<F: FillBytes>(fill: &mut F, min: Bound<&$ty>, max: Bound<&$ty>) -> Option<$ty> { match (min, max) { (Bound::Unbounded, Bound::Unbounded) | (Bound::Unbounded, Bound::Included(&$ty::MAX)) | (Bound::Included(&$ty::MIN), Bound::Unbounded) | (Bound::Included(&$ty::MIN), Bound::Included(&$ty::MAX)) => { let mut bytes = [0u8; core::mem::size_of::<$ty>()]; fill.fill_bytes(&mut bytes)?; return Some(<$ty>::from_le_bytes(bytes)); } (Bound::Included(&x), Bound::Included(&y)) if x == y => { return Some(x); } (Bound::Included(&x), Bound::Excluded(&y)) if x + 1 == y => { return Some(x); } _ => {} } if fill.mode() == DriverMode::Direct { return Self::sample(fill, Bound::Unbounded, Bound::Unbounded) .filter(|value| (min, max).contains(value)); } let lower = match min { Bound::Included(&v) => v, Bound::Excluded(v) => v.saturating_add(1), Bound::Unbounded => $ty::MIN, }; let upper = match max { Bound::Included(&v) => v, Bound::Excluded(v) => v.saturating_sub(1), Bound::Unbounded => $ty::MAX, }; let (lower, upper) = if upper > lower { (lower, upper) } else { (upper, lower) }; let range = upper.wrapping_sub(lower) as $unsigned; if range == 0 { return Some(lower); } $({ use core::convert::TryInto; if let Ok(range) = range.try_into() { let value: $smaller = Uniform::sample(fill, Bound::Unbounded, Bound::Included(&range))?; let value = value as $ty; let value = lower.wrapping_add(value); if cfg!(test) { assert!((min, max).contains(&value), "{:?} < {} < {:?}", min, value, max); } return Some(value); } })? 
let value: $unsigned = Uniform::sample(fill, Bound::Unbounded, Bound::Unbounded)?; let value = value % range; let value = value as $ty; let value = lower.wrapping_add(value); if cfg!(test) { assert!((min, max).contains(&value), "{:?} < {} < {:?}", min, value, max); } Some(value) } } }; } uniform_int!(u8, u8); uniform_int!(i8, u8); uniform_int!(u16, u16, u8); uniform_int!(i16, u16, u8); uniform_int!(u32, u32, u16); uniform_int!(i32, u32, u16); uniform_int!(u64, u64, u32); uniform_int!(i64, u64, u32); uniform_int!(u128, u128, u64); uniform_int!(i128, u128, u64); uniform_int!(usize, usize, u64); uniform_int!(isize, usize, u64); impl Uniform for char { #[inline] fn sample<F: FillBytes>(fill: &mut F, min: Bound<&Self>, max: Bound<&Self>) -> Option<Self> {
const START: u32 = 0xD800; const LEN: u32 = 0xE000 - START; fn map_to_u32(c: &char) -> u32 { match *c as u32 { c if c >= START => c - LEN, c => c, } } let lower = BoundExt::map(min, map_to_u32); let upper = match max { Bound::Excluded(v) => Bound::Excluded(map_to_u32(v)), Bound::Included(v) => Bound::Included(map_to_u32(v)), Bound::Unbounded => Bound::Included(map_to_u32(&char::MAX)), }; let mut value = u32::sample(fill, BoundExt::as_ref(&lower), BoundExt::as_ref(&upper))?; if value >= START { value += LEN; } char::from_u32(value) } }
if fill.mode() == DriverMode::Direct { let value = u32::sample(fill, Bound::Unbounded, Bound::Unbounded)?; return char::from_u32(value); }
if_condition
[ { "content": "pub trait BoundedValue<B = Self>: Sized {\n\n fn gen_bounded<D: Driver>(driver: &mut D, min: Bound<&B>, max: Bound<&B>) -> Option<Self>;\n\n\n\n fn mutate_bounded<D: Driver>(\n\n &mut self,\n\n driver: &mut D,\n\n min: Bound<&B>,\n\n max: Bound<&B>,\n\n ) -> Option<()> {\n\n *self = Self::gen_bounded(driver, min, max)?;\n\n Some(())\n\n }\n\n}\n\n\n\nmacro_rules! range_generator {\n\n ($ty:ident) => {\n\n impl<T: BoundedValue> ValueGenerator for core::ops::$ty<T> {\n\n type Output = T;\n\n\n\n fn generate<D: Driver>(&self, driver: &mut D) -> Option<Self::Output> {\n", "file_path": "bolero-generator/src/bounded.rs", "rank": 0, "score": 253645.68147508032 }, { "content": "fn lower_type_index(value: usize, max: usize, span: Span) -> TokenStream2 {\n\n assert!(value <= max);\n\n\n\n if max == 0 {\n\n return Error::new(span, \"Empty enums cannot be generated\").to_compile_error();\n\n }\n\n\n\n if max < core::u8::MAX as usize {\n\n let value = value as u8;\n\n return quote_spanned!(span=> #value);\n\n }\n\n\n\n if max < core::u16::MAX as usize {\n\n let value = value as u16;\n\n return quote_spanned!(span=> #value);\n\n }\n\n\n\n assert!(max < core::u32::MAX as usize);\n\n let value = value as u32;\n\n return quote_spanned!(span=> #value);\n\n}\n\n\n", "file_path": "bolero-generator-derive/src/lib.rs", "rank": 3, "score": 217800.4334764277 }, { "content": "/// Generate a value with a parameterized generator\n\npub trait ValueGenerator: Sized {\n\n type Output;\n\n\n\n /// Generates a value with the given driver\n\n fn generate<D: Driver>(&self, driver: &mut D) -> Option<Self::Output>;\n\n\n\n /// Mutates an existing value with the given driver\n\n fn mutate<D: Driver>(&self, driver: &mut D, value: &mut Self::Output) -> Option<()> {\n\n *value = self.generate(driver)?;\n\n Some(())\n\n }\n\n\n\n /// Map the value of a generator\n\n fn map<F: Fn(Self::Output) -> T, T>(self, map: F) -> MapGenerator<Self, F> {\n\n MapGenerator {\n\n generator: self,\n\n 
map,\n\n }\n\n }\n\n\n", "file_path": "bolero-generator/src/lib.rs", "rank": 4, "score": 215460.1119463975 }, { "content": "fn gen_foo() -> impl ValueGenerator<Output = u32> {\n\n 4..6\n\n}\n\n\n\n#[derive(TypeGenerator)]\n\npub struct Unit;\n\n\n\n#[derive(Debug, Clone, TypeGenerator, PartialEq)]\n\npub struct NewType(#[generator(4..10)] u64);\n\n\n\n#[derive(Debug, Clone, TypeGenerator, PartialEq)]\n\npub struct Struct {\n\n #[generator(gen_foo())]\n\n field_a: u32,\n\n\n\n #[generator(Vec::gen().with().len(1usize..5))]\n\n field_b: Vec<NewType>,\n\n\n\n #[generator(_code = \"gen::<u8>().with()\")]\n\n field_c: u8,\n", "file_path": "bolero-generator/tests/derive_test.rs", "rank": 5, "score": 203116.07166236336 }, { "content": "pub fn run(a: u8, b: u8) -> u8 {\n\n if a == 4 && b == 5 && std::env::var(\"SHOULD_PANIC\").is_ok() {\n\n panic!(\"it found me\");\n\n }\n\n\n\n 0\n\n}\n\n\n", "file_path": "examples/workspace/crate_b/src/lib.rs", "rank": 6, "score": 193469.4434837968 }, { "content": "pub fn run(a: u8, b: u8) -> u8 {\n\n if a == 1 && b == 2 && std::env::var(\"SHOULD_PANIC\").is_ok() {\n\n panic!(\"it found me\");\n\n }\n\n\n\n 0\n\n}\n\n\n", "file_path": "examples/workspace/crate_a/src/lib.rs", "rank": 7, "score": 193469.4434837968 }, { "content": "/// Pick a value for the provided set of values\n\npub fn one_value_of<O: OneValueOfGenerator>(options: O) -> OneValueOf<O> {\n\n OneValueOf(options)\n\n}\n\n\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 8, "score": 187471.31120879776 }, { "content": "/// Doctest to make sure it compiles\n\n/// ```\n\n/// assert_eq!(basic::add(1, 2, false), 3);\n\n/// ```\n\npub fn add(a: u8, b: u8, should_panic: bool) -> u8 {\n\n if should_panic {\n\n a + b\n\n } else {\n\n a.saturating_add(b)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use bolero::{check, generator::*};\n\n\n\n #[test]\n\n fn add_test() {\n\n let should_panic = std::env::var(\"SHOULD_PANIC\").is_ok();\n\n\n\n check!()\n\n 
.with_generator((0..254).map_gen(|a: u8| (a, a + 1)))\n\n .cloned()\n", "file_path": "examples/basic/src/lib.rs", "rank": 9, "score": 183557.71942267832 }, { "content": "/// Extensions for picking a value from a set of values\n\npub trait OneValueOfExt {\n\n type Generator;\n\n\n\n /// Pick a value for the provided set of values\n\n fn one_value_of(self) -> OneValueOf<Self::Generator>;\n\n}\n\n\n\nimpl<O: OneOfGenerator> OneOfExt for O {\n\n type Generator = O;\n\n\n\n fn one_of(self) -> OneOf<Self> {\n\n OneOf(self)\n\n }\n\n}\n\n\n\nimpl<O: OneValueOfGenerator> OneValueOfExt for O {\n\n type Generator = O;\n\n\n\n fn one_value_of(self) -> OneValueOf<Self> {\n\n OneValueOf(self)\n\n }\n\n}\n\n\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 10, "score": 178451.95470425824 }, { "content": "pub trait OneValueOfGenerator {\n\n type Output;\n\n\n\n fn generate_one_value_of<D: Driver>(&self, _driver: &mut D) -> Option<Self::Output>;\n\n fn mutate_one_value_of<D: Driver>(\n\n &self,\n\n driver: &mut D,\n\n value: &mut Self::Output,\n\n ) -> Option<()>;\n\n}\n\n\n\nimpl<Output, T: ValueGenerator<Output = Output>> OneOfGenerator for &[T] {\n\n type Output = Output;\n\n\n\n fn generate_one_of<D_: Driver>(&self, driver: &mut D_) -> Option<Self::Output> {\n\n let index = (0..self.len()).generate(driver)?;\n\n self[index].generate(driver)\n\n }\n\n\n\n fn mutate_one_of<D: Driver>(&self, driver: &mut D, value: &mut Self::Output) -> Option<()> {\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 11, "score": 178446.0753914017 }, { "content": "/// Trait for driving the generation of a value\n\n///\n\n/// In a test engine, this is typically backed by\n\n/// a byte slice, but other drivers can be used instead, e.g.\n\n/// an RNG implementation.\n\npub trait Driver: Sized {\n\n /// Generate a value with type `T`\n\n fn gen<T: TypeGenerator>(&mut self) -> Option<T> {\n\n T::generate(self)\n\n }\n\n\n\n gen_method!(gen_u8, u8);\n\n gen_method!(gen_i8, i8);\n\n 
gen_method!(gen_u16, u16);\n\n gen_method!(gen_i16, i16);\n\n gen_method!(gen_u32, u32);\n\n gen_method!(gen_i32, i32);\n\n gen_method!(gen_u64, u64);\n\n gen_method!(gen_i64, i64);\n\n gen_method!(gen_u128, u128);\n\n gen_method!(gen_i128, i128);\n\n gen_method!(gen_usize, usize);\n\n gen_method!(gen_isize, isize);\n\n gen_method!(gen_f32, f32);\n\n gen_method!(gen_f64, f64);\n", "file_path": "bolero-generator/src/driver.rs", "rank": 12, "score": 174490.3027787632 }, { "content": "/// Trait for defining a test case\n\npub trait Test: Sized {\n\n /// The input value for the test case\n\n type Value;\n\n\n\n /// Execute one test with the given input\n\n fn test<T: TestInput<Result<bool, PanicError>>>(\n\n &mut self,\n\n input: &mut T,\n\n ) -> Result<bool, PanicError>;\n\n\n\n /// Generate a value for the given input.\n\n ///\n\n /// Note: this is used for printing the value related to a test failure\n\n fn generate_value<T: TestInput<Self::Value>>(&self, input: &mut T) -> Self::Value;\n\n\n\n /// Shrink the input to a simpler form\n\n fn shrink(\n\n &mut self,\n\n input: Vec<u8>,\n\n seed: Option<u64>,\n", "file_path": "bolero-engine/src/test.rs", "rank": 13, "score": 174477.58584909514 }, { "content": "pub fn take_panic() -> Option<PanicError> {\n\n ERROR.with(|error| error.borrow_mut().take())\n\n}\n\n\n", "file_path": "bolero-engine/src/panic.rs", "rank": 14, "score": 174461.4742808136 }, { "content": "/// Generate a value for a given type\n\npub trait TypeGenerator: Sized {\n\n /// Generates a value with the given driver\n\n fn generate<D: Driver>(driver: &mut D) -> Option<Self>;\n\n\n\n /// Mutates an existing value with the given driver\n\n fn mutate<D: Driver>(&mut self, driver: &mut D) -> Option<()> {\n\n *self = Self::generate(driver)?;\n\n Some(())\n\n }\n\n\n\n /// Returns a generator for a given type\n\n #[inline]\n\n fn gen() -> TypeValueGenerator<Self> {\n\n gen()\n\n }\n\n}\n\n\n", "file_path": "bolero-generator/src/lib.rs", "rank": 15, "score": 
170913.57143730478 }, { "content": "pub trait CollectionGenerator: Sized {\n\n type Item;\n\n\n\n fn mutate_collection<D: Driver, G>(\n\n &mut self,\n\n driver: &mut D,\n\n new_len: usize,\n\n item_gen: &G,\n\n ) -> Option<()>\n\n where\n\n G: ValueGenerator<Output = Self::Item>;\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! impl_values_collection_generator {\n\n ($ty:ident, $generator:ident, $default_len_range:expr $(,[$($params:path),*])?) => {\n\n pub struct $generator<V, L> {\n\n values: V,\n\n len: L,\n\n }\n", "file_path": "bolero-generator/src/alloc/collections.rs", "rank": 16, "score": 167525.03306723922 }, { "content": "pub fn catch<F: RefUnwindSafe + FnOnce() -> Output, Output>(fun: F) -> Result<Output, PanicError> {\n\n catch_unwind(AssertUnwindSafe(|| __panic_marker_start__(fun))).map_err(|err| {\n\n if let Some(err) = take_panic() {\n\n return err;\n\n }\n\n macro_rules! try_downcast {\n\n ($ty:ty, $fmt:expr) => {\n\n if let Some(err) = err.downcast_ref::<$ty>() {\n\n return PanicError::new(format!($fmt, err));\n\n }\n\n };\n\n }\n\n try_downcast!(PanicInfo, \"{}\");\n\n try_downcast!(anyhow::Error, \"{}\");\n\n try_downcast!(String, \"{}\");\n\n try_downcast!(&'static str, \"{}\");\n\n try_downcast!(Box<dyn Display>, \"{}\");\n\n try_downcast!(Box<dyn Debug>, \"{:?}\");\n\n PanicError::new(\"thread panicked with an unknown error\".to_string())\n\n })\n\n}\n\n\n", "file_path": "bolero-engine/src/panic.rs", "rank": 17, "score": 158785.18991332944 }, { "content": "/// Trait for defining an engine that executes a test\n\npub trait Engine<T: Test>: Sized {\n\n type Output;\n\n\n\n fn set_driver_mode(&mut self, mode: DriverMode);\n\n fn set_shrink_time(&mut self, shrink_time: Duration);\n\n fn run(self, test: T) -> Self::Output;\n\n}\n\n\n\n// TODO change this to `!` when stabilized\n\n#[doc(hidden)]\n\npub type Never = ();\n", "file_path": "bolero-engine/src/lib.rs", "rank": 18, "score": 158148.8881787299 }, { "content": "pub fn capture_backtrace(value: 
bool) -> bool {\n\n CAPTURE_BACKTRACE.with(|cell| {\n\n let prev = *cell.borrow();\n\n *cell.borrow_mut() = value;\n\n prev\n\n })\n\n}\n\n\n", "file_path": "bolero-engine/src/panic.rs", "rank": 19, "score": 154601.3952961904 }, { "content": "pub fn forward_panic(value: bool) -> bool {\n\n FORWARD_PANIC.with(|cell| {\n\n let prev = *cell.borrow();\n\n *cell.borrow_mut() = value;\n\n prev\n\n })\n\n}\n\n\n", "file_path": "bolero-engine/src/panic.rs", "rank": 20, "score": 154601.3952961904 }, { "content": "#[inline]\n\npub fn gen<T: TypeGenerator>() -> TypeValueGenerator<T> {\n\n TypeValueGenerator(PhantomData)\n\n}\n\n\n\n/// Generate a value for a given type with additional constraints\n", "file_path": "bolero-generator/src/lib.rs", "rank": 21, "score": 142754.79764986836 }, { "content": "#[inline]\n\npub fn constant<T: Clone>(value: T) -> Constant<T> {\n\n Constant { value }\n\n}\n\n\n\npub mod prelude {\n\n pub use crate::{\n\n constant, gen, gen_with,\n\n one_of::{one_of, one_value_of, OneOfExt, OneValueOfExt},\n\n TypeGenerator, TypeGeneratorWithParams, ValueGenerator,\n\n };\n\n}\n", "file_path": "bolero-generator/src/lib.rs", "rank": 22, "score": 141025.08836525574 }, { "content": "#[doc(hidden)]\n\npub fn test(\n\n location: TargetLocation,\n\n) -> TestTarget<ByteSliceGenerator, DefaultEngine, BorrowedInput> {\n\n TestTarget::new(DefaultEngine::new(location))\n\n}\n\n\n\n/// Default generator for byte slices\n\n#[derive(Copy, Clone, Default, PartialEq)]\n\npub struct ByteSliceGenerator;\n\n\n\nimpl<Engine> TestTarget<ByteSliceGenerator, Engine, BorrowedInput> {\n\n /// Create a `TestTarget` with the given `Engine`\n\n pub fn new(engine: Engine) -> TestTarget<ByteSliceGenerator, Engine, BorrowedInput> {\n\n Self {\n\n driver_mode: None,\n\n shrink_time: None,\n\n generator: ByteSliceGenerator,\n\n engine,\n\n input_ownership: PhantomData,\n\n }\n", "file_path": "bolero/src/lib.rs", "rank": 23, "score": 141001.25180136718 }, { "content": 
"#[inline(never)]\n\nfn __panic_marker_start__<F: FnOnce() -> R, R>(f: F) -> R {\n\n f()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Backtrace {\n\n frames: Vec<BacktraceFrame>,\n\n}\n\n\n\nimpl Display for Backtrace {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for (i, frame) in self.frames.iter().enumerate() {\n\n writeln!(f, \" {}: {}\", i, frame.symbol)?;\n\n if let Some(location) = frame.location.as_ref() {\n\n writeln!(f, \" at {}\", location)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "bolero-engine/src/panic.rs", "rank": 24, "score": 138624.6091600799 }, { "content": "/// Pick a generator for the provided set of generators\n\npub fn one_of<O: OneOfGenerator>(options: O) -> OneOf<O> {\n\n OneOf(options)\n\n}\n\n\n\n#[inline]\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 25, "score": 138126.8888258759 }, { "content": "/// Extensions for picking a generator from a set of generators\n\npub trait OneOfExt {\n\n type Generator;\n\n\n\n /// Pick a generator for the provided set of generators\n\n fn one_of(self) -> OneOf<Self::Generator>;\n\n}\n\n\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 26, "score": 136275.4546314435 }, { "content": "pub trait OneOfGenerator {\n\n type Output;\n\n\n\n fn generate_one_of<D: Driver>(&self, driver: &mut D) -> Option<Self::Output>;\n\n fn mutate_one_of<D: Driver>(&self, driver: &mut D, value: &mut Self::Output) -> Option<()>;\n\n}\n\n\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 27, "score": 136275.4546314435 }, { "content": "pub fn set_hook() {\n\n *PANIC_HOOK\n\n}\n\n\n", "file_path": "bolero-engine/src/panic.rs", "rank": 28, "score": 135860.81395856442 }, { "content": "/// Trait that turns the test return value into a `Result`\n\npub trait IntoTestResult {\n\n fn into_test_result(self) -> Result<(), PanicError>;\n\n}\n\n\n\nimpl IntoTestResult for () {\n\n fn into_test_result(self) -> Result<(), PanicError> {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IntoTestResult for 
bool {\n\n fn into_test_result(self) -> Result<(), PanicError> {\n\n if self {\n\n Ok(())\n\n } else {\n\n Err(PanicError::new(\"test returned `false`\".to_string()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "bolero-engine/src/test_result.rs", "rank": 29, "score": 133914.76691650547 }, { "content": "/// Convert a type generator into the default value generator\n\npub trait TypeGeneratorWithParams {\n\n type Output: ValueGenerator;\n\n\n\n fn gen_with() -> Self::Output;\n\n}\n\n\n\n/// Non-parameterized ValueGenerator given a TypeGenerator\n\n#[derive(Copy, Clone, Debug)]\n\npub struct TypeValueGenerator<T: TypeGenerator>(PhantomData<T>);\n\n\n\nimpl<T: TypeGenerator> Default for TypeValueGenerator<T> {\n\n fn default() -> Self {\n\n Self(PhantomData)\n\n }\n\n}\n\n\n\nimpl<T: TypeGenerator + TypeGeneratorWithParams> TypeValueGenerator<T> {\n\n pub fn with(self) -> <T as TypeGeneratorWithParams>::Output {\n\n T::gen_with()\n\n }\n", "file_path": "bolero-generator/src/lib.rs", "rank": 30, "score": 133904.5328812896 }, { "content": "#[test]\n\nfn range_with_test() {\n\n use core::ops::Range;\n\n\n\n let _ = generator_test!(gen::<Range<usize>>().with().start(4..6).end(6..10));\n\n}\n\n\n", "file_path": "bolero-generator/src/range.rs", "rank": 31, "score": 133393.73529516315 }, { "content": "#[test]\n\nfn char_bounds_test() {\n\n let _ = generator_test!(gen::<char>().with().bounds('a'..='f'));\n\n}\n", "file_path": "bolero-generator/src/char.rs", "rank": 32, "score": 131175.37450799893 }, { "content": "#[test]\n\nfn char_type_test() {\n\n let _ = generator_test!(gen::<char>());\n\n}\n\n\n", "file_path": "bolero-generator/src/char.rs", "rank": 33, "score": 131175.37450799893 }, { "content": "#[test]\n\nfn range_type_test() {\n\n use core::ops::Range;\n\n\n\n let _ = generator_test!(gen::<Range<usize>>());\n\n}\n\n\n", "file_path": "bolero-generator/src/range.rs", "rank": 34, "score": 131157.62598104464 }, { "content": "#[test]\n\nfn range_gen_test() {\n\n let _ = 
generator_test!(0usize..10);\n\n}\n", "file_path": "bolero-generator/src/range.rs", "rank": 35, "score": 131157.62598104464 }, { "content": "pub fn rust_backtrace() -> bool {\n\n *RUST_BACKTRACE\n\n}\n\n\n", "file_path": "bolero-engine/src/panic.rs", "rank": 36, "score": 125986.45247311816 }, { "content": "pub fn thread_name() -> String {\n\n THREAD_NAME.with(|cell| cell.clone())\n\n}\n\n\n", "file_path": "bolero-engine/src/panic.rs", "rank": 37, "score": 125986.45247311816 }, { "content": "pub trait TestInput<Output> {\n\n type Driver: Driver + RefUnwindSafe;\n\n\n\n /// Provide a slice of the test input\n\n fn with_slice<F: FnMut(&[u8]) -> Output>(&mut self, f: &mut F) -> Output;\n\n\n\n /// Provide a test driver for the test input\n\n ///\n\n /// Note: Drivers are used with `bolero_generator::ValueGenerator` implementations.\n\n fn with_driver<F: FnMut(&mut Self::Driver) -> Output>(&mut self, f: &mut F) -> Output;\n\n}\n\n\n\nimpl<'a, Output> TestInput<Output> for &'a [u8] {\n\n type Driver = ByteSliceDriver<'a>;\n\n\n\n fn with_slice<F: FnMut(&[u8]) -> Output>(&mut self, f: &mut F) -> Output {\n\n f(self)\n\n }\n\n\n\n fn with_driver<F: FnMut(&mut Self::Driver) -> Output>(&mut self, f: &mut F) -> Output {\n", "file_path": "bolero-engine/src/test_input.rs", "rank": 38, "score": 124108.57960630377 }, { "content": " int UseValueProfile = false;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 39, "score": 123875.20169396966 }, { "content": "#[test]\n\nfn range_generator_test() {\n\n check!().with_generator(0..=5).for_each(|_input: &u8| {\n\n // println!(\"{:?}\", input);\n\n });\n\n}\n\n\n", "file_path": "bolero/src/lib.rs", "rank": 40, "score": 121661.61212568276 }, { "content": "#[test]\n\nfn option_test() {\n\n let _ = generator_test!(gen::<Option<u8>>());\n\n}\n", "file_path": "bolero-generator/src/result.rs", "rank": 41, "score": 121494.169635796 }, { "content": "/// Shrink the input to a simpler form\n\npub fn shrink<T: Test>(\n\n test: 
&mut T,\n\n input: Vec<u8>,\n\n seed: Option<u64>,\n\n driver_mode: Option<DriverMode>,\n\n shrink_time: Option<Duration>,\n\n) -> Option<TestFailure<T::Value>> {\n\n Shrinker::new(test, input, seed, driver_mode, shrink_time).shrink()\n\n}\n\n\n\nmacro_rules! predicate {\n\n ($expr:expr) => {\n\n if !($expr) {\n\n return Err(());\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! shrink_integer {\n\n ($current:expr, $check:expr) => {{\n\n let mut check = $check;\n\n\n\n (0..($current)).into_iter().find(|value| check(*value))\n\n }};\n\n}\n\n\n", "file_path": "bolero-engine/src/shrink.rs", "rank": 42, "score": 120642.56459782289 }, { "content": "#[test]\n\nfn range_generator_cloned_test() {\n\n check!()\n\n .with_generator(0..=5)\n\n .cloned()\n\n .for_each(|_input: u8| {\n\n // println!(\"{:?}\", input);\n\n });\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn nested_test() {\n\n check!().with_generator(0..=5).for_each(|_input: &u8| {\n\n // println!(\"{:?}\", input);\n\n });\n\n }\n\n}\n", "file_path": "bolero/src/lib.rs", "rank": 43, "score": 119160.34129441441 }, { "content": "#[test]\n\nfn one_value_of_test() {\n\n use core::cmp::Ordering;\n\n\n\n generator_test!([Ordering::Equal, Ordering::Less, Ordering::Greater].one_value_of());\n\n}\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 44, "score": 119129.94330333502 }, { "content": "fn main() {\n\n check!().for_each(|input: &[u8]| {\n\n // TODO implement checks\n\n let _ = input;\n\n });\n\n}\n", "file_path": "cargo-bolero/tests/fuzz_bytes/fuzz_target.rs", "rank": 45, "score": 116775.36278490422 }, { "content": "fn main() {\n\n let should_panic = env::var(\"SHOULD_PANIC\").is_ok();\n\n\n\n check!().for_each(|input| {\n\n if input.len() < 2 {\n\n return;\n\n }\n\n\n\n if should_panic {\n\n assert_ne!(input[0], 123);\n\n }\n\n\n\n let a = input[0];\n\n let b = input[1];\n\n assert!(add(a, b, should_panic) >= a);\n\n });\n\n}\n", "file_path": 
"examples/basic/tests/fuzz_bytes/fuzz_target.rs", "rank": 46, "score": 116775.36278490422 }, { "content": "#[test]\n\nfn vec_with_values_test() {\n\n let _ = generator_test!(gen::<Vec<_>>().with().values(4u16..6));\n\n}\n\n\n", "file_path": "bolero-generator/src/alloc/mod.rs", "rank": 47, "score": 116759.4103751984 }, { "content": "#[test]\n\nfn hash_set_with_values_test() {\n\n let _ = generator_test!(gen::<HashSet<_>>().with().values(4u16..6));\n\n}\n\n\n\nimpl_key_values_collection_generator!(HashMap, HashMapGenerator, DEFAULT_LEN_RANGE, [Hash, Eq]);\n\n\n", "file_path": "bolero-generator/src/std/mod.rs", "rank": 48, "score": 114508.91534676541 }, { "content": "#[inline]\n\npub fn gen_with<T: TypeGeneratorWithParams>() -> T::Output {\n\n T::gen_with()\n\n}\n\n\n\npub use one_of::{one_of, one_value_of};\n\n\n\nimpl<T> ValueGenerator for PhantomData<T> {\n\n type Output = Self;\n\n\n\n fn generate<D: Driver>(&self, _driver: &mut D) -> Option<Self::Output> {\n\n Some(PhantomData)\n\n }\n\n}\n\n\n\nimpl<T> TypeGenerator for PhantomData<T> {\n\n fn generate<D: Driver>(_driver: &mut D) -> Option<Self> {\n\n Some(PhantomData)\n\n }\n\n}\n\n\n", "file_path": "bolero-generator/src/lib.rs", "rank": 49, "score": 105179.68286001915 }, { "content": "#[proc_macro_derive(TypeGenerator, attributes(generator))]\n\npub fn derive_type_generator(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n match input.data {\n\n Data::Struct(data) => {\n\n generate_struct_type_gen(input.attrs, input.ident, input.generics, data)\n\n }\n\n Data::Enum(data) => generate_enum_type_gen(input.attrs, input.ident, input.generics, data),\n\n Data::Union(data) => {\n\n generate_union_type_gen(input.attrs, input.ident, input.generics, data)\n\n }\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "bolero-generator-derive/src/lib.rs", "rank": 50, "score": 104661.01666077299 }, { "content": "fn get_var<T: std::str::FromStr>(name: &str) -> Option<T> {\n\n 
std::env::var(name)\n\n .ok()\n\n .and_then(|value| value.parse::<T>().ok())\n\n}\n", "file_path": "bolero-engine/src/rng.rs", "rank": 51, "score": 95291.55272774947 }, { "content": "fn parse_code_hack(meta: &MetaList) -> Result<Option<TokenStream2>, Error> {\n\n for meta in meta.nested.iter() {\n\n if let NestedMeta::Meta(Meta::NameValue(meta)) = meta {\n\n if !meta.path.is_ident(\"_code\") {\n\n continue;\n\n }\n\n if let Lit::Str(lit) = &meta.lit {\n\n return Ok(Some(lit.parse()?));\n\n }\n\n };\n\n }\n\n Ok(None)\n\n}\n\n\n\npub struct GeneratorAttrValue<'a>(&'a GeneratorAttr);\n\n\n\nimpl ToTokens for GeneratorAttrValue<'_> {\n\n fn to_tokens(&self, tokens: &mut TokenStream2) {\n\n let generator = self.0;\n\n let span = generator.span();\n\n tokens.extend(quote_spanned!(span=>\n\n bolero_generator::ValueGenerator::generate(&(#generator), __bolero_driver)?\n\n ))\n\n }\n\n}\n", "file_path": "bolero-generator-derive/src/generator_attr.rs", "rank": 52, "score": 94161.63964655038 }, { "content": "fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) {\n\n let path = path.as_ref();\n\n fs::write(path, contents).expect(\"could not create file\");\n\n println!(\"wrote {:?}\", path);\n\n}\n", "file_path": "cargo-bolero/src/new.rs", "rank": 53, "score": 91097.64617222143 }, { "content": "fn write_control_file<W: Write>(file: &mut W, corpus_dir: &Path) -> Result<Vec<PathBuf>> {\n\n let mut inputs = vec![];\n\n for entry in fs::read_dir(corpus_dir)? 
{\n\n inputs.push(entry?.path());\n\n }\n\n inputs.sort();\n\n\n\n // The control file example:\n\n //\n\n // 3 # The number of inputs\n\n // 1 # The number of inputs in the first corpus, <= the previous number\n\n // file0\n\n // file1\n\n // file2 # One file name per line.\n\n // STARTED 0 123 # FileID, file size\n\n // FT 0 1 4 6 8 # FileID COV1 COV2 ...\n\n // COV 0 7 8 9 # FileID COV1 COV1\n\n // STARTED 1 456 # If FT is missing, the input crashed while processing.\n\n // STARTED 2 567\n\n // FT 2 8 9\n", "file_path": "cargo-bolero/src/libfuzzer.rs", "rank": 54, "score": 85024.21274508747 }, { "content": "static inline void mangle_UseValue(run_t* run, const uint8_t* val, size_t len, bool printable) {\n\n if (util_rnd64() & 1) {\n\n mangle_Overwrite(run, mangle_getOffSet(run), val, len, printable);\n\n } else {\n\n mangle_Insert(run, mangle_getOffSetPlus1(run), val, len, printable);\n\n }\n", "file_path": "bolero-honggfuzz/honggfuzz/mangle.c", "rank": 55, "score": 84710.36113517251 }, { "content": "static inline void mangle_UseValueAt(\n\n run_t* run, size_t off, const uint8_t* val, size_t len, bool printable) {\n\n if (util_rnd64() & 1) {\n\n mangle_Overwrite(run, off, val, len, printable);\n\n } else {\n\n mangle_Insert(run, off, val, len, printable);\n\n }\n", "file_path": "bolero-honggfuzz/honggfuzz/mangle.c", "rank": 56, "score": 84710.36113517251 }, { "content": " bool UseMemmem = true;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 57, "score": 84587.60373034666 }, { "content": " size_t MaxLen = 0;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 58, "score": 84587.60373034666 }, { "content": " bool UseCounters = false;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 59, "score": 84587.60373034666 }, { "content": " bool UseCmp = false;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 60, "score": 84587.60373034666 }, { "content": " bool CrossOverUniformDist = 
false;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 61, "score": 82561.84047663916 }, { "content": " size_t MaxNumberOfRuns = -1L;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 62, "score": 82549.17178148372 }, { "content": " size_t size;\n", "file_path": "bolero-honggfuzz/honggfuzz/honggfuzz.h", "rank": 63, "score": 82209.22928054651 }, { "content": " const size_t size;\n", "file_path": "bolero-honggfuzz/honggfuzz/mangle.c", "rank": 64, "score": 82209.22928054651 }, { "content": " size_t Size = 0;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerMerge.h", "rank": 65, "score": 81081.42396644526 }, { "content": " bool operator<(const SizedFile &B) const { return Size < B.Size; }\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerIO.h", "rank": 66, "score": 81081.42396644526 }, { "content": " int MaxTotalTimeSec = 0;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerOptions.h", "rank": 67, "score": 80606.67415023247 }, { "content": " static const size_t kMapSizeInBits = 1 << 16;\n", "file_path": "bolero-libfuzzer/libfuzzer/FuzzerValueBitMap.h", "rank": 68, "score": 78849.63185433672 }, { "content": "fn main() {\n\n println!(\n\n \"cargo:rustc-env=DEFAULT_TARGET={}\",\n\n std::env::var(\"TARGET\").unwrap()\n\n );\n\n}\n", "file_path": "cargo-bolero/build.rs", "rank": 69, "score": 77354.2665342535 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-env-changed=BOLERO_FUZZER\");\n\n println!(\"cargo:rerun-if-env-changed=CARGO_CFG_FUZZING_LIBFUZZER\");\n\n\n\n if std::env::var(\"CARGO_CFG_FUZZING_LIBFUZZER\").is_ok() {\n\n let mut build = cc::Build::new();\n\n let sources = ::std::fs::read_dir(\"libfuzzer\")\n\n .expect(\"listable source directory\")\n\n .map(|de| de.expect(\"file in directory\").path())\n\n .filter(|p| p.extension().map(|ext| ext == \"cpp\").unwrap_or(false))\n\n .filter(|p| {\n\n // We use FuzzerAPI instead\n\n p.file_stem()\n\n .map(|name| name != \"FuzzerMain\")\n\n .unwrap_or(false)\n\n 
});\n\n\n\n for source in sources {\n\n build.file(source.to_str().unwrap());\n\n }\n\n\n\n build.file(\"src/FuzzerAPI.cpp\");\n\n build.flag(\"-std=c++11\");\n\n build.flag(\"-fno-omit-frame-pointer\");\n\n build.flag(\"-w\");\n\n build.cpp(true);\n\n build.compile(\"libfuzzer.a\");\n\n }\n\n}\n", "file_path": "bolero-libfuzzer/build.rs", "rank": 70, "score": 77354.2665342535 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-env-changed=BOLERO_FUZZER\");\n\n println!(\"cargo:rerun-if-env-changed=CARGO_CFG_FUZZING_HONGGFUZZ\");\n\n println!(\"cargo:rerun-if-env-changed=CARGO_FEATURE_BIN\");\n\n\n\n if std::env::var(\"CARGO_CFG_FUZZING_HONGGFUZZ\").is_ok() {\n\n build(\"libhfuzz/libhfuzz.a\", \"libhfuzz.a\", \"hfuzz\");\n\n return;\n\n }\n\n\n\n if std::env::var(\"CARGO_FEATURE_BIN\").is_ok() {\n\n build(\"libhonggfuzz.a\", \"libhonggfuzz.a\", \"honggfuzz\");\n\n\n\n if cfg!(target_os = \"macos\") {\n\n println!(\"cargo:rustc-link-search=framework=/System/Library/PrivateFrameworks\");\n\n println!(\"cargo:rustc-link-search=framework=/System/Library/Frameworks\");\n\n\n\n for framework in [\n\n \"CoreSymbolication\",\n\n \"IOKit\",\n", "file_path": "bolero-honggfuzz/build.rs", "rank": 71, "score": 77354.2665342535 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-env-changed=BOLERO_FUZZER\");\n\n println!(\"cargo:rerun-if-env-changed=CARGO_CFG_FUZZING_AFL\");\n\n println!(\"cargo:rerun-if-env-changed=CARGO_FEATURE_BIN\");\n\n\n\n if std::env::var(\"CARGO_CFG_FUZZING_AFL\").is_ok() {\n\n let mut build = cc::Build::new();\n\n\n\n build.file(\"afl/llvm_mode/afl-llvm-rt.o.c\");\n\n build.flag(\"-fno-omit-frame-pointer\");\n\n build.flag(\"-fpermissive\");\n\n build.flag(\"-w\");\n\n build.compile(\"afl-llvm-rt.a\");\n\n return;\n\n }\n\n\n\n if std::env::var(\"CARGO_FEATURE_BIN\").is_ok() {\n\n let mut build = cc::Build::new();\n\n\n\n build.include(\"src/bolero-afl-util.h\");\n\n build.file(\"afl/afl-fuzz.c\");\n\n build.define(\"BIN_PATH\", 
\"\\\"/\\\"\");\n\n build.define(\"DOC_PATH\", \"\\\"/\\\"\");\n\n build.flag(\"-fno-omit-frame-pointer\");\n\n build.flag(\"-fpermissive\");\n\n build.flag(\"-w\");\n\n build.compile(\"afl.a\");\n\n }\n\n}\n", "file_path": "bolero-afl/build.rs", "rank": 72, "score": 77354.2665342535 }, { "content": "#[test]\n\nfn other_integration_test() {\n\n let should_panic = std::env::var(\"OTHER_SHOULD_PANIC\").is_ok();\n\n\n\n check!()\n\n .with_generator((0..254).map_gen(|a: u8| (a, a + 1)))\n\n .cloned()\n\n .for_each(|(a, b)| {\n\n assert!(add(a, b, should_panic) >= a);\n\n });\n\n}\n\n\n\nmod nested {\n\n use super::*;\n\n\n\n #[test]\n\n fn other_nested_integration_test() {\n\n let should_panic = std::env::var(\"OTHER_SHOULD_PANIC\").is_ok();\n\n\n\n check!()\n\n .with_generator((0..254).map_gen(|a: u8| (a, a + 1)))\n\n .cloned()\n\n .for_each(|(a, b)| {\n\n assert!(add(a, b, should_panic) >= a);\n\n });\n\n }\n\n}\n", "file_path": "examples/basic/tests/other.rs", "rank": 73, "score": 75960.28531897001 }, { "content": "#[test]\n\nfn and_then_test() {\n\n let _ = generator_test!(gen::<u8>().and_then(|value| value..));\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct FilterGenerator<Generator, Filter> {\n\n pub(crate) generator: Generator,\n\n pub(crate) filter: Filter,\n\n}\n\n\n\nimpl<G: ValueGenerator, F: Fn(&G::Output) -> bool> ValueGenerator for FilterGenerator<G, F> {\n\n type Output = G::Output;\n\n\n\n fn generate<D: Driver>(&self, driver: &mut D) -> Option<Self::Output> {\n\n let value = self.generator.generate(driver)?;\n\n if (self.filter)(&value) {\n\n Some(value)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "bolero-generator/src/combinator.rs", "rank": 74, "score": 75960.28531897001 }, { "content": "fn progress() {\n\n if cfg!(miri) {\n\n use std::io::{stderr, Write};\n\n\n\n // miri doesn't capture explicit writes to stderr\n\n #[allow(clippy::explicit_write)]\n\n let _ = write!(stderr(), \".\");\n\n }\n\n}\n\n\n\nimpl<T: Test> Engine<T> for 
TestEngine\n\nwhere\n\n T::Value: core::fmt::Debug,\n\n{\n\n type Output = Never;\n\n\n\n fn set_driver_mode(&mut self, mode: DriverMode) {\n\n self.driver_mode = Some(mode);\n\n }\n\n\n", "file_path": "bolero/src/test/mod.rs", "rank": 75, "score": 75960.28531897001 }, { "content": "fn main() {\n\n let args = std::env::args()\n\n .enumerate()\n\n .filter_map(|(i, v)| match (i, v.as_ref()) {\n\n (1, \"bolero\") => None, // cargo passes the subcommand so filter it out\n\n _ => Some(v),\n\n });\n\n\n\n if let Err(err) = Commands::from_iter(args).exec() {\n\n eprintln!(\"error: {}\", err);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n\npub(crate) fn exec(mut cmd: std::process::Command) -> Result<()> {\n\n cmd.spawn()?.wait()?.status_as_result()\n\n}\n\n\n\npub(crate) trait StatusAsResult {\n\n type Output;\n", "file_path": "cargo-bolero/src/main.rs", "rank": 76, "score": 75960.28531897001 }, { "content": " int size;\n", "file_path": "bolero-honggfuzz/honggfuzz/third_party/android/libBlocksRuntime/Block_private.h", "rank": 77, "score": 75355.72533445146 }, { "content": "#[test]\n\nfn with_bounds_test() {\n\n let _ = generator_test!(gen::<u8>().with().bounds(0..32));\n\n}\n", "file_path": "bolero-generator/src/bounded.rs", "rank": 78, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn bool_test() {\n\n let _ = generator_test!(gen::<bool>());\n\n}\n\n\n", "file_path": "bolero-generator/src/bool.rs", "rank": 79, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn tuple_with_test() {\n\n let _ = generator_test!(gen::<(u8, u8)>().with());\n\n}\n", "file_path": "bolero-generator/src/tuple.rs", "rank": 80, "score": 74642.69415375148 }, { "content": "#[test]\n\n#[should_panic]\n\nfn slice_generator_test() {\n\n check!().for_each(|input| {\n\n assert!(input.len() > 1000);\n\n });\n\n}\n\n\n", "file_path": "bolero/src/lib.rs", "rank": 81, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn atomicu8_test() {\n\n let _ = 
generator_no_clone_test!(gen::<AtomicU8>());\n\n}\n\n\n\n// #[test]\n\n// fn atomicu8_with_test() {\n\n// let _ = generator_test!(gen::<AtomicU8>()\n\n// .with()\n\n// .bounds(AtomicU8::new(0u8)..AtomicU8::new(5)));\n\n// }\n\n\n\nimpl_atomic_integer!(AtomicI8, i8);\n\nimpl_atomic_integer!(AtomicU8, u8);\n\nimpl_atomic_integer!(AtomicI16, i16);\n\nimpl_atomic_integer!(AtomicU16, u16);\n\nimpl_atomic_integer!(AtomicI32, i32);\n\nimpl_atomic_integer!(AtomicU32, u32);\n\nimpl_atomic_integer!(AtomicI64, i64);\n\nimpl_atomic_integer!(AtomicU64, u64);\n\nimpl_atomic_integer!(AtomicIsize, isize);\n", "file_path": "bolero-generator/src/atomic.rs", "rank": 82, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn map_test() {\n\n let _ = generator_test!(gen::<u8>().map(|value| value > 4));\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct AndThenGenerator<Generator, AndThen> {\n\n pub(crate) generator: Generator,\n\n pub(crate) and_then: AndThen,\n\n}\n\n\n\nimpl<G: ValueGenerator, H: ValueGenerator, F: Fn(G::Output) -> H> ValueGenerator\n\n for AndThenGenerator<G, F>\n\n{\n\n type Output = H::Output;\n\n\n\n fn generate<D: Driver>(&self, driver: &mut D) -> Option<Self::Output> {\n\n let value = self.generator.generate(driver)?;\n\n (self.and_then)(value).generate(driver)\n\n }\n\n}\n\n\n", "file_path": "bolero-generator/src/combinator.rs", "rank": 83, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn result_test() {\n\n let _ = generator_test!(gen::<Result<u8, u8>>());\n\n}\n\n\n", "file_path": "bolero-generator/src/result.rs", "rank": 84, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn filter_test() {\n\n let _ = generator_test!(gen::<u8>().filter(|value| *value > 40));\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct FilterMapGenerator<Generator, FilterMap> {\n\n pub(crate) generator: Generator,\n\n pub(crate) filter_map: FilterMap,\n\n}\n\n\n\nimpl<G: ValueGenerator, F: Fn(G::Output) -> Option<T>, T> ValueGenerator\n\n for FilterMapGenerator<G, 
F>\n\n{\n\n type Output = T;\n\n\n\n fn generate<D: Driver>(&self, driver: &mut D) -> Option<Self::Output> {\n\n let value = self.generator.generate(driver)?;\n\n (self.filter_map)(value)\n\n }\n\n}\n\n\n", "file_path": "bolero-generator/src/combinator.rs", "rank": 85, "score": 74642.69415375148 }, { "content": "#[test]\n\n#[should_panic]\n\nfn type_generator_test() {\n\n check!().with_type().for_each(|input: &u8| {\n\n assert!(input < &128);\n\n });\n\n}\n\n\n", "file_path": "bolero/src/lib.rs", "rank": 86, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn bool_with_test() {\n\n let _ = generator_test!(gen::<bool>().with().weight(0.1));\n\n}\n", "file_path": "bolero-generator/src/bool.rs", "rank": 87, "score": 74642.69415375148 }, { "content": "#[test]\n\nfn one_of_array_test() {\n\n use crate::gen;\n\n\n\n let options = [gen::<u8>(), gen(), gen()];\n\n let _ = generator_test!(one_of(options));\n\n let _ = generator_test!(options.one_of());\n\n let _ = generator_test!(one_of(&options[..]));\n\n\n\n let _ = generator_test!([1u8, 2, 3].one_of());\n\n}\n\n\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 88, "score": 73395.38124915675 }, { "content": "#[test]\n\nfn vecdeque_test() {\n\n let _ = generator_test!(gen::<VecDeque<u8>>());\n\n}\n\n\n\nimpl_values_collection_generator!(Vec, VecGenerator, DEFAULT_LEN_RANGE);\n\n\n\nimpl<T> CollectionGenerator for Vec<T> {\n\n type Item = T;\n\n\n\n fn mutate_collection<D: Driver, G>(\n\n &mut self,\n\n driver: &mut D,\n\n new_len: usize,\n\n item_gen: &G,\n\n ) -> Option<()>\n\n where\n\n G: ValueGenerator<Output = Self::Item>,\n\n {\n\n for item in self.iter_mut().take(new_len) {\n\n item_gen.mutate(driver, item)?;\n", "file_path": "bolero-generator/src/alloc/mod.rs", "rank": 89, "score": 73395.38124915675 }, { "content": "#[test]\n\nfn array_gen_test() {\n\n let _ = generator_test!([gen::<u8>(), gen::<u8>()]);\n\n}\n", "file_path": "bolero-generator/src/array.rs", "rank": 90, "score": 73395.38124915675 }, 
{ "content": "#[test]\n\nfn tuple_type_test() {\n\n let _ = generator_test!(gen::<(u8, u16, u32, u64)>());\n\n}\n\n\n", "file_path": "bolero-generator/src/tuple.rs", "rank": 91, "score": 73395.38124915675 }, { "content": "#[test]\n\nfn array_type_test() {\n\n let _ = generator_test!(gen::<[u8; 10]>());\n\n}\n\n\n", "file_path": "bolero-generator/src/array.rs", "rank": 92, "score": 73395.38124915675 }, { "content": "#[test]\n\n#[should_panic]\n\nfn type_generator_cloned_test() {\n\n check!().with_type().cloned().for_each(|input: u8| {\n\n assert!(input < 128);\n\n });\n\n}\n\n\n", "file_path": "bolero/src/lib.rs", "rank": 93, "score": 73395.38124915675 }, { "content": "#[test]\n\nfn string_with_test() {\n\n for _ in 0..100 {\n\n if let Some(string) = generator_test!(gen::<String>().with().len(32usize)) {\n\n assert_eq!(string.chars().count(), 32usize);\n\n return;\n\n }\n\n }\n\n\n\n panic!(\"failed to generate a valid string\");\n\n}\n", "file_path": "bolero-generator/src/alloc/string.rs", "rank": 94, "score": 73395.38124915675 }, { "content": "#[test]\n\nfn one_of_slice_test() {\n\n use crate::constant;\n\n use core::cmp::Ordering;\n\n\n\n let options = [\n\n constant(Ordering::Equal),\n\n constant(Ordering::Less),\n\n constant(Ordering::Greater),\n\n ];\n\n\n\n let _ = generator_test!(one_of(&options[..]));\n\n}\n\n\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 95, "score": 73395.38124915675 }, { "content": "#[test]\n\nfn one_of_tuple_test() {\n\n let _ = generator_test!(one_of((gen::<u8>(), 0..4, 8..9)));\n\n}\n\n\n", "file_path": "bolero-generator/src/one_of.rs", "rank": 96, "score": 73395.38124915675 }, { "content": "macro_rules! gen_int {\n\n ($name:ident, $ty:ident) => {\n\n #[inline]\n\n fn $name(&mut self, min: Bound<&$ty>, max: Bound<&$ty>) -> Option<$ty> {\n\n Uniform::sample(self, min, max)\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! 
gen_float {\n\n ($name:ident, $ty:ident) => {\n\n #[inline]\n\n fn $name(&mut self, min: Bound<&$ty>, max: Bound<&$ty>) -> Option<$ty> {\n\n use core::ops::RangeBounds;\n\n\n\n if let (Bound::Unbounded, Bound::Unbounded) = (min, max) {\n\n let mut bytes = [0u8; core::mem::size_of::<$ty>()];\n\n self.fill_bytes(&mut bytes)?;\n\n return Some(<$ty>::from_le_bytes(bytes));\n\n }\n", "file_path": "bolero-generator/src/driver/macros.rs", "rank": 98, "score": 49.85367342835718 } ]
Rust
alacritty/src/main.rs
vitaly-zdanevich/alacritty
6b208a6958a32594cf1248f5336f8a8f79d17fe3
#![warn(rust_2018_idioms, future_incompatible)] #![deny(clippy::all, clippy::if_not_else, clippy::enum_glob_use, clippy::wrong_pub_self_convention)] #![cfg_attr(feature = "cargo-clippy", deny(warnings))] #![cfg_attr(all(test, feature = "bench"), feature(test))] #![windows_subsystem = "windows"] #[cfg(not(any(feature = "x11", feature = "wayland", target_os = "macos", windows)))] compile_error!(r#"at least one of the "x11"/"wayland" features must be enabled"#); #[cfg(target_os = "macos")] use std::env; use std::error::Error; use std::fs; use std::io::{self, Write}; use std::sync::Arc; use glutin::event_loop::EventLoop as GlutinEventLoop; use log::{error, info}; #[cfg(windows)] use winapi::um::wincon::{AttachConsole, FreeConsole, ATTACH_PARENT_PROCESS}; use alacritty_terminal::event_loop::{self, EventLoop, Msg}; use alacritty_terminal::sync::FairMutex; use alacritty_terminal::term::Term; use alacritty_terminal::tty; mod cli; mod clipboard; mod config; mod cursor; mod daemon; mod display; mod event; mod input; mod logging; #[cfg(target_os = "macos")] mod macos; mod message_bar; mod meter; #[cfg(windows)] mod panic; mod renderer; mod scheduler; mod url; mod window; #[cfg(all(feature = "wayland", not(any(target_os = "macos", windows))))] mod wayland_theme; mod gl { #![allow(clippy::all)] include!(concat!(env!("OUT_DIR"), "/gl_bindings.rs")); } use crate::cli::Options; use crate::config::monitor; use crate::config::Config; use crate::display::Display; use crate::event::{Event, EventProxy, Processor}; #[cfg(target_os = "macos")] use crate::macos::locale; use crate::message_bar::MessageBuffer; fn main() { #[cfg(windows)] panic::attach_handler(); #[cfg(windows)] unsafe { AttachConsole(ATTACH_PARENT_PROCESS); } let options = Options::new(); let window_event_loop = GlutinEventLoop::<Event>::with_user_event(); let log_file = logging::initialize(&options, window_event_loop.create_proxy()) .expect("Unable to initialize logger"); let config = config::load(&options); 
log::set_max_level(config.ui_config.debug.log_level); #[cfg(target_os = "macos")] env::set_current_dir(dirs::home_dir().unwrap()).unwrap(); #[cfg(target_os = "macos")] locale::set_locale_environment(); let persistent_logging = config.ui_config.debug.persistent_logging; if let Err(err) = run(window_event_loop, config, options) { error!("Alacritty encountered an unrecoverable error:\n\n\t{}\n", err); std::process::exit(1); } if let Some(log_file) = log_file { if !persistent_logging && fs::remove_file(&log_file).is_ok() { let _ = writeln!(io::stdout(), "Deleted log file at \"{}\"", log_file.display()); } } } fn run( window_event_loop: GlutinEventLoop<Event>, config: Config, options: Options, ) -> Result<(), Box<dyn Error>> { info!("Welcome to Alacritty"); log_config_path(&config); tty::setup_env(&config); let event_proxy = EventProxy::new(window_event_loop.create_proxy()); let display = Display::new(&config, &window_event_loop)?; info!( "PTY dimensions: {:?} x {:?}", display.size_info.screen_lines(), display.size_info.cols() ); let terminal = Term::new(&config, display.size_info, event_proxy.clone()); let terminal = Arc::new(FairMutex::new(terminal)); let pty = tty::new(&config, &display.size_info, display.window.x11_window_id()); let event_loop = EventLoop::new( Arc::clone(&terminal), event_proxy.clone(), pty, config.hold, config.ui_config.debug.ref_test, ); let loop_tx = event_loop.channel(); if config.ui_config.live_config_reload() { monitor::watch(config.ui_config.config_paths.clone(), event_proxy); } let message_buffer = MessageBuffer::new(); let mut processor = Processor::new( event_loop::Notifier(loop_tx.clone()), message_buffer, config, display, options, ); let io_thread = event_loop.spawn(); info!("Initialisation complete"); processor.run(terminal, window_event_loop); drop(processor); loop_tx.send(Msg::Shutdown).expect("Error sending shutdown to PTY event loop"); io_thread.join().expect("join io thread"); #[cfg(windows)] unsafe { FreeConsole(); } 
info!("Goodbye"); Ok(()) } fn log_config_path(config: &Config) { let mut msg = String::from("Configuration files loaded from:"); for path in &config.ui_config.config_paths { msg.push_str(&format!("\n {:?}", path.display())); } info!("{}", msg); }
#![warn(rust_2018_idioms, future_incompatible)] #![deny(clippy::all, clippy::if_not_else, clippy::enum_glob_use, clippy::wrong_pub_self_convention)] #![cfg_attr(feature = "cargo-clippy", deny(warnings))] #![cfg_attr(all(test, feature = "bench"), feature(test))] #![windows_subsystem = "windows"] #[cfg(not(any(feature = "x11", feature = "wayland", target_os = "macos", windows)))] compile_error!(r#"at least one of the "x11"/"wayland" features must be enabled"#); #[cfg(target_os = "macos")] use std::env; use std::error::Error; use std::fs; use std::io::{self, Write}; use std::sync::Arc; use glutin::event_loop::EventLoop as GlutinEventLoop; use log::{error, info}; #[cfg(windows)] use winapi::um::wincon::{AttachConsole, FreeConsole, ATTACH_PARENT_PROCESS}; use alacritty_terminal::event_loop::{self, EventLoop, Msg}; use alacritty_terminal::sync::FairMutex; use alacritty_terminal::term::Term; use alacritty_terminal::tty; mod cli; mod clipboard; mod config; mod cursor; mod daemon; mod display; mod event; mod input; mod logging; #[cfg(target_os = "macos")] mod macos; mod message_bar; mod meter; #[cfg(windows)] mod panic; mod renderer; mod scheduler; mod url; mod window; #[cfg(all(feature = "wayland", not(any(target_os = "macos", windows))))] mod wayland_theme; mod gl { #![allow(clippy::all)] include!(concat!(env!("OUT_DIR"), "/gl_bindings.rs")); } use crate::cli::Options; use crate::config::monitor; use crate::config::Config; use crate::display::Display; use crate::event::{Event, EventProxy, Processor}; #[cfg(target_os = "macos")] use crate::macos::locale; use crate::message_bar::MessageBuffer; fn main() { #[cfg(windows)] panic::attach_handler(); #[cfg(windows)] unsafe { AttachConsole(ATTACH_PARENT_PROCESS); } let options = Options::new(); let window_event_loop = GlutinEventLoop::<Event>::with_user_event(); let log_file = logging::initialize(&options, window_event_loop.create_proxy()) .expect("Unable to initialize logger"); let config = config::load(&options); 
log::set_max_level(config.ui_config.debug.log_level); #[cfg(target_os = "macos")] env::set_current_dir(dirs::home_dir().unwrap()).unwrap(); #[cfg(target_os = "macos")] locale::set_locale_environment(); let persistent_logging = config.ui_config.debug.persistent_logging; if let Err(err) = run(window_event_loop, config, options) { error!("Alacritty encountered an unrecoverable error:\n\n\t{}\n", err); std::process::exit(1); } if let Some(log_file) = log_file { if !persistent_logging && fs::remove_file(&log_file).is_ok() { let _ = writeln!(io::stdout(), "Deleted log file at \"{}\"", log_file.display()); } } } fn run( window_event_loop: GlutinEventLoop<Event>, config: Config, options: Options, ) -> Result<(), Box<dyn Error>> { info!("Welcome to Alacritty"); log_config_path(&config); tty::setup_env(&config); let event_proxy = EventProxy::new(window_event_loop.create_proxy()); let display = Display::new(&config, &window_event_loop)?; info!( "PTY dimensions: {:?} x {:?}", display.size_info.screen_lines(), display.size_info.cols() ); let terminal = Term::new(&config, display.size_info, event_proxy.clone()); let terminal = Arc::new(FairMutex::new(terminal)); let pty = tty::new(&config, &display.size_info, display.window.x11_window_id()); let event_loop = EventLoop::new( Arc::clone(&terminal), event_proxy.clone(), pty, config.hold, config.ui_config.debug.ref_test, ); let loop_tx = event_loop.channel(); if config.ui_config.live_config_reload() { monitor::watch(config.ui_config.config_paths.clone(), event_proxy); } let message_buffer = MessageBuffer::new(); let mut processor = Processor::new( event_loop::Notifier(loop_tx.clone()), message_buffer, config, display, options, ); let io_thread = event_loop.spawn(); info!("Initialisation complete"); processor.run(terminal, window_event_loop); drop(processor); loop_tx.send(Msg::Shutdown).expect("Error sending shutdown to PTY event loop"); io_thread.join().expect("join io thread"); #[cfg(windows)] unsafe { FreeConsole(); } 
info!("Goodbye"); Ok(()) }
fn log_config_path(config: &Config) { let mut msg = String::from("Configuration files loaded from:"); for path in &config.ui_config.config_paths { msg.push_str(&format!("\n {:?}", path.display())); } info!("{}", msg); }
function_block-full_function
[ { "content": "#[cfg(not(all(feature = \"winpty\", target_env = \"msvc\")))]\n\npub fn new<C>(config: &Config<C>, size: &SizeInfo, window_id: Option<usize>) -> Pty {\n\n conpty::new(config, size, window_id).expect(\"Failed to create ConPTY backend\")\n\n}\n\n\n", "file_path": "alacritty_terminal/src/tty/windows/mod.rs", "rank": 0, "score": 339784.2646587654 }, { "content": "pub fn new<C>(config: &Config<C>, size: &SizeInfo, _window_id: Option<usize>) -> Option<Pty> {\n\n if config.winpty_backend {\n\n return None;\n\n }\n\n\n\n let api = ConptyApi::new()?;\n\n\n\n let mut pty_handle = 0 as HPCON;\n\n\n\n // Passing 0 as the size parameter allows the \"system default\" buffer\n\n // size to be used. There may be small performance and memory advantages\n\n // to be gained by tuning this in the future, but it's likely a reasonable\n\n // start point.\n\n let (conout, conout_pty_handle) = miow::pipe::anonymous(0).unwrap();\n\n let (conin_pty_handle, conin) = miow::pipe::anonymous(0).unwrap();\n\n\n\n let coord =\n\n coord_from_sizeinfo(size).expect(\"Overflow when creating initial size on pseudoconsole\");\n\n\n\n // Create the Pseudo Console, using the pipes.\n", "file_path": "alacritty_terminal/src/tty/windows/conpty.rs", "rank": 1, "score": 324302.5492259516 }, { "content": "pub fn option_explicit_none<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n T: Deserialize<'de> + Default,\n\n{\n\n Ok(match Value::deserialize(deserializer)? 
{\n\n Value::String(ref value) if value.to_lowercase() == \"none\" => None,\n\n value => Some(T::deserialize(value).unwrap_or_else(fallback_default)),\n\n })\n\n}\n", "file_path": "alacritty_terminal/src/config/mod.rs", "rank": 2, "score": 313039.2850724211 }, { "content": "pub fn new<C>(config: &Config<C>, size: &SizeInfo, _window_id: Option<usize>) -> Pty {\n\n // Create config.\n\n let mut wconfig = WinptyConfig::new(ConfigFlags::empty()).unwrap();\n\n\n\n wconfig.set_initial_size(size.cols().0 as i32, size.screen_lines().0 as i32);\n\n wconfig.set_mouse_mode(&MouseMode::Auto);\n\n\n\n // Start agent.\n\n let mut agent = Winpty::open(&wconfig).unwrap();\n\n let (conin, conout) = (agent.conin_name(), agent.conout_name());\n\n\n\n let cmdline = cmdline(&config);\n\n\n\n // Spawn process.\n\n let spawnconfig = SpawnConfig::new(\n\n SpawnFlags::AUTO_SHUTDOWN | SpawnFlags::EXIT_AFTER_SHUTDOWN,\n\n None, // appname.\n\n Some(&cmdline),\n\n config.working_directory.as_deref(),\n\n None, // Env.\n", "file_path": "alacritty_terminal/src/tty/windows/winpty.rs", "rank": 3, "score": 309529.21963437845 }, { "content": "fn deserialize_cursor_thickness<'a, D>(deserializer: D) -> Result<Percentage, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let value = Value::deserialize(deserializer)?;\n\n match Percentage::deserialize(value) {\n\n Ok(value) => Ok(value),\n\n Err(err) => {\n\n error!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Problem with config: {}, using default thickness value {}\",\n\n err,\n\n DEFAULT_CURSOR_THICKNESS\n\n );\n\n\n\n Ok(Percentage::new(DEFAULT_CURSOR_THICKNESS))\n\n },\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/config/mod.rs", "rank": 4, "score": 306049.625099173 }, { "content": "pub fn new<C>(config: &Config<C>, size: &SizeInfo, window_id: Option<usize>) -> Pty {\n\n if let Some(pty) = conpty::new(config, size, window_id) {\n\n info!(\"Using ConPTY backend\");\n\n pty\n\n } else {\n\n info!(\"Using WinPTY backend\");\n\n 
winpty::new(config, size, window_id)\n\n }\n\n}\n\n\n\npub enum PtyBackend {\n\n Winpty(winpty::Agent),\n\n Conpty(conpty::Conpty),\n\n}\n\n\n\nimpl OnResize for PtyBackend {\n\n fn on_resize(&mut self, size: &SizeInfo) {\n\n match self {\n\n PtyBackend::Winpty(w) => w.on_resize(size),\n\n PtyBackend::Conpty(c) => c.on_resize(size),\n", "file_path": "alacritty_terminal/src/tty/windows/automatic_backend.rs", "rank": 5, "score": 305381.0042548784 }, { "content": "/// Create a new TTY and return a handle to interact with it.\n\npub fn new<C>(config: &Config<C>, size: &SizeInfo, window_id: Option<usize>) -> Pty {\n\n let (master, slave) = make_pty(size.to_winsize());\n\n\n\n #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n\n if let Ok(mut termios) = termios::tcgetattr(master) {\n\n // Set character encoding to UTF-8.\n\n termios.input_flags.set(InputFlags::IUTF8, true);\n\n let _ = termios::tcsetattr(master, SetArg::TCSANOW, &termios);\n\n }\n\n\n\n let mut buf = [0; 1024];\n\n let pw = get_pw_entry(&mut buf);\n\n\n\n let shell = match config.shell.as_ref() {\n\n Some(shell) => Cow::Borrowed(shell),\n\n None => Cow::Owned(default_shell(&pw)),\n\n };\n\n\n\n let mut builder = Command::new(shell.program());\n\n for arg in shell.args() {\n", "file_path": "alacritty_terminal/src/tty/unix.rs", "rank": 6, "score": 301689.6296996905 }, { "content": "/// Format an option in the format of `parent.field=value` to a serde Value.\n\nfn option_as_value(option: &str) -> Result<Value, serde_yaml::Error> {\n\n let mut yaml_text = String::with_capacity(option.len());\n\n let mut closing_brackets = String::new();\n\n\n\n for (i, c) in option.chars().enumerate() {\n\n match c {\n\n '=' => {\n\n yaml_text.push_str(\": \");\n\n yaml_text.push_str(&option[i + 1..]);\n\n break;\n\n },\n\n '.' 
=> {\n\n yaml_text.push_str(\": {\");\n\n closing_brackets.push('}');\n\n },\n\n _ => yaml_text.push(c),\n\n }\n\n }\n\n\n\n yaml_text += &closing_brackets;\n", "file_path": "alacritty/src/cli.rs", "rank": 7, "score": 297234.90071226447 }, { "content": "fn deserialize_bell_command<'a, D>(deserializer: D) -> Result<Option<Program>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n // Deserialize to generic value.\n\n let val = Value::deserialize(deserializer)?;\n\n\n\n // Accept `None` to disable the bell command.\n\n if val.as_str().filter(|v| v.to_lowercase() == \"none\").is_some() {\n\n return Ok(None);\n\n }\n\n\n\n match Program::deserialize(val) {\n\n Ok(command) => Ok(Some(command)),\n\n Err(err) => {\n\n error!(target: LOG_TARGET_CONFIG, \"Problem with config: {}; ignoring field\", err);\n\n Ok(None)\n\n },\n\n }\n\n}\n", "file_path": "alacritty_terminal/src/config/bell.rs", "rank": 8, "score": 288143.0873145818 }, { "content": "/// Attempt to reload the configuration file.\n\npub fn reload(config_path: &PathBuf, options: &Options) -> Result<Config> {\n\n // Load config, propagating errors.\n\n let config_options = options.config_options().clone();\n\n let mut config = load_from(&config_path, config_options)?;\n\n\n\n // Override config with CLI options.\n\n options.override_config(&mut config);\n\n\n\n Ok(config)\n\n}\n\n\n", "file_path": "alacritty/src/config/mod.rs", "rank": 9, "score": 276237.2656783181 }, { "content": "fn deserialize_launcher<'a, D>(deserializer: D) -> std::result::Result<Option<Program>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let default = Url::default().launcher;\n\n\n\n // Deserialize to generic value.\n\n let val = serde_yaml::Value::deserialize(deserializer)?;\n\n\n\n // Accept `None` to disable the launcher.\n\n if val.as_str().filter(|v| v.to_lowercase() == \"none\").is_some() {\n\n return Ok(None);\n\n }\n\n\n\n match <Option<Program>>::deserialize(val) {\n\n Ok(launcher) => Ok(launcher),\n\n Err(err) => {\n\n 
error!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Problem with config: {}; using {}\",\n", "file_path": "alacritty/src/config/mouse.rs", "rank": 10, "score": 272408.7264822671 }, { "content": "/// Load configuration file and log errors.\n\nfn load_from(path: &PathBuf, cli_config: Value) -> Result<Config> {\n\n match read_config(path, cli_config) {\n\n Ok(config) => Ok(config),\n\n Err(err) => {\n\n error!(target: LOG_TARGET_CONFIG, \"Unable to load config {:?}: {}\", path, err);\n\n Err(err)\n\n },\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/config/mod.rs", "rank": 11, "score": 271088.54467668873 }, { "content": "/// Deserialize configuration file from path.\n\nfn read_config(path: &PathBuf, cli_config: Value) -> Result<Config> {\n\n let mut config_paths = Vec::new();\n\n let mut config_value = parse_config(&path, &mut config_paths, IMPORT_RECURSION_LIMIT)?;\n\n\n\n // Override config with CLI options.\n\n config_value = serde_utils::merge(config_value, cli_config);\n\n\n\n // Deserialize to concrete type.\n\n let mut config = Config::deserialize(config_value)?;\n\n config.ui_config.config_paths = config_paths;\n\n\n\n print_deprecation_warnings(&config);\n\n\n\n Ok(config)\n\n}\n\n\n", "file_path": "alacritty/src/config/mod.rs", "rank": 12, "score": 270761.5804387086 }, { "content": "pub fn failure_default<'a, D, T>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n T: Deserialize<'a> + Default,\n\n{\n\n Ok(T::deserialize(Value::deserialize(deserializer)?).unwrap_or_else(fallback_default))\n\n}\n\n\n", "file_path": "alacritty_terminal/src/config/mod.rs", "rank": 13, "score": 262051.15989834425 }, { "content": "fn cmdline<C>(config: &Config<C>) -> String {\n\n let default_shell = Program::Just(\"powershell\".to_owned());\n\n let shell = config.shell.as_ref().unwrap_or(&default_shell);\n\n\n\n once(shell.program().as_ref())\n\n .chain(shell.args().iter().map(|a| a.as_ref()))\n\n .collect::<Vec<_>>()\n\n .join(\" \")\n\n}\n\n\n", "file_path": 
"alacritty_terminal/src/tty/windows/mod.rs", "rank": 14, "score": 250191.8289063703 }, { "content": "fn deserialize_class<'a, D>(deserializer: D) -> Result<Class, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let value = Value::deserialize(deserializer)?;\n\n\n\n if let Value::String(instance) = value {\n\n return Ok(Class { instance, general: DEFAULT_NAME.into() });\n\n }\n\n\n\n match Class::deserialize(value) {\n\n Ok(value) => Ok(value),\n\n Err(err) => {\n\n error!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Problem with config: {}; using class {}\", err, DEFAULT_NAME\n\n );\n\n Ok(Class::default())\n\n },\n\n }\n\n}\n", "file_path": "alacritty/src/config/window.rs", "rank": 15, "score": 249117.04386730137 }, { "content": "fn deserialize_class_resource<'a, D>(deserializer: D) -> Result<String, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let value = Value::deserialize(deserializer)?;\n\n match String::deserialize(value) {\n\n Ok(value) => Ok(value),\n\n Err(err) => {\n\n error!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Problem with config: {}, using default value {}\", err, DEFAULT_NAME,\n\n );\n\n\n\n Ok(DEFAULT_NAME.into())\n\n },\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/config/window.rs", "rank": 17, "score": 245048.54299201333 }, { "content": "fn replace_if_some<T>(option: &mut T, value: Option<T>) {\n\n if let Some(value) = value {\n\n *option = value;\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/cli.rs", "rank": 18, "score": 243865.13583135814 }, { "content": "fn deserialize_log_level<'a, D>(deserializer: D) -> Result<LevelFilter, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n Ok(match failure_default::<D, String>(deserializer)?.to_lowercase().as_str() {\n\n \"off\" | \"none\" => LevelFilter::Off,\n\n \"error\" => LevelFilter::Error,\n\n \"warn\" => LevelFilter::Warn,\n\n \"info\" => LevelFilter::Info,\n\n \"debug\" => LevelFilter::Debug,\n\n \"trace\" => LevelFilter::Trace,\n\n level => {\n\n error!(\n\n target: LOG_TARGET_CONFIG,\n\n 
\"Problem with config: invalid log level {}; using level Warn\", level\n\n );\n\n default_log_level()\n\n },\n\n })\n\n}\n", "file_path": "alacritty/src/config/debug.rs", "rank": 19, "score": 241416.19432608812 }, { "content": "fn deserialize_color_index<'a, D>(deserializer: D) -> Result<u8, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let value = Value::deserialize(deserializer)?;\n\n match u8::deserialize(value) {\n\n Ok(index) => {\n\n if index < 16 {\n\n error!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Problem with config: indexed_color's index is {}, but a value bigger than 15 \\\n\n was expected; ignoring setting\",\n\n index\n\n );\n\n\n\n // Return value out of range to ignore this color.\n\n Ok(0)\n\n } else {\n\n Ok(index)\n\n }\n", "file_path": "alacritty_terminal/src/config/colors.rs", "rank": 20, "score": 237689.77722475686 }, { "content": "fn deserialize_bell_color<'a, D>(deserializer: D) -> Result<Rgb, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let value = Value::deserialize(deserializer)?;\n\n match Rgb::deserialize(value) {\n\n Ok(value) => Ok(value),\n\n Err(err) => {\n\n error!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Problem with config: {}, using default color value {}\", err, DEFAULT_BELL_COLOR\n\n );\n\n\n\n Ok(DEFAULT_BELL_COLOR)\n\n },\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/config/bell.rs", "rank": 21, "score": 237689.77722475686 }, { "content": "fn attrs_from_sgr_parameters(params: &mut ParamsIter<'_>) -> Vec<Option<Attr>> {\n\n let mut attrs = Vec::with_capacity(params.size_hint().0);\n\n\n\n while let Some(param) = params.next() {\n\n let attr = match param {\n\n [0] => Some(Attr::Reset),\n\n [1] => Some(Attr::Bold),\n\n [2] => Some(Attr::Dim),\n\n [3] => Some(Attr::Italic),\n\n [4, 0] => Some(Attr::CancelUnderline),\n\n [4, 2] => Some(Attr::DoubleUnderline),\n\n [4, ..] 
=> Some(Attr::Underline),\n\n [5] => Some(Attr::BlinkSlow),\n\n [6] => Some(Attr::BlinkFast),\n\n [7] => Some(Attr::Reverse),\n\n [8] => Some(Attr::Hidden),\n\n [9] => Some(Attr::Strike),\n\n [21] => Some(Attr::CancelBold),\n\n [22] => Some(Attr::CancelBoldDim),\n\n [23] => Some(Attr::CancelItalic),\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 22, "score": 234539.05004143924 }, { "content": "fn deserialize_match_background<'a, D>(deserializer: D) -> Result<CellRgb, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let value = Value::deserialize(deserializer)?;\n\n Ok(CellRgb::deserialize(value).unwrap_or_else(|_| default_match_background()))\n\n}\n\n\n", "file_path": "alacritty_terminal/src/config/colors.rs", "rank": 23, "score": 234035.80491345382 }, { "content": "/// Load the configuration file.\n\npub fn load(options: &Options) -> Config {\n\n let config_options = options.config_options().clone();\n\n let config_path = options.config_path().or_else(installed_config);\n\n\n\n if config_path.is_none() {\n\n info!(target: LOG_TARGET_CONFIG, \"No config file found; using default\");\n\n }\n\n\n\n // Load the config using the following fallback behavior:\n\n // - Config path + CLI overrides\n\n // - CLI overrides\n\n // - Default\n\n let mut config = config_path\n\n .and_then(|config_path| load_from(&config_path, config_options.clone()).ok())\n\n .unwrap_or_else(|| Config::deserialize(config_options).unwrap_or_default());\n\n\n\n // Override config with CLI options.\n\n options.override_config(&mut config);\n\n\n\n config\n\n}\n\n\n", "file_path": "alacritty/src/config/mod.rs", "rank": 24, "score": 233194.6548678578 }, { "content": "fn parse_number(input: &[u8]) -> Option<u8> {\n\n if input.is_empty() {\n\n return None;\n\n }\n\n let mut num: u8 = 0;\n\n for c in input {\n\n let c = *c as char;\n\n if let Some(digit) = c.to_digit(10) {\n\n num = match num.checked_mul(10).and_then(|v| v.checked_add(digit as u8)) {\n\n Some(v) => v,\n\n None => return 
None,\n\n }\n\n } else {\n\n return None;\n\n }\n\n }\n\n Some(num)\n\n}\n\n\n\n/// The processor wraps a `vte::Parser` to ultimately call methods on a Handler.\n\npub struct Processor {\n\n state: ProcessorState,\n\n parser: vte::Parser,\n\n}\n\n\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 25, "score": 233077.9296194159 }, { "content": "#[cfg(not(windows))]\n\nfn spawn_daemon<I, S>(program: &str, args: I) -> io::Result<()>\n\nwhere\n\n I: IntoIterator<Item = S> + Copy,\n\n S: AsRef<OsStr>,\n\n{\n\n unsafe {\n\n Command::new(program)\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .pre_exec(|| {\n\n match libc::fork() {\n\n -1 => return Err(io::Error::last_os_error()),\n\n 0 => (),\n\n _ => libc::_exit(0),\n\n }\n\n\n\n if libc::setsid() == -1 {\n\n return Err(io::Error::last_os_error());\n\n }\n\n\n\n Ok(())\n\n })\n\n .spawn()?\n\n .wait()\n\n .map(|_| ())\n\n }\n\n}\n", "file_path": "alacritty/src/daemon.rs", "rank": 26, "score": 232971.47089911153 }, { "content": "fn fallback_default<T, E>(err: E) -> T\n\nwhere\n\n T: Default,\n\n E: Display,\n\n{\n\n error!(target: LOG_TARGET_CONFIG, \"Problem with config: {}; using default value\", err);\n\n T::default()\n\n}\n\n\n", "file_path": "alacritty_terminal/src/config/mod.rs", "rank": 27, "score": 231171.87611246275 }, { "content": "/// Helper to build a COORD from a SizeInfo, returning None in overflow cases.\n\nfn coord_from_sizeinfo(size: &SizeInfo) -> Option<COORD> {\n\n let cols = size.cols().0;\n\n let lines = size.screen_lines().0;\n\n\n\n if cols <= i16::MAX as usize && lines <= i16::MAX as usize {\n\n Some(COORD { X: cols as i16, Y: lines as i16 })\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "alacritty_terminal/src/tty/windows/conpty.rs", "rank": 28, "score": 230369.71378833923 }, { "content": "#[cfg(windows)]\n\nfn installed_config() -> Option<PathBuf> {\n\n dirs::config_dir().map(|path| 
path.join(\"alacritty\\\\alacritty.yml\")).filter(|new| new.exists())\n\n}\n\n\n", "file_path": "alacritty/src/config/mod.rs", "rank": 29, "score": 226838.56480879005 }, { "content": "pub fn cwd(pid: c_int) -> Result<PathBuf, Error> {\n\n let mut info = MaybeUninit::<sys::proc_vnodepathinfo>::uninit();\n\n let info_ptr = info.as_mut_ptr() as *mut c_void;\n\n let size = mem::size_of::<sys::proc_vnodepathinfo>() as c_int;\n\n\n\n let c_str = unsafe {\n\n let pidinfo_size = sys::proc_pidinfo(pid, sys::PROC_PIDVNODEPATHINFO, 0, info_ptr, size);\n\n match pidinfo_size {\n\n c if c < 0 => return Err(io::Error::last_os_error().into()),\n\n s if s != size => return Err(Error::InvalidSize),\n\n _ => CStr::from_ptr(info.assume_init().pvi_cdir.vip_path.as_ptr()),\n\n }\n\n };\n\n\n\n Ok(CString::from(c_str).into_string().map(PathBuf::from)?)\n\n}\n\n\n\n/// Bindings for libproc.\n\n#[allow(non_camel_case_types)]\n\nmod sys {\n", "file_path": "alacritty/src/macos/proc.rs", "rank": 30, "score": 222843.1483034512 }, { "content": "fn deserialize_duration_ms<'a, D>(deserializer: D) -> ::std::result::Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let value = serde_yaml::Value::deserialize(deserializer)?;\n\n match u64::deserialize(value) {\n\n Ok(threshold_ms) => Ok(Duration::from_millis(threshold_ms)),\n\n Err(err) => {\n\n error!(target: LOG_TARGET_CONFIG, \"Problem with config: {}; using default value\", err);\n\n Ok(default_threshold_ms())\n\n },\n\n }\n\n}\n", "file_path": "alacritty/src/config/mouse.rs", "rank": 31, "score": 219546.4996043646 }, { "content": "/// Setup environment variables.\n\npub fn setup_env<C>(config: &Config<C>) {\n\n // Default to 'alacritty' terminfo if it is available, otherwise\n\n // default to 'xterm-256color'. 
May be overridden by user's config\n\n // below.\n\n env::set_var(\n\n \"TERM\",\n\n if Database::from_name(\"alacritty\").is_ok() { \"alacritty\" } else { \"xterm-256color\" },\n\n );\n\n\n\n // Advertise 24-bit color support.\n\n env::set_var(\"COLORTERM\", \"truecolor\");\n\n\n\n // Prevent child processes from inheriting startup notification env.\n\n env::remove_var(\"DESKTOP_STARTUP_ID\");\n\n\n\n // Set env vars from config.\n\n for (key, value) in config.env.iter() {\n\n env::set_var(key, value);\n\n }\n\n}\n", "file_path": "alacritty_terminal/src/tty/mod.rs", "rank": 32, "score": 213242.9646138208 }, { "content": "/// Load all referenced configuration files.\n\nfn load_imports(config: &Value, config_paths: &mut Vec<PathBuf>, recursion_limit: usize) -> Value {\n\n let imports = match config.get(\"import\") {\n\n Some(Value::Sequence(imports)) => imports,\n\n Some(_) => {\n\n error!(target: LOG_TARGET_CONFIG, \"Invalid import type: expected a sequence\");\n\n return Value::Null;\n\n },\n\n None => return Value::Null,\n\n };\n\n\n\n // Limit recursion to prevent infinite loops.\n\n if !imports.is_empty() && recursion_limit == 0 {\n\n error!(target: LOG_TARGET_CONFIG, \"Exceeded maximum configuration import depth\");\n\n return Value::Null;\n\n }\n\n\n\n let mut merged = Value::Null;\n\n\n\n for import in imports {\n\n let mut path = match import {\n", "file_path": "alacritty/src/config/mod.rs", "rank": 33, "score": 212893.8655388638 }, { "content": "fn get_program_info_log(program: GLuint) -> String {\n\n // Get expected log length.\n\n let mut max_length: GLint = 0;\n\n unsafe {\n\n gl::GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut max_length);\n\n }\n\n\n\n // Read the info log.\n\n let mut actual_length: GLint = 0;\n\n let mut buf: Vec<u8> = Vec::with_capacity(max_length as usize);\n\n unsafe {\n\n gl::GetProgramInfoLog(program, max_length, &mut actual_length, buf.as_mut_ptr() as *mut _);\n\n }\n\n\n\n // Build a string.\n\n unsafe {\n\n 
buf.set_len(actual_length as usize);\n\n }\n\n\n\n // XXX should we expect OpenGL to return garbage?\n\n String::from_utf8(buf).unwrap()\n\n}\n\n\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 34, "score": 210358.72427920697 }, { "content": "fn get_shader_info_log(shader: GLuint) -> String {\n\n // Get expected log length.\n\n let mut max_length: GLint = 0;\n\n unsafe {\n\n gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut max_length);\n\n }\n\n\n\n // Read the info log.\n\n let mut actual_length: GLint = 0;\n\n let mut buf: Vec<u8> = Vec::with_capacity(max_length as usize);\n\n unsafe {\n\n gl::GetShaderInfoLog(shader, max_length, &mut actual_length, buf.as_mut_ptr() as *mut _);\n\n }\n\n\n\n // Build a string.\n\n unsafe {\n\n buf.set_len(actual_length as usize);\n\n }\n\n\n\n // XXX should we expect OpenGL to return garbage?\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 35, "score": 210358.72427920697 }, { "content": "fn deserialize_mouse_bindings<'a, D>(deserializer: D) -> Result<Vec<MouseBinding>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n deserialize_bindings(deserializer, bindings::default_mouse_bindings())\n\n}\n\n\n", "file_path": "alacritty/src/config/ui_config.rs", "rank": 36, "score": 210146.1193196145 }, { "content": "fn deserialize_key_bindings<'a, D>(deserializer: D) -> Result<Vec<KeyBinding>, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n deserialize_bindings(deserializer, bindings::default_key_bindings())\n\n}\n\n\n", "file_path": "alacritty/src/config/ui_config.rs", "rank": 37, "score": 210146.11931961446 }, { "content": "#[inline]\n\nfn clear_atlas(atlas: &mut Vec<Atlas>, current_atlas: &mut usize) {\n\n for atlas in atlas.iter_mut() {\n\n atlas.clear();\n\n }\n\n *current_atlas = 0;\n\n}\n\n\n\nimpl<'a> LoadGlyph for LoaderApi<'a> {\n\n fn load_glyph(&mut self, rasterized: &RasterizedGlyph) -> Glyph {\n\n load_glyph(self.active_tex, self.atlas, self.current_atlas, rasterized)\n\n }\n\n\n\n fn clear(&mut 
self) {\n\n clear_atlas(self.atlas, self.current_atlas)\n\n }\n\n}\n\n\n\nimpl<'a> LoadGlyph for RenderApi<'a> {\n\n fn load_glyph(&mut self, rasterized: &RasterizedGlyph) -> Glyph {\n\n load_glyph(self.active_tex, self.atlas, self.current_atlas, rasterized)\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 38, "score": 208006.18405558408 }, { "content": "/// Parse a color specifier from list of attributes.\n\nfn parse_sgr_color(params: &mut dyn Iterator<Item = u16>) -> Option<Color> {\n\n match params.next() {\n\n Some(2) => Some(Color::Spec(Rgb {\n\n r: u8::try_from(params.next()?).ok()?,\n\n g: u8::try_from(params.next()?).ok()?,\n\n b: u8::try_from(params.next()?).ok()?,\n\n })),\n\n Some(5) => Some(Color::Indexed(u8::try_from(params.next()?).ok()?)),\n\n _ => None,\n\n }\n\n}\n\n\n\n/// C0 set of 7-bit control characters (from ANSI X3.4-1977).\n\n#[allow(non_snake_case)]\n\npub mod C0 {\n\n /// Null filler, terminal should ignore this character.\n\n pub const NUL: u8 = 0x00;\n\n /// Start of Header.\n\n pub const SOH: u8 = 0x01;\n\n /// Start of Text, implied end of header.\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 39, "score": 203848.43142909202 }, { "content": "pub fn create_program(vertex: GLuint, fragment: GLuint) -> Result<GLuint, ShaderCreationError> {\n\n unsafe {\n\n let program = gl::CreateProgram();\n\n gl::AttachShader(program, vertex);\n\n gl::AttachShader(program, fragment);\n\n gl::LinkProgram(program);\n\n\n\n let mut success: GLint = 0;\n\n gl::GetProgramiv(program, gl::LINK_STATUS, &mut success);\n\n\n\n if success == i32::from(gl::TRUE) {\n\n Ok(program)\n\n } else {\n\n Err(ShaderCreationError::Link(get_program_info_log(program)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 40, "score": 203006.3184633485 }, { "content": "pub fn create_shader(kind: GLenum, source: &'static str) -> Result<GLuint, ShaderCreationError> {\n\n let len: [GLint; 1] = [source.len() as GLint];\n\n\n\n let 
shader = unsafe {\n\n let shader = gl::CreateShader(kind);\n\n gl::ShaderSource(shader, 1, &(source.as_ptr() as *const _), len.as_ptr());\n\n gl::CompileShader(shader);\n\n shader\n\n };\n\n\n\n let mut success: GLint = 0;\n\n unsafe {\n\n gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut success);\n\n }\n\n\n\n if success == GLint::from(gl::TRUE) {\n\n Ok(shader)\n\n } else {\n\n // Read log.\n\n let log = get_shader_info_log(shader);\n\n\n\n // Cleanup.\n\n unsafe {\n\n gl::DeleteShader(shader);\n\n }\n\n\n\n Err(ShaderCreationError::Compile(log))\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 42, "score": 201416.6662576398 }, { "content": "/// Result of fallible operations concerning a Window.\n\ntype Result<T> = std::result::Result<T, Error>;\n\n\n\nimpl std::error::Error for Error {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n match self {\n\n Error::ContextCreation(err) => err.source(),\n\n Error::Context(err) => err.source(),\n\n Error::Font(err) => err.source(),\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Error::ContextCreation(err) => write!(f, \"Error creating GL context; {}\", err),\n\n Error::Context(err) => write!(f, \"Error operating on render context; {}\", err),\n\n Error::Font(err) => err.fmt(f),\n\n }\n\n }\n", "file_path": "alacritty/src/window.rs", "rank": 43, "score": 201120.74460575863 }, { "content": "/// Terminal version for escape sequence reports.\n\n///\n\n/// This returns the current terminal version as a unique number based on alacritty_terminal's\n\n/// semver version. 
The different versions are padded to ensure that a higher semver version will\n\n/// always report a higher version number.\n\nfn version_number(mut version: &str) -> usize {\n\n if let Some(separator) = version.rfind('-') {\n\n version = &version[..separator];\n\n }\n\n\n\n let mut version_number = 0;\n\n\n\n let semver_versions = version.split('.');\n\n for (i, semver_version) in semver_versions.rev().enumerate() {\n\n let semver_number = semver_version.parse::<usize>().unwrap_or(0);\n\n version_number += usize::pow(100, i as u32) * semver_number;\n\n }\n\n\n\n version_number\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum ClipboardType {\n\n Clipboard,\n\n Selection,\n\n}\n\n\n", "file_path": "alacritty_terminal/src/term/mod.rs", "rank": 44, "score": 200078.31371878297 }, { "content": "/// A pseudoterminal (or PTY).\n\n///\n\n/// This is a refinement of EventedReadWrite that also provides a channel through which we can be\n\n/// notified if the PTY child process does something we care about (other than writing to the TTY).\n\n/// In particular, this allows for race-free child exit notification on UNIX (cf. 
`SIGCHLD`).\n\npub trait EventedPty: EventedReadWrite {\n\n fn child_event_token(&self) -> mio::Token;\n\n\n\n /// Tries to retrieve an event.\n\n ///\n\n /// Returns `Some(event)` on success, or `None` if there are no events to retrieve.\n\n fn next_child_event(&mut self) -> Option<ChildEvent>;\n\n}\n\n\n", "file_path": "alacritty_terminal/src/tty/mod.rs", "rank": 45, "score": 199991.67611525598 }, { "content": "fn print_deprecation_warnings(config: &Config) {\n\n if config.scrolling.faux_multiplier().is_some() {\n\n warn!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Config scrolling.faux_multiplier is deprecated; the alternate scroll escape can now \\\n\n be used to disable it and `scrolling.multiplier` controls the number of scrolled \\\n\n lines\"\n\n );\n\n }\n\n\n\n if config.scrolling.auto_scroll.is_some() {\n\n warn!(\n\n target: LOG_TARGET_CONFIG,\n\n \"Config scrolling.auto_scroll has been removed and is now always disabled, it can be \\\n\n safely removed from the config\"\n\n );\n\n }\n\n\n\n if config.tabspaces.is_some() {\n\n warn!(\n", "file_path": "alacritty/src/config/mod.rs", "rank": 46, "score": 199871.07947781705 }, { "content": "pub fn watch(mut paths: Vec<PathBuf>, event_proxy: EventProxy) {\n\n // Canonicalize all paths, filtering out the ones that do not exist.\n\n paths = paths\n\n .drain(..)\n\n .filter_map(|path| match fs::canonicalize(&path) {\n\n Ok(path) => Some(path),\n\n Err(err) => {\n\n error!(\"Unable to canonicalize config path {:?}: {}\", path, err);\n\n None\n\n },\n\n })\n\n .collect();\n\n\n\n // Don't monitor config if there is no path to watch.\n\n if paths.is_empty() {\n\n return;\n\n }\n\n\n\n // The Duration argument is a debouncing period.\n\n let (tx, rx) = mpsc::channel();\n", "file_path": "alacritty/src/config/monitor.rs", "rank": 47, "score": 196735.10350673224 }, { "content": "// Panic with the last os error as message.\n\nfn panic_shell_spawn() {\n\n panic!(\"Unable to spawn shell: {}\", 
Error::last_os_error());\n\n}\n\n\n\nimpl OnResize for Conpty {\n\n fn on_resize(&mut self, sizeinfo: &SizeInfo) {\n\n if let Some(coord) = coord_from_sizeinfo(sizeinfo) {\n\n let result = unsafe { (self.api.ResizePseudoConsole)(self.handle, coord) };\n\n assert_eq!(result, S_OK);\n\n }\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/tty/windows/conpty.rs", "rank": 48, "score": 185589.3859208726 }, { "content": "fn paste<T: EventListener, A: ActionContext<T>>(ctx: &mut A, contents: &str) {\n\n if ctx.terminal().mode().contains(TermMode::BRACKETED_PASTE) {\n\n ctx.write_to_pty(&b\"\\x1b[200~\"[..]);\n\n ctx.write_to_pty(contents.replace(\"\\x1b\", \"\").into_bytes());\n\n ctx.write_to_pty(&b\"\\x1b[201~\"[..]);\n\n } else {\n\n // In non-bracketed (ie: normal) mode, terminal applications cannot distinguish\n\n // pasted data from keystrokes.\n\n // In theory, we should construct the keystrokes needed to produce the data we are\n\n // pasting... since that's neither practical nor sensible (and probably an impossible\n\n // task to solve in a general way), we'll just replace line breaks (windows and unix\n\n // style) with a single carriage return (\\r, which is what the Enter key produces).\n\n ctx.write_to_pty(contents.replace(\"\\r\\n\", \"\\r\").replace(\"\\n\", \"\\r\").into_bytes());\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum MouseState {\n\n Url(Url),\n\n MessageBar,\n", "file_path": "alacritty/src/input.rs", "rank": 49, "score": 183622.46591687156 }, { "content": "/// Return a Passwd struct with pointers into the provided buf.\n\n///\n\n/// # Unsafety\n\n///\n\n/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.\n\nfn get_pw_entry(buf: &mut [i8; 1024]) -> Passwd<'_> {\n\n // Create zeroed passwd struct.\n\n let mut entry: MaybeUninit<libc::passwd> = MaybeUninit::uninit();\n\n\n\n let mut res: *mut libc::passwd = ptr::null_mut();\n\n\n\n // Try and read the pw file.\n\n let uid = unsafe { 
libc::getuid() };\n\n let status = unsafe {\n\n libc::getpwuid_r(uid, entry.as_mut_ptr(), buf.as_mut_ptr() as *mut _, buf.len(), &mut res)\n\n };\n\n let entry = unsafe { entry.assume_init() };\n\n\n\n if status < 0 {\n\n die!(\"getpwuid_r failed\");\n\n }\n\n\n\n if res.is_null() {\n\n die!(\"pw not found\");\n\n }\n", "file_path": "alacritty_terminal/src/tty/unix.rs", "rank": 50, "score": 182433.97258354005 }, { "content": "#[inline]\n\nfn compute_cell_size(config: &Config, metrics: &crossfont::Metrics) -> (f32, f32) {\n\n let offset_x = f64::from(config.ui_config.font.offset.x);\n\n let offset_y = f64::from(config.ui_config.font.offset.y);\n\n (\n\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n\n (metrics.line_height + offset_y).floor().max(1.) as f32,\n\n )\n\n}\n\n\n", "file_path": "alacritty/src/display.rs", "rank": 51, "score": 175138.71280035735 }, { "content": "/// Helper type which tracks how much of a buffer has been written.\n\nstruct Writing {\n\n source: Cow<'static, [u8]>,\n\n written: usize,\n\n}\n\n\n\n/// All of the mutable state needed to run the event loop.\n\n///\n\n/// Contains list of items to write, current write state, etc. 
Anything that\n\n/// would otherwise be mutated on the `EventLoop` goes here.\n\npub struct State {\n\n write_list: VecDeque<Cow<'static, [u8]>>,\n\n writing: Option<Writing>,\n\n parser: ansi::Processor,\n\n}\n\n\n\npub struct Notifier(pub Sender<Msg>);\n\n\n\nimpl event::Notify for Notifier {\n\n fn notify<B>(&mut self, bytes: B)\n\n where\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 52, "score": 172091.46661614135 }, { "content": "/// Helper type that implements `vte::Perform`.\n\n///\n\n/// Processor creates a Performer when running advance and passes the Performer\n\n/// to `vte::Parser`.\n\nstruct Performer<'a, H: Handler, W: io::Write> {\n\n state: &'a mut ProcessorState,\n\n handler: &'a mut H,\n\n writer: &'a mut W,\n\n}\n\n\n\nimpl<'a, H: Handler + 'a, W: io::Write> Performer<'a, H, W> {\n\n /// Create a performer.\n\n #[inline]\n\n pub fn new<'b>(\n\n state: &'b mut ProcessorState,\n\n handler: &'b mut H,\n\n writer: &'b mut W,\n\n ) -> Performer<'b, H, W> {\n\n Performer { state, handler, writer }\n\n }\n\n}\n\n\n\nimpl Default for Processor {\n\n fn default() -> Processor {\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 53, "score": 168987.2874083902 }, { "content": "/// Deserialize all configuration files as generic Value.\n\nfn parse_config(\n\n path: &PathBuf,\n\n config_paths: &mut Vec<PathBuf>,\n\n recursion_limit: usize,\n\n) -> Result<Value> {\n\n config_paths.push(path.to_owned());\n\n\n\n let mut contents = fs::read_to_string(path)?;\n\n\n\n // Remove UTF-8 BOM.\n\n if contents.starts_with('\\u{FEFF}') {\n\n contents = contents.split_off(3);\n\n }\n\n\n\n // Load configuration file as Value.\n\n let config: Value = match serde_yaml::from_str(&contents) {\n\n Ok(config) => config,\n\n Err(error) => {\n\n // Prevent parsing error with an empty string and commented out file.\n\n if error.to_string() == \"EOF while parsing a value\" {\n", "file_path": "alacritty/src/config/mod.rs", "rank": 54, "score": 
168001.1583241342 }, { "content": "#[derive(Debug, Eq, PartialEq, Copy, Clone, Deserialize)]\n\nstruct RenderableCursor {\n\n text_color: CellRgb,\n\n cursor_color: CellRgb,\n\n key: CursorKey,\n\n point: Point,\n\n rendered: bool,\n\n}\n\n\n\n/// A key for caching cursor glyphs.\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash, Deserialize)]\n\npub struct CursorKey {\n\n pub shape: CursorShape,\n\n pub is_wide: bool,\n\n}\n\n\n", "file_path": "alacritty_terminal/src/term/mod.rs", "rank": 56, "score": 166098.94523617637 }, { "content": "/// Parse colors in XParseColor format.\n\nfn xparse_color(color: &[u8]) -> Option<Rgb> {\n\n if !color.is_empty() && color[0] == b'#' {\n\n parse_legacy_color(&color[1..])\n\n } else if color.len() >= 4 && &color[..4] == b\"rgb:\" {\n\n parse_rgb_color(&color[4..])\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 57, "score": 165733.8223633902 }, { "content": "fn create_gl_window<E>(\n\n mut window: WindowBuilder,\n\n event_loop: &EventLoop<E>,\n\n srgb: bool,\n\n vsync: bool,\n\n dimensions: Option<PhysicalSize<u32>>,\n\n) -> Result<WindowedContext<PossiblyCurrent>> {\n\n if let Some(dimensions) = dimensions {\n\n window = window.with_inner_size(dimensions);\n\n }\n\n\n\n let windowed_context = ContextBuilder::new()\n\n .with_srgb(srgb)\n\n .with_vsync(vsync)\n\n .with_hardware_acceleration(None)\n\n .build_windowed(window, event_loop)?;\n\n\n\n // Make the context current so OpenGL operations can run.\n\n let windowed_context = unsafe { windowed_context.make_current().map_err(|(_, err)| err)? 
};\n\n\n", "file_path": "alacritty/src/window.rs", "rank": 58, "score": 165685.9561289187 }, { "content": "fn create_log_message(record: &log::Record<'_>, target: &str) -> String {\n\n let now = time::strftime(\"%F %T.%f\", &time::now()).unwrap();\n\n let mut message = format!(\"[{}] [{:<5}] [{}] \", now, record.level(), target);\n\n\n\n // Alignment for the lines after the first new line character in the payload. We don't deal\n\n // with fullwidth/unicode chars here, so just `message.len()` is sufficient.\n\n let alignment = message.len();\n\n\n\n // Push lines with added extra padding on the next line, which is trimmed later.\n\n let lines = record.args().to_string();\n\n for line in lines.split('\\n') {\n\n let line = format!(\"{}\\n{:width$}\", line, \"\", width = alignment);\n\n message.push_str(&line);\n\n }\n\n\n\n // Drop extra trailing alignment.\n\n message.truncate(message.len() - alignment);\n\n message\n\n}\n\n\n", "file_path": "alacritty/src/logging.rs", "rank": 59, "score": 164059.1062198013 }, { "content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\n\nfn window_size(\n\n config: &Config,\n\n dimensions: Dimensions,\n\n cell_width: f32,\n\n cell_height: f32,\n\n dpr: f64,\n\n) -> PhysicalSize<u32> {\n\n let padding = config.ui_config.window.padding(dpr);\n\n\n\n let grid_width = cell_width * dimensions.columns.0.max(MIN_COLS) as f32;\n\n let grid_height = cell_height * dimensions.lines.0.max(MIN_SCREEN_LINES) as f32;\n\n\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n\n\n PhysicalSize::new(width as u32, height as u32)\n\n}\n", "file_path": "alacritty/src/display.rs", "rank": 60, "score": 163302.93783434428 }, { "content": "/// Parse colors in `rgb:r(rrr)/g(ggg)/b(bbb)` format.\n\nfn parse_rgb_color(color: &[u8]) -> Option<Rgb> {\n\n let colors = str::from_utf8(color).ok()?.split('/').collect::<Vec<_>>();\n\n\n\n if 
colors.len() != 3 {\n\n return None;\n\n }\n\n\n\n // Scale values instead of filling with `0`s.\n\n let scale = |input: &str| {\n\n if input.len() > 4 {\n\n None\n\n } else {\n\n let max = u32::pow(16, input.len() as u32) - 1;\n\n let value = u32::from_str_radix(input, 16).ok()?;\n\n Some((255 * value / max) as u8)\n\n }\n\n };\n\n\n\n Some(Rgb { r: scale(colors[0])?, g: scale(colors[1])?, b: scale(colors[2])? })\n\n}\n\n\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 61, "score": 162975.72438347258 }, { "content": "/// Parse colors in `#r(rrr)g(ggg)b(bbb)` format.\n\nfn parse_legacy_color(color: &[u8]) -> Option<Rgb> {\n\n let item_len = color.len() / 3;\n\n\n\n // Truncate/Fill to two byte precision.\n\n let color_from_slice = |slice: &[u8]| {\n\n let col = usize::from_str_radix(str::from_utf8(slice).ok()?, 16).ok()? << 4;\n\n Some((col >> (4 * slice.len().saturating_sub(1))) as u8)\n\n };\n\n\n\n Some(Rgb {\n\n r: color_from_slice(&color[0..item_len])?,\n\n g: color_from_slice(&color[item_len..item_len * 2])?,\n\n b: color_from_slice(&color[item_len * 2..])?,\n\n })\n\n}\n\n\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 62, "score": 162975.72438347258 }, { "content": "pub fn initialize(\n\n options: &Options,\n\n event_proxy: EventLoopProxy<Event>,\n\n) -> Result<Option<PathBuf>, log::SetLoggerError> {\n\n log::set_max_level(options.log_level);\n\n\n\n let logger = Logger::new(event_proxy);\n\n let path = logger.file_path();\n\n log::set_boxed_logger(Box::new(logger))?;\n\n\n\n Ok(path)\n\n}\n\n\n\npub struct Logger {\n\n logfile: Mutex<OnDemandLogFile>,\n\n stdout: Mutex<LineWriter<Stdout>>,\n\n event_proxy: Mutex<EventLoopProxy<Event>>,\n\n}\n\n\n\nimpl Logger {\n", "file_path": "alacritty/src/logging.rs", "rank": 63, "score": 162336.29875859205 }, { "content": "fn main() {\n\n println!(\"cargo:rustc-env=GIT_HASH={}\", commit_hash());\n\n\n\n let dest = env::var(\"OUT_DIR\").unwrap();\n\n let mut file = 
File::create(&Path::new(&dest).join(\"gl_bindings.rs\")).unwrap();\n\n\n\n Registry::new(Api::Gl, (3, 3), Profile::Core, Fallbacks::All, [\"GL_ARB_blend_func_extended\"])\n\n .write_bindings(GlobalGenerator, &mut file)\n\n .unwrap();\n\n\n\n #[cfg(windows)]\n\n embed_resource::compile(\"../extra/windows/windows.rc\");\n\n}\n\n\n", "file_path": "alacritty/build.rs", "rank": 64, "score": 159922.64689870435 }, { "content": "#[inline]\n\nfn load_glyph(\n\n active_tex: &mut GLuint,\n\n atlas: &mut Vec<Atlas>,\n\n current_atlas: &mut usize,\n\n rasterized: &RasterizedGlyph,\n\n) -> Glyph {\n\n // At least one atlas is guaranteed to be in the `self.atlas` list; thus\n\n // the unwrap.\n\n match atlas[*current_atlas].insert(rasterized, active_tex) {\n\n Ok(glyph) => glyph,\n\n Err(AtlasInsertError::Full) => {\n\n *current_atlas += 1;\n\n if *current_atlas == atlas.len() {\n\n let new = Atlas::new(ATLAS_SIZE);\n\n *active_tex = 0; // Atlas::new binds a texture. Ugh this is sloppy.\n\n atlas.push(new);\n\n }\n\n load_glyph(active_tex, atlas, current_atlas, rasterized)\n\n },\n\n Err(AtlasInsertError::GlyphTooLarge) => Glyph {\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 65, "score": 158099.38226962477 }, { "content": "/// This trait defines the behaviour needed to read and/or write to a stream.\n\n/// It defines an abstraction over mio's interface in order to allow either one\n\n/// read/write object or a separate read and write object.\n\npub trait EventedReadWrite {\n\n type Reader: io::Read;\n\n type Writer: io::Write;\n\n\n\n fn register(\n\n &mut self,\n\n _: &mio::Poll,\n\n _: &mut dyn Iterator<Item = mio::Token>,\n\n _: mio::Ready,\n\n _: mio::PollOpt,\n\n ) -> io::Result<()>;\n\n fn reregister(&mut self, _: &mio::Poll, _: mio::Ready, _: mio::PollOpt) -> io::Result<()>;\n\n fn deregister(&mut self, _: &mio::Poll) -> io::Result<()>;\n\n\n\n fn reader(&mut self) -> &mut Self::Reader;\n\n fn read_token(&self) -> mio::Token;\n\n fn writer(&mut self) -> &mut 
Self::Writer;\n\n fn write_token(&self) -> mio::Token;\n\n}\n\n\n\n/// Events concerning TTY child processes.\n\n#[derive(Debug, PartialEq)]\n\npub enum ChildEvent {\n\n /// Indicates the child has exited.\n\n Exited,\n\n}\n\n\n", "file_path": "alacritty_terminal/src/tty/mod.rs", "rank": 66, "score": 157806.87039479468 }, { "content": "/// Merge two key/value mappings.\n\nfn merge_mapping(mut base: Mapping, replacement: Mapping) -> Mapping {\n\n for (key, value) in replacement {\n\n let value = match base.remove(&key) {\n\n Some(base_value) => merge(base_value, value),\n\n None => value,\n\n };\n\n base.insert(key, value);\n\n }\n\n\n\n base\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn merge_primitive() {\n\n let base = Value::Null;\n\n let replacement = Value::Bool(true);\n", "file_path": "alacritty/src/config/serde_utils.rs", "rank": 67, "score": 157053.0728284591 }, { "content": "pub fn default_title() -> String {\n\n DEFAULT_NAME.to_string()\n\n}\n\n\n\nimpl WindowConfig {\n\n #[inline]\n\n pub fn dynamic_title(&self) -> bool {\n\n self.dynamic_title.0\n\n }\n\n\n\n #[inline]\n\n pub fn set_dynamic_title(&mut self, dynamic_title: bool) {\n\n self.dynamic_title.0 = dynamic_title;\n\n }\n\n\n\n #[inline]\n\n pub fn dimensions(&self) -> Option<Dimensions> {\n\n if self.dimensions.columns.0 != 0\n\n && self.dimensions.lines.0 != 0\n\n && self.startup_mode != StartupMode::Maximized\n", "file_path": "alacritty/src/config/window.rs", "rank": 68, "score": 149290.17870740956 }, { "content": "fn default_log_level() -> LevelFilter {\n\n LevelFilter::Warn\n\n}\n\n\n", "file_path": "alacritty/src/config/debug.rs", "rank": 69, "score": 147786.7027558108 }, { "content": "/// Converts the string slice into a Windows-standard representation for \"W\"-\n\n/// suffixed function variants, which accept UTF-16 encoded string values.\n\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n\n 
OsStr::new(value).encode_wide().chain(once(0)).collect()\n\n}\n", "file_path": "alacritty_terminal/src/tty/windows/mod.rs", "rank": 70, "score": 146089.34981570355 }, { "content": "/// Move by whitespace separated word, like W/B/E/gE in vi.\n\nfn word<T: EventListener>(\n\n term: &mut Term<T>,\n\n mut point: Point<usize>,\n\n direction: Direction,\n\n side: Side,\n\n) -> Point<usize> {\n\n // Make sure we jump above wide chars.\n\n point = term.expand_wide(point, direction);\n\n\n\n if direction == side {\n\n // Skip whitespace until right before a word.\n\n let mut next_point = advance(term, point, direction);\n\n while !is_boundary(term, point, direction) && is_space(term, next_point) {\n\n point = next_point;\n\n next_point = advance(term, point, direction);\n\n }\n\n\n\n // Skip non-whitespace until right inside word boundary.\n\n let mut next_point = advance(term, point, direction);\n\n while !is_boundary(term, point, direction) && !is_space(term, next_point) {\n", "file_path": "alacritty_terminal/src/vi_mode.rs", "rank": 71, "score": 142648.37352204975 }, { "content": "/// Move by semantically separated word, like w/b/e/ge in vi.\n\nfn semantic<T: EventListener>(\n\n term: &mut Term<T>,\n\n mut point: Point<usize>,\n\n direction: Direction,\n\n side: Side,\n\n) -> Point<usize> {\n\n // Expand semantically based on movement direction.\n\n let expand_semantic = |point: Point<usize>| {\n\n // Do not expand when currently on a semantic escape char.\n\n let cell = &term.grid()[point.line][point.col];\n\n if term.semantic_escape_chars().contains(cell.c)\n\n && !cell.flags.intersects(Flags::WIDE_CHAR_SPACER | Flags::LEADING_WIDE_CHAR_SPACER)\n\n {\n\n point\n\n } else if direction == Direction::Left {\n\n term.semantic_search_left(point)\n\n } else {\n\n term.semantic_search_right(point)\n\n }\n\n };\n", "file_path": "alacritty_terminal/src/vi_mode.rs", "rank": 72, "score": 142648.37352204975 }, { "content": "fn default_match_background() -> CellRgb {\n\n 
CellRgb::Rgb(Rgb { r: 0xff, g: 0xff, b: 0xff })\n\n}\n\n\n\n#[serde(default)]\n\n#[derive(Deserialize, Debug, Copy, Clone, Default, PartialEq, Eq)]\n\npub struct BarColors {\n\n #[serde(deserialize_with = \"failure_default\")]\n\n foreground: Option<Rgb>,\n\n #[serde(deserialize_with = \"failure_default\")]\n\n background: Option<Rgb>,\n\n}\n\n\n\n#[serde(default)]\n\n#[derive(Deserialize, Clone, Debug, PartialEq, Eq)]\n\npub struct PrimaryColors {\n\n #[serde(deserialize_with = \"failure_default\")]\n\n pub background: Rgb,\n\n #[serde(deserialize_with = \"failure_default\")]\n\n pub foreground: Rgb,\n", "file_path": "alacritty_terminal/src/config/colors.rs", "rank": 73, "score": 140633.74220381508 }, { "content": "/// Find next end of line to move to.\n\nfn last<T>(term: &Term<T>, mut point: Point<usize>) -> Point<usize> {\n\n let cols = term.cols();\n\n\n\n // Expand across wide cells.\n\n point = term.expand_wide(point, Direction::Right);\n\n\n\n // Find last non-empty cell in the current line.\n\n let occupied = last_occupied_in_line(term, point.line).unwrap_or_default();\n\n\n\n if point.col < occupied.col {\n\n // Jump to last occupied cell when not already at or beyond it.\n\n occupied\n\n } else if is_wrap(term, point) {\n\n // Jump to last occupied cell across linewraps.\n\n while point.line > 0 && is_wrap(term, point) {\n\n point.line -= 1;\n\n }\n\n\n\n last_occupied_in_line(term, point.line).unwrap_or(point)\n\n } else {\n\n // Jump to last column when beyond the last occupied cell.\n\n Point::new(point.line, cols - 1)\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/vi_mode.rs", "rank": 74, "score": 140334.31044072044 }, { "content": "/// Find first non-empty cell in line.\n\nfn first_occupied_in_line<T>(term: &Term<T>, line: usize) -> Option<Point<usize>> {\n\n (0..term.cols().0)\n\n .map(|col| Point::new(line, Column(col)))\n\n .find(|&point| !is_space(term, point))\n\n}\n\n\n", "file_path": "alacritty_terminal/src/vi_mode.rs", "rank": 75, 
"score": 139394.9940100836 }, { "content": "/// Find last non-empty cell in line.\n\nfn last_occupied_in_line<T>(term: &Term<T>, line: usize) -> Option<Point<usize>> {\n\n (0..term.cols().0)\n\n .map(|col| Point::new(line, Column(col)))\n\n .rfind(|&point| !is_space(term, point))\n\n}\n\n\n", "file_path": "alacritty_terminal/src/vi_mode.rs", "rank": 76, "score": 139394.9940100836 }, { "content": "#[cfg(all(feature = \"x11\", not(any(target_os = \"macos\", windows))))]\n\nfn x_embed_window(window: &GlutinWindow, parent_id: std::os::raw::c_ulong) {\n\n let (xlib_display, xlib_window) = match (window.xlib_display(), window.xlib_window()) {\n\n (Some(display), Some(window)) => (display, window),\n\n _ => return,\n\n };\n\n\n\n let xlib = Xlib::open().expect(\"get xlib\");\n\n\n\n unsafe {\n\n let atom = (xlib.XInternAtom)(xlib_display as *mut _, \"_XEMBED\".as_ptr() as *const _, 0);\n\n (xlib.XChangeProperty)(\n\n xlib_display as _,\n\n xlib_window as _,\n\n atom,\n\n atom,\n\n 32,\n\n PropModeReplace,\n\n [0, 1].as_ptr(),\n\n 2,\n\n );\n", "file_path": "alacritty/src/window.rs", "rank": 77, "score": 139332.88772750433 }, { "content": "/// Find next non-empty cell to move to.\n\nfn first_occupied<T>(term: &Term<T>, mut point: Point<usize>) -> Point<usize> {\n\n let cols = term.cols();\n\n\n\n // Expand left across wide chars, since we're searching lines left to right.\n\n point = term.expand_wide(point, Direction::Left);\n\n\n\n // Find first non-empty cell in current line.\n\n let occupied = first_occupied_in_line(term, point.line)\n\n .unwrap_or_else(|| Point::new(point.line, cols - 1));\n\n\n\n // Jump across wrapped lines if we're already at this line's first occupied cell.\n\n if point == occupied {\n\n let mut occupied = None;\n\n\n\n // Search for non-empty cell in previous lines.\n\n for line in (point.line + 1)..term.total_lines() {\n\n if !is_wrap(term, Point::new(line, cols - 1)) {\n\n break;\n\n }\n\n\n", "file_path": "alacritty_terminal/src/vi_mode.rs", 
"rank": 78, "score": 138229.5660208111 }, { "content": "fn cubic_bezier(p0: f64, p1: f64, p2: f64, p3: f64, x: f64) -> f64 {\n\n (1.0 - x).powi(3) * p0\n\n + 3.0 * (1.0 - x).powi(2) * x * p1\n\n + 3.0 * (1.0 - x) * x.powi(2) * p2\n\n + x.powi(3) * p3\n\n}\n\n\n\nimpl VisualBell {\n\n /// Ring the visual bell, and return its intensity.\n\n pub fn ring(&mut self) -> f64 {\n\n let now = Instant::now();\n\n self.start_time = Some(now);\n\n self.intensity_at_instant(now)\n\n }\n\n\n\n /// Get the currently intensity of the visual bell. The bell's intensity\n\n /// ramps down from 1.0 to 0.0 at a rate determined by the bell's duration.\n\n pub fn intensity(&self) -> f64 {\n\n self.intensity_at_instant(Instant::now())\n\n }\n", "file_path": "alacritty_terminal/src/term/mod.rs", "rank": 79, "score": 135878.12577408817 }, { "content": " ErrorKind::Interrupted | ErrorKind::WouldBlock => {\n\n break;\n\n },\n\n _ => return Err(err),\n\n },\n\n }\n\n }\n\n\n\n if processed > 0 {\n\n // Queue terminal redraw.\n\n self.event_proxy.send_event(Event::Wakeup);\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn pty_write(&mut self, state: &mut State) -> io::Result<()> {\n\n state.ensure_next();\n\n\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 80, "score": 132416.0212083869 }, { "content": "use crate::thread;\n\nuse crate::tty;\n\n\n\n/// Max bytes to read from the PTY.\n\nconst MAX_READ: usize = 0x10_000;\n\n\n\n/// Messages that may be sent to the `EventLoop`.\n\n#[derive(Debug)]\n\npub enum Msg {\n\n /// Data that should be written to the PTY.\n\n Input(Cow<'static, [u8]>),\n\n\n\n /// Indicates that the `EventLoop` should shut down, as Alacritty is shutting down.\n\n Shutdown,\n\n\n\n /// Instruction to resize the PTY.\n\n Resize(SizeInfo),\n\n}\n\n\n\n/// The main event!.. 
loop.\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 81, "score": 132413.21471076278 }, { "content": " ) -> io::Result<()>\n\n where\n\n X: Write,\n\n {\n\n let mut processed = 0;\n\n let mut terminal = None;\n\n\n\n loop {\n\n match self.pty.reader().read(&mut buf[..]) {\n\n Ok(0) => break,\n\n Ok(got) => {\n\n // Record bytes read; used to limit time spent in pty_read.\n\n processed += got;\n\n\n\n // Send a copy of bytes read to a subscriber. Used for\n\n // example with ref test recording.\n\n writer = writer.map(|w| {\n\n w.write_all(&buf[..got]).unwrap();\n\n w\n\n });\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 82, "score": 132412.74785143757 }, { "content": "//! The main event loop which performs I/O on the pseudoterminal.\n\n\n\nuse std::borrow::Cow;\n\nuse std::collections::VecDeque;\n\nuse std::fs::File;\n\nuse std::io::{self, ErrorKind, Read, Write};\n\nuse std::marker::Send;\n\nuse std::sync::Arc;\n\nuse std::thread::JoinHandle;\n\n\n\nuse log::error;\n\n#[cfg(not(windows))]\n\nuse mio::unix::UnixReady;\n\nuse mio::{self, Events, PollOpt, Ready};\n\nuse mio_extras::channel::{self, Receiver, Sender};\n\n\n\nuse crate::ansi;\n\nuse crate::event::{self, Event, EventListener};\n\nuse crate::sync::FairMutex;\n\nuse crate::term::{SizeInfo, Term};\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 83, "score": 132412.6723182083 }, { "content": " // Register TTY through EventedRW interface.\n\n self.pty.register(&self.poll, &mut tokens, Ready::readable(), poll_opts).unwrap();\n\n\n\n let mut events = Events::with_capacity(1024);\n\n\n\n let mut pipe = if self.ref_test {\n\n Some(File::create(\"./alacritty.recording\").expect(\"create alacritty recording\"))\n\n } else {\n\n None\n\n };\n\n\n\n 'event_loop: loop {\n\n if let Err(err) = self.poll.poll(&mut events, None) {\n\n match err.kind() {\n\n ErrorKind::Interrupted => continue,\n\n _ => panic!(\"EventLoop polling error: {:?}\", err),\n\n }\n\n }\n\n\n\n 
for event in events.iter() {\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 84, "score": 132408.34159152675 }, { "content": " error!(\"Error reading from PTY in event loop: {}\", err);\n\n break 'event_loop;\n\n }\n\n }\n\n\n\n if event.readiness().is_writable() {\n\n if let Err(err) = self.pty_write(&mut state) {\n\n error!(\"Error writing to PTY in event loop: {}\", err);\n\n break 'event_loop;\n\n }\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n // Register write interest if necessary.\n\n let mut interest = Ready::readable();\n\n if state.needs_write() {\n\n interest.insert(Ready::writable());\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 85, "score": 132405.5241496801 }, { "content": " B: Into<Cow<'static, [u8]>>,\n\n {\n\n let bytes = bytes.into();\n\n // terminal hangs if we send 0 bytes through.\n\n if bytes.len() == 0 {\n\n return;\n\n }\n\n\n\n self.0.send(Msg::Input(bytes)).expect(\"send event loop msg\");\n\n }\n\n}\n\n\n\nimpl event::OnResize for Notifier {\n\n fn on_resize(&mut self, size: &SizeInfo) {\n\n self.0.send(Msg::Resize(*size)).expect(\"expected send event loop msg\");\n\n }\n\n}\n\n\n\nimpl Default for State {\n\n fn default() -> State {\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 86, "score": 132403.2143706173 }, { "content": " 'write_many: while let Some(mut current) = state.take_current() {\n\n 'write_one: loop {\n\n match self.pty.writer().write(current.remaining_bytes()) {\n\n Ok(0) => {\n\n state.set_current(Some(current));\n\n break 'write_many;\n\n },\n\n Ok(n) => {\n\n current.advance(n);\n\n if current.finished() {\n\n state.goto_next();\n\n break 'write_one;\n\n }\n\n },\n\n Err(err) => {\n\n state.set_current(Some(current));\n\n match err.kind() {\n\n ErrorKind::Interrupted | ErrorKind::WouldBlock => break 'write_many,\n\n _ => return Err(err),\n\n }\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 87, "score": 132402.54901682638 }, { "content": "\n\n pub fn 
channel(&self) -> Sender<Msg> {\n\n self.tx.clone()\n\n }\n\n\n\n /// Drain the channel.\n\n ///\n\n /// Returns `false` when a shutdown message was received.\n\n fn drain_recv_channel(&mut self, state: &mut State) -> bool {\n\n while let Ok(msg) = self.rx.try_recv() {\n\n match msg {\n\n Msg::Input(input) => state.write_list.push_back(input),\n\n Msg::Shutdown => return false,\n\n Msg::Resize(size) => self.pty.on_resize(&size),\n\n }\n\n }\n\n\n\n true\n\n }\n\n\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 88, "score": 132398.6720077375 }, { "content": " match event.token() {\n\n token if token == channel_token => {\n\n if !self.channel_event(channel_token, &mut state) {\n\n break 'event_loop;\n\n }\n\n },\n\n\n\n token if token == self.pty.child_event_token() => {\n\n if let Some(tty::ChildEvent::Exited) = self.pty.next_child_event() {\n\n if !self.hold {\n\n self.terminal.lock().exit();\n\n }\n\n self.event_proxy.send_event(Event::Wakeup);\n\n break 'event_loop;\n\n }\n\n },\n\n\n\n token\n\n if token == self.pty.read_token()\n\n || token == self.pty.write_token() =>\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 89, "score": 132397.21903739794 }, { "content": " /// Returns a `bool` indicating whether or not the event loop should continue running.\n\n #[inline]\n\n fn channel_event(&mut self, token: mio::Token, state: &mut State) -> bool {\n\n if !self.drain_recv_channel(state) {\n\n return false;\n\n }\n\n\n\n self.poll\n\n .reregister(&self.rx, token, Ready::readable(), PollOpt::edge() | PollOpt::oneshot())\n\n .unwrap();\n\n\n\n true\n\n }\n\n\n\n #[inline]\n\n fn pty_read<X>(\n\n &mut self,\n\n state: &mut State,\n\n buf: &mut [u8],\n\n mut writer: Option<&mut X>,\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 90, "score": 132395.6493013755 }, { "content": "///\n\n/// Handles all the PTY I/O and runs the PTY parser which updates terminal\n\n/// state.\n\npub struct EventLoop<T: tty::EventedPty, U: 
EventListener> {\n\n poll: mio::Poll,\n\n pty: T,\n\n rx: Receiver<Msg>,\n\n tx: Sender<Msg>,\n\n terminal: Arc<FairMutex<Term<U>>>,\n\n event_proxy: U,\n\n hold: bool,\n\n ref_test: bool,\n\n}\n\n\n\n/// Helper type which tracks how much of a buffer has been written.\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 91, "score": 132394.73865784993 }, { "content": " {\n\n #[cfg(unix)]\n\n if UnixReady::from(event.readiness()).is_hup() {\n\n // Don't try to do I/O on a dead PTY.\n\n continue;\n\n }\n\n\n\n if event.readiness().is_readable() {\n\n if let Err(err) = self.pty_read(&mut state, &mut buf, pipe.as_mut())\n\n {\n\n // On Linux, a `read` on the master side of a PTY can fail\n\n // with `EIO` if the client side hangs up. In that case,\n\n // just loop back round for the inevitable `Exited` event.\n\n // This sucks, but checking the process is either racy or\n\n // blocking.\n\n #[cfg(target_os = \"linux\")]\n\n if err.kind() == ErrorKind::Other {\n\n continue;\n\n }\n\n\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 92, "score": 132393.65237662225 }, { "content": "\n\n // Get reference to terminal. 
Lock is acquired on initial\n\n // iteration and held until there's no bytes left to parse\n\n // or we've reached `MAX_READ`.\n\n if terminal.is_none() {\n\n terminal = Some(self.terminal.lock());\n\n }\n\n let terminal = terminal.as_mut().unwrap();\n\n\n\n // Run the parser.\n\n for byte in &buf[..got] {\n\n state.parser.advance(&mut **terminal, *byte, &mut self.pty.writer());\n\n }\n\n\n\n // Exit if we've processed enough bytes.\n\n if processed > MAX_READ {\n\n break;\n\n }\n\n },\n\n Err(err) => match err.kind() {\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 93, "score": 132393.564203824 }, { "content": " fn advance(&mut self, n: usize) {\n\n self.written += n;\n\n }\n\n\n\n #[inline]\n\n fn remaining_bytes(&self) -> &[u8] {\n\n &self.source[self.written..]\n\n }\n\n\n\n #[inline]\n\n fn finished(&self) -> bool {\n\n self.written >= self.source.len()\n\n }\n\n}\n\n\n\nimpl<T, U> EventLoop<T, U>\n\nwhere\n\n T: tty::EventedPty + event::OnResize + Send + 'static,\n\n U: EventListener + Send + 'static,\n\n{\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 94, "score": 132389.0930642473 }, { "content": " /// Create a new event loop.\n\n pub fn new(\n\n terminal: Arc<FairMutex<Term<U>>>,\n\n event_proxy: U,\n\n pty: T,\n\n hold: bool,\n\n ref_test: bool,\n\n ) -> EventLoop<T, U> {\n\n let (tx, rx) = channel::channel();\n\n EventLoop {\n\n poll: mio::Poll::new().expect(\"create mio Poll\"),\n\n pty,\n\n tx,\n\n rx,\n\n terminal,\n\n event_proxy,\n\n hold,\n\n ref_test,\n\n }\n\n }\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 95, "score": 132387.25036924946 }, { "content": " State { write_list: VecDeque::new(), parser: ansi::Processor::new(), writing: None }\n\n }\n\n}\n\n\n\nimpl State {\n\n #[inline]\n\n fn ensure_next(&mut self) {\n\n if self.writing.is_none() {\n\n self.goto_next();\n\n }\n\n }\n\n\n\n #[inline]\n\n fn goto_next(&mut self) {\n\n self.writing = 
self.write_list.pop_front().map(Writing::new);\n\n }\n\n\n\n #[inline]\n\n fn take_current(&mut self) -> Option<Writing> {\n\n self.writing.take()\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 96, "score": 132385.51739821292 } ]
Rust
src/engine.rs
0ncorhynchus/mictyris
5f8bffe0fb6833048bac9d240050b92f5acd9a0d
mod auxiliary; pub mod procedure; mod storage; use self::auxiliary::*; use self::procedure::*; use self::storage::*; use crate::lexer::Identifier; use crate::parser::{Datum, Formals, ListDatum, Lit}; use crate::pass::*; use std::fmt; use std::rc::Rc; use Value::*; #[derive(Clone, Debug, PartialEq)] pub enum Value { Symbol(Identifier), Character(char), Number(f64), Pair(Location, Location, bool), Vector(Vec<Location>), Str(String), Bool(bool), Null, Unspecified, Undefined, Procedure(Proc), } impl Value { pub fn number(&self) -> Option<f64> { match self { Number(num) => Some(*num), _ => None, } } pub fn pair(&self) -> Option<(Location, Location, bool)> { match self { Pair(car, cdr, mutable) => Some((car.clone(), cdr.clone(), *mutable)), _ => None, } } } #[derive(Clone, Debug, PartialEq)] pub struct EvalError { pub message: String, } impl fmt::Display for EvalError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.message) } } #[derive(Default)] pub struct Engine { env: Env, store: Store, } impl Engine { pub fn new() -> Self { let mut engine = Self::default(); engine.register_proc("list", procedure::list); engine.register_proc("cons", procedure::cons); engine.register_proc("<", procedure::less); engine.register_proc("+", procedure::add); engine.register_proc("car", procedure::car); engine.register_proc("cdr", procedure::cdr); engine.register_proc("set-car!", procedure::setcar); engine.register_proc("eqv?", procedure::eqv); engine.register_proc("apply", procedure::apply); engine.register_proc("call-with-current-continuation", procedure::cwcc); engine.register_proc("call/cc", procedure::cwcc); engine.register_proc("values", procedure::values); engine.register_proc("call-with-values", procedure::cwv); engine } pub fn register(&mut self, variable: &str, value: Value) { let location = self.store.reserve(); self.store.update(&location, value); self.env.borrow_mut().insert(variable, location); } pub fn register_proc<F: 'static>(&mut self, 
variable: &str, proc: F) where F: Fn(&[Value], ExprCont) -> CommCont, { let location = self.store.reserve(); self.store .update(&location, Procedure(Proc::new(Rc::new(proc)))); self.env.borrow_mut().insert(variable, location); } pub fn eval(&mut self, ast: &AST) -> Answer { let expr_cont: ExprCont = Rc::new(|mut values: Vec<Value>| { let answer = values.pop().unwrap_or(Unspecified); let cont: CommCont = Rc::new(move |_store: &mut Store| Ok(answer.clone())); cont }); let cont = eval(ast, Rc::clone(&self.env), expr_cont); cont(&mut self.store) } pub fn eval_and_print(&mut self, ast: &AST) { let expr_cont: ExprCont = Rc::new(|mut values: Vec<Value>| { if let Some(value) = values.pop() { write(value) } else { Rc::new(|_| Ok(Unspecified)) } }); let cont = eval(ast, Rc::clone(&self.env), expr_cont); match cont(&mut self.store) { Ok(_) => (), Err(err) => eprintln!("Error: {}", err), } } } fn eval(ast: &AST, env: Env, expr_cont: ExprCont) -> CommCont { match ast { AST::Const(lit) => eval_literal(lit, expr_cont), AST::Var(ident) => eval_variable(ident, env, expr_cont), AST::Call(f, args) => eval_proc_call(f, args, env, expr_cont), AST::Lambda(args, commands, expr) => match args { Formals::List(args) => eval_lambda(args, commands, expr, env, expr_cont), Formals::Dot(args, var) => eval_lambda_dot(args, var, commands, expr, env, expr_cont), }, AST::Cond(test, conseq, alter) => match alter { Some(alter) => eval_conditional1(test, conseq, alter, env, expr_cont), None => eval_conditional2(test, conseq, env, expr_cont), }, AST::Assign(ident, expr) => eval_assign(ident, expr, env, expr_cont), } } fn eval_literal(lit: &Lit, expr_cont: ExprCont) -> CommCont { fn literal(store: &mut Store, lit: &Lit) -> Value { match lit { Lit::Bool(b) => Bool(*b), Lit::Number(n) => Number(*n), Lit::Character(c) => Character(*c), Lit::Str(s) => Str(s.clone()), Lit::Quote(d) => eval_datum(store, d), } } fn eval_datum(store: &mut Store, datum: &Datum) -> Value { match datum { Datum::Bool(b) => Bool(*b), 
Datum::Number(n) => Number(*n), Datum::Character(c) => Character(*c), Datum::Str(s) => Str(s.clone()), Datum::Symbol(ident) => Symbol(ident.clone()), Datum::List(data) => match data { ListDatum::List(data) => data.iter().rev().fold(Null, |acc, x| { let cdr = store.reserve(); store.update(&cdr, acc); let car = store.reserve(); let x = eval_datum(store, x); store.update(&car, x); Pair(car, cdr, true) }), ListDatum::Cons(data, last) => { let last = eval_datum(store, last); data.iter().rev().fold(last, |acc, x| { let cdr = store.reserve(); store.update(&cdr, acc); let car = store.reserve(); let x = eval_datum(store, x); store.update(&car, x); Pair(car, cdr, true) }) } ListDatum::Abbrev(_) => unimplemented!(), }, Datum::Vector(data) => { let mut locations = Vec::with_capacity(data.len()); for elem in data { let elem = eval_datum(store, elem); let loc = store.reserve(); store.update(&loc, elem); locations.push(loc); } Vector(locations) } } } let lit = lit.clone(); Rc::new(move |store| send(literal(store, &lit), &expr_cont)(store)) } fn eval_variable(ident: &str, env: Env, expr_cont: ExprCont) -> CommCont { let location = match env.borrow().lookup(ident) { Some(location) => location, None => { return wrong("undefined variable"); } }; let cont = single(move |value| send(value, &expr_cont)); hold(location, cont) } fn eval_proc_call(f: &AST, args: &[AST], env: Env, cont: ExprCont) -> CommCont { let mut exprs = Vec::with_capacity(args.len() + 1); exprs.push(f.clone()); exprs.extend_from_slice(args); let cont: ExprCont = Rc::new(move |values: Vec<Value>| { let (f, args) = values.split_first().unwrap(); applicate(f, args, Rc::clone(&cont)) }); eval_list(&exprs, env, cont) } fn eval_list(exprs: &[AST], env: Env, cont: ExprCont) -> CommCont { match exprs.split_first() { None => cont(vec![]), Some((head, tail)) => { let tail = tail.to_vec(); let copied_env = Rc::clone(&env); let cont = single(move |value: Value| { let cont = Rc::clone(&cont); let cont: ExprCont = Rc::new(move |mut 
values| { values.insert(0, value.clone()); cont(values) }); eval_list(&tail, Rc::clone(&copied_env), cont) }); eval(head, env, cont) } } } fn eval_lambda( args: &[String], commands: &[AST], expr: &AST, env: Env, cont: ExprCont, ) -> CommCont { let args = args.to_vec(); let commands = commands.to_vec(); let expr = expr.clone(); Rc::new(move |store: &mut Store| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); let env = Rc::clone(&env); let inner = Rc::new(move |values: &[Value], cont: ExprCont| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); if values.len() == args.len() { let env = Rc::clone(&env); let f = Rc::new(move |locations: &[Location]| { let env = extends(&env, &args, &locations); let cont = eval(&expr, Rc::clone(&env), Rc::clone(&cont)); eval_commands(&commands, env, cont) }); tievals(f, values) } else { wrong("wrong number of arguments") } }); let proc = Procedure(Proc::new(inner)); send(proc, &cont)(store) }) } #[allow(unused_variables)] fn eval_lambda_dot( args: &[String], var: &str, commands: &[AST], expr: &AST, env: Env, cont: ExprCont, ) -> CommCont { let min_args = args.len(); let mut args = args.to_vec(); args.push(var.to_string()); let commands = commands.to_vec(); let expr = expr.clone(); Rc::new(move |store: &mut Store| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); let env = Rc::clone(&env); let location = store.reserve(); let inner = Rc::new(move |values: &[Value], cont: ExprCont| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); if values.len() >= min_args { let env = Rc::clone(&env); let f = Rc::new(move |locations: &[Location]| { let env = extends(&env, &args, &locations); let cont = eval(&expr, Rc::clone(&env), Rc::clone(&cont)); eval_commands(&commands, env, cont) }); tievalsrest(f, values, min_args) } else { wrong("too few arguments") } }); let proc = Procedure(Proc::new(inner)); send(proc, 
&cont)(store) }) } fn eval_commands(commands: &[AST], env: Env, cont: CommCont) -> CommCont { match commands.split_first() { Some((head, tail)) => { let tail = tail.to_vec(); let copied_env = Rc::clone(&env); let cont = Rc::new(move |_: Vec<Value>| { eval_commands(&tail, Rc::clone(&copied_env), Rc::clone(&cont)) }); eval(head, env, cont) } None => cont, } } fn eval_conditional1(test: &AST, conseq: &AST, alter: &AST, env: Env, cont: ExprCont) -> CommCont { let conseq = conseq.clone(); let alter = alter.clone(); let copied_env = Rc::clone(&env); let cont = single(move |value| { let cont = Rc::clone(&cont); let env = Rc::clone(&copied_env); if truish(value) { eval(&conseq.clone(), env, cont) } else { eval(&alter.clone(), env, cont) } }); eval(test, env, cont) } fn eval_conditional2(test: &AST, conseq: &AST, env: Env, cont: ExprCont) -> CommCont { let conseq = conseq.clone(); let copied_env = Rc::clone(&env); let cont = single(move |value| { if truish(value) { eval(&conseq.clone(), Rc::clone(&copied_env), Rc::clone(&cont)) } else { send(Unspecified, &cont) } }); eval(test, env, cont) } fn eval_assign(ident: &str, expr: &AST, env: Env, cont: ExprCont) -> CommCont { let ident = ident.to_string(); let copied_env = Rc::clone(&env); let cont = single(move |value: Value| { let location = match env.borrow().lookup(&ident) { Some(location) => location, None => { return wrong("undefined variable"); } }; assign(location, value, send(Unspecified, &cont)) }); eval(expr, copied_env, cont) } pub type Answer = Result<Value, EvalError>; #[derive(Clone)] pub struct Proc { inner: Rc<dyn Fn(&[Value], ExprCont) -> CommCont>, } impl Proc { fn new(inner: Rc<dyn Fn(&[Value], ExprCont) -> CommCont>) -> Self { Self { inner } } } impl fmt::Debug for Proc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Proc") } } impl PartialEq for Proc { fn eq(&self, other: &Self) -> bool { Rc::ptr_eq(&self.inner, &other.inner) } } pub type CommCont = Rc<dyn Fn(&mut Store) -> Answer>; pub 
type ExprCont = Rc<dyn Fn(Vec<Value>) -> CommCont>; pub fn write(value: Value) -> CommCont { fn fmt(store: &Store, value: &Value) -> String { match value { Symbol(ident) => format!("{}", ident), Character(c) => format!("#\\{}", c), Number(n) => format!("{}", n), Pair(loc1, loc2, _) => format!( "({} . {})", fmt(store, &store.get(loc1)), fmt(store, &store.get(loc2)), ), Vector(locations) => { let mut strings = Vec::with_capacity(locations.len()); for loc in locations { strings.push(fmt(store, &store.get(loc))); } format!("#({})", strings.join(" ")) } Str(s) => s.clone(), Bool(b) => format!("{}", b), Null => "()".to_string(), Unspecified => "<unspecified>".to_string(), Undefined => "<undefined>".to_string(), Procedure(_) => "<procedure>".to_string(), } } Rc::new(move |store: &mut Store| { println!("{}", fmt(store, &value)); Ok(Unspecified) }) }
mod auxiliary; pub mod procedure; mod storage; use self::auxiliary::*; use self::procedure::*; use self::storage::*; use crate::lexer::Identifier; use crate::parser::{Datum, Formals, ListDatum, Lit}; use crate::pass::*; use std::fmt; use std::rc::Rc; use Value::*; #[derive(Clone, Debug, PartialEq)] pub enum Value { Symbol(Identifier), Character(char), Number(f64), Pair(Location, Location, bool), Vector(Vec<Location>), Str(String), Bool(bool), Null, Unspecified, Undefined, Procedure(Proc), } impl Value { pub fn number(&self) -> Option<f64> { match self { Number(num) => Some(*num), _ => None, } } pub fn pair(&self) -> Option<(Location, Location, bool)> { match self { Pair(car, cdr, mutable) => Some((car.clone(), cdr.clone(), *mutable)), _ => None, } } } #[derive(Clone, Debug, PartialEq)] pub struct EvalError { pub message: String, } impl fmt::Display for EvalError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.message) } } #[derive(Default)] pub struct Engine { env: Env, store: Store, } impl Engine { pub fn new() -> Self { let mut engine = Self::default(); engine.register_proc("list", procedure::list); engine.register_proc("cons", procedure::cons); engine.register_proc("<", procedure::less); engine.register_proc("+", procedure::add); engine.register_proc("car", procedure::car); engine.register_proc("cdr", procedure::cdr); engine.register_proc("set-car!", procedure::setcar); engine.register_proc("eqv?", procedure::eqv); engine.register_proc("apply", procedure::apply); engine.register_proc("call-with-current-continuation", procedure::cwcc); engine.register_proc("call/cc", procedure::cwcc); engine.register_proc("values", procedure::values); engine.register_proc("call-with-values", procedure::cwv); engine } pub fn register(&mut self, variable: &str, value: Value) { let location = self.store.reserve(); self.store.update(&location, value); self.env.borrow_mut().insert(variable, location); } pub fn register_proc<F: 'static>(&mut self, 
variable: &str, proc: F) where F: Fn(&[Value], ExprCont) -> CommCont, { let location = self.store.reserve(); self.store .update(&location, Procedure(Proc::new(Rc::new(proc)))); self.env.borrow_mut().insert(variable, location); } pub fn eval(&mut self, ast: &AST) -> Answer { let expr_cont: ExprCont = Rc::new(|mut values: Vec<Value>| { let answer = values.pop().unwrap_or(Unspecified); let cont: CommCont = Rc::new(move |_store: &mut Store| Ok(answer.clone())); cont }); let cont = eval(ast, Rc::clone(&self.env), expr_cont); cont(&mut self.store) } pub fn eval_and_print(&mut self, ast: &AST) { let expr_cont: ExprCont = Rc::new(|mut values: Vec<Value>| { if let Some(value) = values.pop() { write(value) } else { Rc::new(|_| Ok(Unspecified)) } }); let cont = eval(ast, Rc::clone(&self.env), expr_cont); match cont(&mut self.store) { Ok(_) => (), Err(err) => eprintln!("Error: {}", err), } } } fn eval(ast: &AST, env: Env, expr_cont: ExprCont) -> CommCont { match ast { AST::Const(lit) => eval_literal(lit, expr_cont), AST::Var(ident) => eval_variable(ident, env, expr_cont), AST::Call(f, args) => eval_proc_call(f, args, env, expr_cont), AST::Lambda(args, commands, expr) => match args { Formals::List(args) => eval_lambda(args, commands, expr, env, expr_cont), Formals::Dot(args, var) => eval_lambda_dot(args, var, commands, expr, env, expr_cont), }, AST::Cond(test, conseq, alter) => match alter { Some(alter) => eval_conditional1(test, conseq, alter, env, expr_cont), None => eval_conditional2(test, conseq, env, expr_cont), }, AST::Assign(ident, expr) => eval_assign(ident, expr, env, expr_cont), } } fn eval_literal(lit: &Lit, expr_cont: ExprCont) -> CommCont { fn literal(store: &mut Store, lit: &Lit) -> Value { match lit { Lit::Bool(b) => Bool(*b), Lit::Number(n) => Number(*n), Lit::Character(c) => Character(*c), Lit::Str(s) => Str(s.clone()), Lit::Quote(d) => eval_datum(store, d), } } fn eval_datum(store: &mut Store, datum: &Datum) -> Value { match datum { Datum::Bool(b) => Bool(*b), 
Datum::Number(n) => Number(*n), Datum::Character(c) => Character(*c), Datum::Str(s) => Str(s.clone()), Datum::Symbol(ident) => Symbol(ident.clone()), Datum::List(data) => match data { ListDatum::List(data) => data.iter().rev().fold(Null, |acc, x| { let cdr = store.reserve(); store.update(&cdr, acc); let car = store.reserve(); let x = eval_datum(store, x); store.update(&car, x); Pair(car, cdr, true) }), ListDatum::Cons(data, last) => { let last = eval_datum(store, last); data.iter().rev().fold(last, |acc, x| { let cdr = store.reserve(); store.update(&cdr, acc); let car = store.reserve(); let x = eval_datum(store, x); store.update(&car, x); Pair(car, cdr, true) }) } ListDatum::Abbrev(_) => unimplemented!(), }, Datum::Vector(data) => { let mut locations = Vec::with_capacity(data.len()); for elem in data { let elem = eval_datum(store, elem); let loc = store.reserve(); store.update(&loc, elem); locations.push(loc); } Vector(locations) } } } let lit = lit.clone(); Rc::new(move |store| send(literal(store, &lit), &expr_cont)(store)) } fn eval_variable(ident: &str, env: Env, expr_cont: ExprCont) -> CommCont { let location = match env.borrow().lookup(ident) { Some(location) => location, None => { return wrong("undefined variable"); } }; let cont = single(move |value| send(value, &expr_cont)); hold(location, cont) } fn eval_proc_call(f: &AST, args: &[AST], env: Env, cont: ExprCont) -> CommCont { let mut exprs = Vec::with_capacity(args.len() + 1); exprs.push(f.clone()); exprs.extend_from_slice(args); let cont: ExprCont = Rc::new(move |values: Vec<Value>| { let (f, args) = values.split_first().unwrap(); applicate(f, args, Rc::clone(&cont)) }); eval_list(&exprs, env, cont) } fn eval_list(exprs: &[AST], env: Env, cont: ExprCont) -> CommCont { match exprs.split_first() { None => cont(vec![]), Some((head, tail)) => { let tail = tail.to_vec(); let copied_env = Rc::clone(&env); let cont = single(move |value: Value| { let cont = Rc::clone(&cont); let cont: ExprCont = Rc::new(move |mut 
values| { values.insert(0, value.clone()); cont(values) }); eval_list(&tail, Rc::clone(&copied_env), cont) }); eval(head, env, cont) } } } fn eval_lambda( args: &[String], commands: &[AST], expr: &AST, env: Env, cont: ExprCont, ) -> CommCont { let args = args.to_vec(); let commands = commands.to_vec(); let expr = expr.clone(); Rc::new(move |store: &mut Store| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); let env = Rc::clone(&env); let inner = Rc::new(move |values: &[Value], cont: ExprCont| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); if values.len() == args.len() { let env = Rc::clone(&env); let f = Rc::new(move |locations: &[Location]| { let env = extends(&env, &args, &locations); let cont = eval(&expr, Rc::clone(&env), Rc::clone(&cont)); eval_commands(&commands, env, cont) }); tievals(f, values) } else { wrong("wrong number of arguments") } }); let proc = Procedure(Proc::new(inner)); send(proc, &cont)(store) }) } #[allow(unused_variables)] fn eval_lambda_dot( args: &[String], var: &str, commands: &[AST], expr: &AST, env: Env, cont: ExprCont, ) -> CommCont { let min_args = args.len(); let mut args = args.to_vec(); args.push(var.to_string()); let commands = commands.to_vec(); let expr = expr.clone(); Rc::new(move |store: &mut Store| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); let env = Rc::clone(&env); let location = store.reserve(); let inner = Rc::new(move |values: &[Value], cont: ExprCont| { let args = args.clone(); let commands = commands.clone(); let expr = expr.clone(); if values.len() >= min_args { let env = Rc::clone(&env); let f = Rc::new(move |locations: &[Location]| { let env = extends(&env, &args, &locations); let cont = eval(&expr, Rc::clone(&env), Rc::clone(&cont)); eval_commands(&commands, env, cont) }); tievalsrest(f, values, min_args) } else { wrong("too few arguments") } }); let proc = Procedure(Proc::new(inner)); send(proc, 
&cont)(store) }) } fn eval_commands(commands: &[AST], env: Env, cont: CommCont) -> CommCont { match commands.split_first() { Some((head, tail)) => { let tail = tail.to_vec(); let copied_env = Rc::clone(&env); let cont = Rc::new(move |_: Vec<Value>| { eval_commands(&tail, Rc::clone(&copied_env), Rc::clone(&cont)) }); eval(head, env, cont) } None => cont, } } fn eval_conditional1(test: &AST, conseq: &AST, alter: &AST, env: Env, cont: ExprCont) -> CommCont { let conseq = conseq.clone(); let alter = alter.clone(); let copied_env = Rc::clone(&env); let cont = single(move |value| { let cont = Rc::clone(&cont); let env = Rc::clone(&copied_env); if truish(value) { eval(&conseq.clone(), env, cont) } else { eval(&alter.clone(), env, cont) } }); eval(test, env, cont) } fn eval_conditional2(test: &AST, conseq: &AST, env: Env, cont: ExprCont) -> CommCont { let conseq = conseq.clone(); let copied_env = Rc::clone(&env); let cont = single(move |value| { if truish(value) { eval(&conseq.clone(), Rc::clone(&copied_env), Rc::clone(&cont)) } else { send(Unspecified, &cont) } }); eval(test, env, cont) } fn eval_assign(ident: &str, expr: &AST, env: Env, cont: ExprCont) -> CommCont { let ident = ident.to_string(); let copied_env = Rc::clone(&env); let cont = single(move |value: Value| { let location = match env.borrow().lookup(&ident) { Some(location) => location, None => { return wrong("undefined variable"); } }; assign(location, value, send(Unspecified, &cont)) }); eval(expr, copied_env, cont) } pub type Answer = Result<Value, EvalError>; #[derive(Clone)] pub struct Proc { inner: Rc<dyn Fn(&[Value], ExprCont) -> CommCont>, } impl Proc { fn new(inner: Rc<dyn Fn(&[Value], ExprCont) -> CommCont>) -> Self { Self { inner } } } impl fmt::Debug for Proc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Proc") } } impl PartialEq for Proc { fn eq(&self, other: &Self) -> bool { Rc::ptr_eq(&self.inner, &other.inner) } } pub type CommCont = Rc<dyn Fn(&mut Store) -> Answer>; pub 
type ExprCont = Rc<dyn Fn(Vec<Value>) -> CommCont>;
pub fn write(value: Value) -> CommCont { fn fmt(store: &Store, value: &Value) -> String { match value { Symbol(ident) => format!("{}", ident), Character(c) => format!("#\\{}", c), Number(n) => format!("{}", n), Pair(loc1, loc2, _) => format!( "({} . {})", fmt(store, &store.get(loc1)), fmt(store, &store.get(loc2)), ), Vector(locations) => { let mut strings = Vec::with_capacity(locations.len()); for loc in locations { strings.push(fmt(store, &store.get(loc))); } format!("#({})", strings.join(" ")) } Str(s) => s.clone(), Bool(b) => format!("{}", b), Null => "()".to_string(), Unspecified => "<unspecified>".to_string(), Undefined => "<undefined>".to_string(), Procedure(_) => "<procedure>".to_string(), } } Rc::new(move |store: &mut Store| { println!("{}", fmt(store, &value)); Ok(Unspecified) }) }
function_block-full_function
[ { "content": "pub fn cdr(values: &[Value], cont: ExprCont) -> CommCont {\n\n onearg(\n\n |arg, cont| match arg.pair() {\n\n Some((_, cdr, _)) => hold(cdr, cont),\n\n None => wrong(\"non-pair argument\"),\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 4, "score": 243173.6051900477 }, { "content": "pub fn car(values: &[Value], cont: ExprCont) -> CommCont {\n\n onearg(\n\n |arg, cont| match arg.pair() {\n\n Some((car, _, _)) => hold(car, cont),\n\n None => wrong(\"non-pair argument\"),\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 5, "score": 243173.6051900477 }, { "content": "pub fn onearg<F: 'static>(f: F, values: &[Value], cont: ExprCont) -> CommCont\n\nwhere\n\n F: Fn(&Value, ExprCont) -> CommCont,\n\n{\n\n match values {\n\n [arg] => f(arg, cont),\n\n _ => wrong(\"wrong number of arguments\"),\n\n }\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 8, "score": 231611.68536269834 }, { "content": "pub fn twoarg<F: 'static>(f: F, values: &[Value], cont: ExprCont) -> CommCont\n\nwhere\n\n F: Fn(&Value, &Value, ExprCont) -> CommCont,\n\n{\n\n match values {\n\n [arg1, arg2] => f(arg1, arg2, cont),\n\n _ => wrong(\"wrong number of arguments\"),\n\n }\n\n}\n", "file_path": "src/engine/auxiliary.rs", "rank": 9, "score": 231611.68536269834 }, { "content": "pub fn applicate(f: &Value, args: &[Value], cont: ExprCont) -> CommCont {\n\n match f {\n\n Procedure(proc) => (proc.inner)(args, cont),\n\n _ => wrong(\"bad procedure\"),\n\n }\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 10, "score": 229000.6432012883 }, { "content": "pub fn extends(root: &Env, args: &[String], locations: &[Location]) -> Env {\n\n let mut env = Environment::make_scope(Rc::clone(root));\n\n let itr = args.iter().zip(locations.iter().cloned());\n\n for (ident, location) in itr {\n\n env.insert(ident, location);\n\n }\n\n Rc::new(RefCell::new(env))\n\n}\n\n\n", "file_path": 
"src/engine/auxiliary.rs", "rank": 12, "score": 214943.18103095662 }, { "content": "pub fn values(values: &[Value], cont: ExprCont) -> CommCont {\n\n cont(values.to_vec())\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 13, "score": 212329.58123037138 }, { "content": "pub fn hold(location: Location, cont: ExprCont) -> CommCont {\n\n Rc::new(move |store: &mut Store| {\n\n let cont = send(store.get(&location), &cont);\n\n cont(store)\n\n })\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 15, "score": 208868.34517005362 }, { "content": "pub fn send(value: Value, cont: &ExprCont) -> CommCont {\n\n cont(vec![value])\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 16, "score": 207967.08881278677 }, { "content": "pub fn cwv(values: &[Value], cont: ExprCont) -> CommCont {\n\n twoarg(\n\n |lhs, rhs, cont| {\n\n let rhs = rhs.clone();\n\n let cont = Rc::clone(&cont);\n\n applicate(\n\n &lhs,\n\n &[],\n\n Rc::new(move |values| applicate(&rhs, &values, Rc::clone(&cont))),\n\n )\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n", "file_path": "src/engine/procedure.rs", "rank": 17, "score": 207894.10277114413 }, { "content": "pub fn setcar(values: &[Value], cont: ExprCont) -> CommCont {\n\n twoarg(\n\n |pair, value, cont| match pair.pair() {\n\n Some((car, _, mutable)) => {\n\n if mutable {\n\n assign(car, value.clone(), send(Unspecified, &cont))\n\n } else {\n\n wrong(\"immutable argument\")\n\n }\n\n }\n\n None => wrong(\"non-pair argument\"),\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 18, "score": 207894.10277114416 }, { "content": "pub fn list(values: &[Value], cont: ExprCont) -> CommCont {\n\n match values.split_first() {\n\n Some((head, tail)) => {\n\n let head = head.clone();\n\n list(\n\n tail,\n\n single(move |value| cons(&[head.clone(), value], Rc::clone(&cont))),\n\n )\n\n }\n\n None => send(Null, &cont),\n\n }\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 19, "score": 
207894.10277114416 }, { "content": "// support function\n\npub fn valueslist(values: &[Value], cont: ExprCont) -> CommCont {\n\n onearg(\n\n |arg, cont| match arg {\n\n Pair(_, _, _) => {\n\n let arg = arg.clone();\n\n let cont = cont.clone();\n\n cdr(\n\n &[arg.clone()],\n\n Rc::new(move |values| {\n\n let arg = arg.clone();\n\n let cont = cont.clone();\n\n valueslist(\n\n &values,\n\n Rc::new(move |values| {\n\n let cont = cont.clone();\n\n car(\n\n &[arg.clone()],\n\n single(move |value| {\n\n let mut values = values.clone();\n\n values.insert(0, value);\n", "file_path": "src/engine/procedure.rs", "rank": 20, "score": 207894.10277114416 }, { "content": "pub fn eqv(values: &[Value], cont: ExprCont) -> CommCont {\n\n twoarg(|lhs, rhs, cont| send(Bool(lhs == rhs), &cont), values, cont)\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 21, "score": 207894.10277114416 }, { "content": "pub fn add(values: &[Value], cont: ExprCont) -> CommCont {\n\n let mut args = Vec::with_capacity(values.len());\n\n for v in values {\n\n match v.number() {\n\n Some(num) => args.push(num),\n\n None => return wrong(\"non-numeric argument\"),\n\n }\n\n }\n\n send(Number(args.into_iter().sum()), &cont)\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 22, "score": 207894.10277114416 }, { "content": "pub fn cwcc(values: &[Value], cont: ExprCont) -> CommCont {\n\n onearg(\n\n |arg, cont| match arg {\n\n Procedure(proc) => {\n\n let proc = Rc::clone(&proc.inner);\n\n let cont = Rc::clone(&cont);\n\n Rc::new(move |store| {\n\n let new_cont = Rc::clone(&cont);\n\n let new_proc = Procedure(Proc::new(Rc::new(move |values, _cont| {\n\n new_cont(values.to_vec())\n\n })));\n\n proc(&[new_proc], Rc::clone(&cont))(store)\n\n })\n\n }\n\n _ => wrong(\"bad procedure argument\"),\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 23, "score": 207894.10277114416 }, { "content": "pub fn apply(values: &[Value], cont: ExprCont) -> CommCont 
{\n\n twoarg(\n\n |operator, operand, cont| match operator {\n\n Procedure(proc) => {\n\n let proc = Rc::clone(&proc.inner);\n\n valueslist(\n\n &[operand.clone()],\n\n Rc::new(move |values| proc(&values, Rc::clone(&cont))),\n\n )\n\n }\n\n _ => wrong(\"bad procedure argument\"),\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 24, "score": 207894.10277114413 }, { "content": "pub fn cons(values: &[Value], cont: ExprCont) -> CommCont {\n\n twoarg(\n\n |head, tail, cont| {\n\n let head = head.clone();\n\n let tail = tail.clone();\n\n Rc::new(move |store: &mut Store| {\n\n let loc1 = store.reserve();\n\n store.update(&loc1, head.clone());\n\n let loc2 = store.reserve();\n\n store.update(&loc2, tail.clone());\n\n send(Pair(loc1, loc2, true), &cont)(store)\n\n })\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 25, "score": 207894.10277114416 }, { "content": "pub fn less(values: &[Value], cont: ExprCont) -> CommCont {\n\n twoarg(\n\n |lhs, rhs, cont| {\n\n let lhs = match lhs.number() {\n\n Some(num) => num,\n\n None => return wrong(\"non-numeric argument\"),\n\n };\n\n let rhs = match rhs.number() {\n\n Some(num) => num,\n\n None => return wrong(\"non-numeric argument\"),\n\n };\n\n send(Bool(lhs < rhs), &cont)\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 26, "score": 207894.10277114416 }, { "content": "pub fn single<F: 'static>(f: F) -> ExprCont\n\nwhere\n\n F: Fn(Value) -> CommCont,\n\n{\n\n Rc::new(move |mut values| {\n\n if values.len() == 1 {\n\n f(values.pop().unwrap())\n\n } else {\n\n wrong(\"wrong number of return values\")\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 27, "score": 204344.07473813996 }, { "content": "pub fn assign(location: Location, value: Value, cont: CommCont) -> CommCont {\n\n Rc::new(move |store: &mut Store| {\n\n store.update(&location, value.clone());\n\n 
cont(store)\n\n })\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 29, "score": 193715.07080382708 }, { "content": "pub fn wrong(message: &'static str) -> CommCont {\n\n let err = EvalError {\n\n message: message.to_string(),\n\n };\n\n Rc::new(move |_| Err(err.clone()))\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 30, "score": 189881.1018194704 }, { "content": "pub fn tievals(f: Rc<dyn Fn(&[Location]) -> CommCont>, values: &[Value]) -> CommCont {\n\n match values.split_first() {\n\n Some((head, tail)) => {\n\n let head = head.clone();\n\n let tail = tail.to_vec();\n\n let f = Rc::clone(&f);\n\n Rc::new(move |store: &mut Store| {\n\n let location = store.reserve();\n\n let loc = location.clone();\n\n let f = Rc::clone(&f);\n\n let new_f = Rc::new(move |locations: &[Location]| {\n\n let mut new_locs = Vec::with_capacity(locations.len() + 1);\n\n new_locs.push(loc.clone());\n\n new_locs.extend_from_slice(locations);\n\n f(&new_locs)\n\n });\n\n\n\n store.update(&location, head.clone());\n\n tievals(new_f, &tail)(store)\n\n })\n\n }\n\n None => f(&[]),\n\n }\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 31, "score": 186369.9902899071 }, { "content": "pub fn tievalsrest(f: Rc<dyn Fn(&[Location]) -> CommCont>, values: &[Value], n: usize) -> CommCont {\n\n let rest = values[..n].to_vec();\n\n list(\n\n &values[n..],\n\n single(move |value| {\n\n let mut rest = rest.clone();\n\n rest.push(value);\n\n tievals(Rc::clone(&f), &rest)\n\n }),\n\n )\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 32, "score": 178576.65756667516 }, { "content": "pub fn truish(value: Value) -> bool {\n\n value != Bool(false)\n\n}\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 33, "score": 175793.40151237213 }, { "content": "pub fn pass(expr: &Expr) -> Option<AST> {\n\n match expr {\n\n Expr::Variable(variable) => Some(AST::Var(variable.clone())),\n\n Expr::Literal(literal) => Some(AST::Const(literal.clone())),\n\n 
Expr::ProcCall(operator, operands) => {\n\n let operator = Box::new(pass(operator)?);\n\n let operands = operands.iter().map(pass).collect::<Option<Vec<_>>>()?;\n\n Some(AST::Call(operator, operands))\n\n }\n\n Expr::Lambda(formals, defs, body) => {\n\n let inner_formals =\n\n Formals::List(defs.iter().map(|d| d.get_var().to_string()).collect());\n\n let mut commands = body.iter().map(pass).collect::<Option<Vec<_>>>()?;\n\n let expr = Box::new(commands.pop()?);\n\n let inner = AST::Lambda(inner_formals, commands, expr);\n\n\n\n let mut args = Vec::with_capacity(defs.len());\n\n for d in defs {\n\n args.push(pass(d.get_expr())?);\n\n }\n", "file_path": "src/pass.rs", "rank": 34, "score": 158866.65836120793 }, { "content": "fn is_initial(c: char) -> bool {\n\n match c {\n\n '!' | '$' | '%' | '&' | '*' | '/' | ':' => true,\n\n '<' | '=' | '>' | '?' | '^' | '_' | '~' => true,\n\n c if 'a' <= c && c <= 'z' => true,\n\n c if 'A' <= c && c <= 'Z' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 38, "score": 80855.78523974445 }, { "content": "fn is_delimiter(c: char) -> bool {\n\n match c {\n\n '(' | ')' | '\"' | ';' => true,\n\n c => is_whitespace(c),\n\n }\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 39, "score": 80855.78523974445 }, { "content": "fn is_whitespace(c: char) -> bool {\n\n match c {\n\n ' ' | '\\t' | '\\n' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 40, "score": 80855.78523974445 }, { "content": "fn is_subsequent(c: char) -> bool {\n\n match c {\n\n c if is_initial(c) => true,\n\n c if c.is_digit(10) => true,\n\n '+' | '-' | '.' 
| '@' => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Token {\n\n pub kind: TokenKind,\n\n}\n\n\n\nimpl Token {\n\n pub fn new(kind: TokenKind) -> Self {\n\n Self { kind }\n\n }\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 41, "score": 80855.78523974445 }, { "content": " inner: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn insert(&mut self, ident: &str, location: Location) {\n\n self.inner.insert(ident.to_lowercase(), location);\n\n }\n\n\n\n pub fn lookup(&self, ident: &str) -> Option<Location> {\n\n match self.inner.get(ident) {\n\n Some(location) => Some(location.clone()),\n\n None => self.root.as_ref()?.borrow().lookup(ident),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Store;\n\n\n\nimpl Store {\n", "file_path": "src/engine/storage.rs", "rank": 42, "score": 49607.5474468838 }, { "content": "use super::Value;\n\nuse std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Location(Rc<RefCell<Value>>);\n\n\n\npub type Env = Rc<RefCell<Environment>>;\n\n\n\n#[derive(Default, Clone)]\n\npub struct Environment {\n\n root: Option<Env>,\n\n inner: HashMap<String, Location>,\n\n}\n\n\n\nimpl Environment {\n\n pub fn make_scope(root: Env) -> Self {\n\n Self {\n\n root: Some(root),\n", "file_path": "src/engine/storage.rs", "rank": 43, "score": 49602.997268819134 }, { "content": " pub fn get(&self, location: &Location) -> Value {\n\n location.0.borrow().clone()\n\n }\n\n\n\n pub fn reserve(&mut self) -> Location {\n\n Location(Rc::new(RefCell::new(Value::Undefined)))\n\n }\n\n\n\n pub fn update(&mut self, location: &Location, value: Value) {\n\n *location.0.borrow_mut() = value;\n\n }\n\n}\n", "file_path": "src/engine/storage.rs", "rank": 44, "score": 49599.894383046274 }, { "content": "use super::*;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n", "file_path": "src/engine/auxiliary.rs", "rank": 45, "score": 49068.14870109735 }, { 
"content": " cont(values)\n\n }),\n\n )\n\n }),\n\n )\n\n }),\n\n )\n\n }\n\n Null => cont(vec![]),\n\n _ => wrong(\"non-list argument\"),\n\n },\n\n values,\n\n cont,\n\n )\n\n}\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 46, "score": 48967.91228247738 }, { "content": "use super::*;\n\n\n", "file_path": "src/engine/procedure.rs", "rank": 47, "score": 48958.93186402434 }, { "content": "fn main() -> io::Result<()> {\n\n let mut engine = engine::Engine::new();\n\n engine.register(\"pi\", Number(std::f64::consts::PI));\n\n\n\n loop {\n\n print!(\"mictyris> \");\n\n io::stdout().flush()?;\n\n\n\n let mut buffer = String::new();\n\n if io::stdin().read_line(&mut buffer)? == 0 {\n\n // EOF\n\n println!();\n\n break;\n\n }\n\n\n\n match buffer.trim() {\n\n \"\" => continue,\n\n \"quit\" => break,\n\n _ => (),\n\n }\n", "file_path": "src/main.rs", "rank": 48, "score": 37542.10285693365 }, { "content": " Cons(Vec<Datum>, Box<Datum>),\n\n Abbrev(Box<Datum>),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Formals {\n\n List(Vec<String>),\n\n Dot(Vec<String>, String),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Def {\n\n Variable(String, Box<Expr>),\n\n}\n\n\n\nimpl Def {\n\n pub fn get_var(&self) -> &str {\n\n match self {\n\n Self::Variable(var, _) => var,\n\n }\n", "file_path": "src/parser.rs", "rank": 61, "score": 24.4854933544541 }, { "content": " pub fn new(input: &'a str) -> Self {\n\n Self {\n\n lexer: Lookahead::new(Lexer::new(input)),\n\n }\n\n }\n\n\n\n pub fn parse(&mut self) -> Option<Expr> {\n\n let tok = self.lexer.next()?;\n\n match tok.kind {\n\n TokenKind::Ident(ident) => Some(Variable(ident.var()?)),\n\n TokenKind::Bool(b) => Some(Literal(Lit::Bool(b))),\n\n TokenKind::Number(num) => Some(Literal(Lit::Number(num))),\n\n TokenKind::Character(c) => Some(Literal(Lit::Character(c))),\n\n TokenKind::Str(s) => Some(Literal(Lit::Str(s))),\n\n TokenKind::OpenParen => match self.lexer.first()?.kind.ident() {\n\n Some(ident) => 
match ident {\n\n Identifier::Lambda => self.parse_lambda(),\n\n Identifier::If => self.parse_conditional(),\n\n Identifier::Set => self.parse_assign(),\n\n Identifier::Var(_) => self.parse_call(),\n", "file_path": "src/parser.rs", "rank": 62, "score": 22.6635396667816 }, { "content": " let mut parser = Parser::new(input);\n\n let answer = ProcCall(\n\n Box::new(Lambda(\n\n Formals::List(vec![\"x\".to_string()]),\n\n vec![],\n\n vec![Variable(\"x\".to_string())],\n\n )),\n\n vec![Literal(Lit::Bool(true))],\n\n );\n\n assert_eq!(parser.parse(), Some(answer));\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 63, "score": 21.07826865993251 }, { "content": "use crate::lexer::*;\n\n\n\nuse Expr::*;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Expr {\n\n Variable(String), // variable\n\n Literal(Lit), // literal\n\n ProcCall(Box<Expr>, Vec<Expr>), // procedure call\n\n Lambda(Formals, Vec<Def>, Vec<Expr>), // lambda expression\n\n Cond(Box<Expr>, Box<Expr>, Option<Box<Expr>>), // conditional\n\n Assignment(String, Box<Expr>), // assignment\n\n Derived, // derived expression\n\n MacroUse, // macro use\n\n MacroBlock, // macro block\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Lit {\n\n Bool(bool),\n", "file_path": "src/parser.rs", "rank": 64, "score": 20.50721708253855 }, { "content": " And,\n\n Or,\n\n Case,\n\n Let,\n\n LetStar,\n\n Letrec,\n\n Do,\n\n Delay,\n\n Quasiquote,\n\n}\n\n\n\nimpl Identifier {\n\n pub fn var(&self) -> Option<String> {\n\n match self {\n\n Self::Var(var) => Some(var.to_string()),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 65, "score": 19.588998368345344 }, { "content": " let mut exprs = Vec::new();\n\n loop {\n\n if self.eat_close_paren() {\n\n break;\n\n }\n\n exprs.push(self.parse()?);\n\n }\n\n Some(exprs)\n\n }\n\n\n\n fn parse_datum(&mut self) -> Option<Datum> {\n\n let tok = self.lexer.next()?;\n\n match tok.kind {\n\n TokenKind::Ident(ident) => Some(Datum::Symbol(ident)),\n\n 
TokenKind::Bool(b) => Some(Datum::Bool(b)),\n\n TokenKind::Number(num) => Some(Datum::Number(num)),\n\n TokenKind::Character(c) => Some(Datum::Character(c)),\n\n TokenKind::Str(s) => Some(Datum::Str(s)),\n\n TokenKind::OpenParen => self.parse_list(),\n\n TokenKind::SharpParen => self.parse_vector(),\n", "file_path": "src/parser.rs", "rank": 66, "score": 17.77319737237873 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum TokenKind {\n\n Ident(Identifier),\n\n Bool(bool),\n\n Number(f64),\n\n Character(char),\n\n Str(String),\n\n OpenParen,\n\n CloseParen,\n\n SharpParen,\n\n Quote,\n\n Backquote,\n\n Comma,\n\n CommaAt,\n\n Dot,\n\n}\n\n\n\nimpl TokenKind {\n\n pub fn ident(&self) -> Option<&Identifier> {\n\n match self {\n", "file_path": "src/lexer.rs", "rank": 67, "score": 17.3725697570015 }, { "content": " Number(f64),\n\n Character(char),\n\n Str(String),\n\n Quote(Datum),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Datum {\n\n Bool(bool),\n\n Number(f64),\n\n Character(char),\n\n Str(String),\n\n Symbol(Identifier),\n\n List(ListDatum),\n\n Vector(Vec<Datum>),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum ListDatum {\n\n List(Vec<Datum>),\n", "file_path": "src/parser.rs", "rank": 68, "score": 17.207950434450176 }, { "content": "\n\n let mut parser = parser::Parser::new(&buffer);\n\n let expr = match parser.parse() {\n\n Some(expr) => expr,\n\n None => continue,\n\n };\n\n let ast = match pass::pass(&expr) {\n\n Some(ast) => ast,\n\n None => continue,\n\n };\n\n engine.eval_and_print(&ast);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 69, "score": 17.131773365201873 }, { "content": " }\n\n\n\n pub fn get_expr(&self) -> &Expr {\n\n match self {\n\n Self::Variable(_, expr) => expr,\n\n }\n\n }\n\n}\n\n\n\npub struct Lookahead<I>\n\nwhere\n\n I: Iterator,\n\n{\n\n inner: I,\n\n first: Option<I::Item>,\n\n second: Option<I::Item>,\n\n}\n\n\n\nimpl<I> Lookahead<I>\n\nwhere\n", "file_path": "src/parser.rs", 
"rank": 70, "score": 16.88801641426189 }, { "content": "use crate::parser::*;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum AST {\n\n Const(Lit),\n\n Var(String),\n\n Call(Box<AST>, Vec<AST>),\n\n Lambda(Formals, Vec<AST>, Box<AST>),\n\n Cond(Box<AST>, Box<AST>, Option<Box<AST>>),\n\n Assign(String, Box<AST>),\n\n}\n\n\n", "file_path": "src/pass.rs", "rank": 71, "score": 16.593497363173665 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_ident() {\n\n let mut lexer = Lexer::new(\"lambda ...\");\n\n assert_eq!(lexer.parse(), Some(Token::new(Ident(Identifier::Lambda))));\n\n assert_eq!(\n\n lexer.parse(),\n\n Some(Token::new(Ident(Identifier::Var(\"...\".to_string()))))\n\n );\n\n assert_eq!(lexer.parse(), None);\n\n }\n\n\n\n #[test]\n\n fn test_parse_bool() {\n\n let mut lexer = Lexer::new(\"#t #f\");\n", "file_path": "src/lexer.rs", "rank": 72, "score": 16.315781109755637 }, { "content": " let expr = AST::Call(Box::new(inner), args);\n\n Some(AST::Lambda(formals.clone(), vec![], Box::new(expr)))\n\n }\n\n Expr::Cond(test, consequent, alternate) => {\n\n let test = Box::new(pass(test)?);\n\n let consequent = Box::new(pass(consequent)?);\n\n let alternate = match alternate {\n\n Some(alt) => Some(Box::new(pass(alt)?)),\n\n None => None,\n\n };\n\n Some(AST::Cond(test, consequent, alternate))\n\n }\n\n Expr::Assignment(var, expr) => {\n\n let expr = Box::new(pass(expr)?);\n\n Some(AST::Assign(var.clone(), expr))\n\n }\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/pass.rs", "rank": 73, "score": 15.42729449055194 }, { "content": " Self::Ident(ident) => Some(ident),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Identifier {\n\n Var(String),\n\n Else,\n\n Arrow,\n\n Define,\n\n Unquote,\n\n UnquoteSplicing,\n\n Quote,\n\n Lambda,\n\n If,\n\n Set,\n\n Begin,\n\n Cond,\n", "file_path": "src/lexer.rs", "rank": 74, "score": 15.034214066039233 }, { "content": "pub mod 
engine;\n\npub mod lexer;\n\npub mod parser;\n\npub mod pass;\n\n\n\nuse engine::Value::*;\n\nuse std::io::{self, Write};\n\n\n", "file_path": "src/main.rs", "rank": 75, "score": 15.001000075859121 }, { "content": " return Some(Datum::List(ListDatum::List(data)));\n\n }\n\n TokenKind::Dot => {\n\n if data.is_empty() {\n\n return None;\n\n }\n\n self.lexer.next();\n\n let last = Box::new(self.parse_datum()?);\n\n if self.eat_close_paren() {\n\n return Some(Datum::List(ListDatum::Cons(data, last)));\n\n }\n\n return None;\n\n }\n\n _ => data.push(self.parse_datum()?),\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn parse_vector(&mut self) -> Option<Datum> {\n", "file_path": "src/parser.rs", "rank": 76, "score": 14.65211858262899 }, { "content": "\n\npub struct Cursor<'a> {\n\n chars: Chars<'a>,\n\n last_len: usize,\n\n}\n\n\n\nimpl<'a> Cursor<'a> {\n\n pub fn new(input: &'a str) -> Self {\n\n Self {\n\n last_len: input.len(),\n\n chars: input.chars(),\n\n }\n\n }\n\n\n\n pub fn peek(&self) -> Option<char> {\n\n self.chars.clone().next()\n\n }\n\n\n\n pub fn eat(&mut self) -> Option<char> {\n\n self.chars.next()\n", "file_path": "src/lexer/cursor.rs", "rank": 77, "score": 14.500248862746709 }, { "content": "use super::{is_delimiter, is_initial, is_subsequent, is_whitespace};\n\nuse std::str::Chars;\n\n\n\nuse TokenKind::*;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Token {\n\n pub kind: TokenKind,\n\n pub len: usize,\n\n}\n\n\n\nimpl Token {\n\n pub fn new(kind: TokenKind, len: usize) -> Self {\n\n Self { kind, len }\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum TokenKind {\n\n // Atomosphere\n", "file_path": "src/lexer/cursor.rs", "rank": 78, "score": 14.329282333463517 }, { "content": " }\n\n\n\n pub fn terminated<P: Fn(char) -> bool>(&self, pred: P) -> bool {\n\n if let Some(c) = self.peek() {\n\n pred(c)\n\n } else {\n\n true\n\n }\n\n }\n\n\n\n pub fn len_eaten(&mut self) -> usize {\n\n let current = self.chars.as_str().len();\n\n let len = 
self.last_len - current;\n\n self.last_len = current;\n\n len\n\n }\n\n\n\n pub fn get_token(&mut self) -> Option<Token> {\n\n let kind = self.get_token_kind()?;\n\n let len = self.len_eaten();\n", "file_path": "src/lexer/cursor.rs", "rank": 79, "score": 14.296316565248834 }, { "content": " };\n\n Some(Ident(ident))\n\n }\n\n\n\n fn parse_bool(&mut self, start: usize) -> Option<TokenKind> {\n\n match self.substr(start) {\n\n \"#t\" => Some(Bool(true)),\n\n \"#f\" => Some(Bool(false)),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn parse_number(&mut self, start: usize) -> Option<TokenKind> {\n\n let num: f64 = self.substr(start).parse().ok()?;\n\n Some(Number(num))\n\n }\n\n\n\n fn parse_character(&mut self, start: usize) -> Option<TokenKind> {\n\n let s = self.substr(start + 2);\n\n\n", "file_path": "src/lexer.rs", "rank": 80, "score": 14.216213078510679 }, { "content": " let mut data = Vec::new();\n\n loop {\n\n if self.eat_close_paren() {\n\n break;\n\n }\n\n data.push(self.parse_datum()?);\n\n }\n\n Some(Datum::Vector(data))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_lookahead() {\n\n let xs = [1, 2, 3];\n\n let mut iter = Lookahead::new(xs.iter());\n\n\n", "file_path": "src/parser.rs", "rank": 81, "score": 13.906723756431488 }, { "content": " _ => None,\n\n },\n\n None => self.parse_call(),\n\n },\n\n TokenKind::Quote => Some(Literal(Lit::Quote(self.parse_datum()?))),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn eat_close_paren(&mut self) -> bool {\n\n if let Some(tok) = self.lexer.first() {\n\n if tok.kind == TokenKind::CloseParen {\n\n self.lexer.next();\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n\n\n fn parse_lambda(&mut self) -> Option<Expr> {\n", "file_path": "src/parser.rs", "rank": 82, "score": 13.640086142842843 }, { "content": " Some(Token::new(kind, len))\n\n }\n\n\n\n pub fn get_token_kind(&mut self) -> Option<TokenKind> {\n\n match self.eat()? 
{\n\n '(' => Some(OpenParen),\n\n ')' => Some(CloseParen),\n\n '#' => match self.eat()? {\n\n '(' => Some(SharpParen),\n\n '\\\\' => {\n\n self.eat()?;\n\n self.eat_until(is_delimiter);\n\n Some(Character)\n\n }\n\n 't' | 'f' => Some(Bool),\n\n 'i' | 'e' | 'b' | 'o' | 'd' | 'x' => {\n\n self.eat_until(is_delimiter);\n\n Some(Number)\n\n }\n\n _ => None,\n", "file_path": "src/lexer/cursor.rs", "rank": 83, "score": 13.024286486650428 }, { "content": "\n\n fn string(&mut self) -> Option<()> {\n\n while let Some(c) = self.eat() {\n\n match c {\n\n '\"' => return Some(()),\n\n '\\\\' => match self.eat()? {\n\n '\"' | '\\\\' => (),\n\n _ => return None,\n\n },\n\n _ => (),\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn triple_dot(&mut self) -> bool {\n\n if self.eat() != Some('.') {\n\n return false;\n\n }\n\n if self.eat() != Some('.') {\n", "file_path": "src/lexer/cursor.rs", "rank": 84, "score": 12.992025358805742 }, { "content": " self.lexer.next();\n\n let formals = self.parse_formals()?;\n\n let (defs, body) = self.parse_body()?;\n\n if self.eat_close_paren() {\n\n Some(Lambda(formals, defs, body))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn parse_formals(&mut self) -> Option<Formals> {\n\n match self.lexer.next()?.kind {\n\n TokenKind::Ident(ident) => Some(Formals::Dot(vec![], ident.var()?)),\n\n TokenKind::OpenParen => {\n\n let mut idents = Vec::new();\n\n while let Some(tok) = self.lexer.next() {\n\n match tok.kind {\n\n TokenKind::CloseParen => {\n\n return Some(Formals::List(idents));\n\n }\n", "file_path": "src/parser.rs", "rank": 85, "score": 12.654757851697395 }, { "content": " let c = if s.len() == 1 {\n\n s.chars().next()?\n\n } else {\n\n match s.to_lowercase().as_str() {\n\n \"space\" => ' ',\n\n \"newline\" => '\\n',\n\n _ => return None,\n\n }\n\n };\n\n\n\n Some(Character(c))\n\n }\n\n}\n\n\n\nimpl Iterator for Lexer<'_> {\n\n type Item = Token;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.parse()\n\n }\n", "file_path": "src/lexer.rs", 
"rank": 86, "score": 12.45740195043116 }, { "content": " }\n\n\n\n fn parse_assign(&mut self) -> Option<Expr> {\n\n self.lexer.next();\n\n let var = self.lexer.next()?.kind.ident()?.var()?;\n\n let expr = Box::new(self.parse()?);\n\n if self.eat_close_paren() {\n\n Some(Assignment(var, expr))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn parse_call(&mut self) -> Option<Expr> {\n\n let operator = Box::new(self.parse()?);\n\n let operands = self.parse_to_close()?;\n\n Some(ProcCall(operator, operands))\n\n }\n\n\n\n fn parse_to_close(&mut self) -> Option<Vec<Expr>> {\n", "file_path": "src/parser.rs", "rank": 87, "score": 12.291696221908134 }, { "content": " I: Iterator,\n\n{\n\n pub fn new(mut inner: I) -> Self {\n\n let first = inner.next();\n\n let second = inner.next();\n\n Self {\n\n inner,\n\n first,\n\n second,\n\n }\n\n }\n\n\n\n pub fn first(&self) -> Option<&I::Item> {\n\n self.first.as_ref()\n\n }\n\n\n\n pub fn second(&self) -> Option<&I::Item> {\n\n self.second.as_ref()\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 88, "score": 12.27172796416627 }, { "content": "impl fmt::Display for Identifier {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::Var(ident) => write!(f, \"{}\", ident),\n\n Self::Else => write!(f, \"else\"),\n\n Self::Arrow => write!(f, \"=>\"),\n\n Self::Define => write!(f, \"define\"),\n\n Self::Unquote => write!(f, \"unquote\"),\n\n Self::UnquoteSplicing => write!(f, \"unquote-splicing\"),\n\n Self::Quote => write!(f, \"quote\"),\n\n Self::Lambda => write!(f, \"lambda\"),\n\n Self::If => write!(f, \"if\"),\n\n Self::Set => write!(f, \"set!\"),\n\n Self::Begin => write!(f, \"begin\"),\n\n Self::Cond => write!(f, \"cond\"),\n\n Self::And => write!(f, \"and\"),\n\n Self::Or => write!(f, \"or\"),\n\n Self::Case => write!(f, \"case\"),\n\n Self::Let => write!(f, \"let\"),\n\n Self::LetStar => write!(f, \"let*\"),\n", "file_path": "src/lexer.rs", "rank": 89, "score": 11.964074420599323 }, { "content": 
" }\n\n }\n\n\n\n pub fn parse(&mut self) -> Option<Token> {\n\n let tok = self.cursor.get_token()?;\n\n\n\n let start = self.pos;\n\n self.pos += tok.len;\n\n\n\n let kind = match tok.kind {\n\n cursor::TokenKind::Whitespace => return self.parse(),\n\n cursor::TokenKind::Comment => return self.parse(),\n\n\n\n cursor::TokenKind::Ident => self.parse_ident(start)?,\n\n cursor::TokenKind::Bool => self.parse_bool(start)?,\n\n cursor::TokenKind::Number => self.parse_number(start)?,\n\n cursor::TokenKind::Character => self.parse_character(start)?,\n\n cursor::TokenKind::Str => Str(self.substr(start).to_string()),\n\n\n\n cursor::TokenKind::OpenParen => OpenParen,\n", "file_path": "src/lexer.rs", "rank": 90, "score": 11.958200348107608 }, { "content": " Self::Letrec => write!(f, \"letrec\"),\n\n Self::Do => write!(f, \"do\"),\n\n Self::Delay => write!(f, \"delay\"),\n\n Self::Quasiquote => write!(f, \"quasiquote\"),\n\n }\n\n }\n\n}\n\n\n\npub struct Lexer<'a> {\n\n cursor: cursor::Cursor<'a>,\n\n input: &'a str,\n\n pos: usize,\n\n}\n\n\n\nimpl<'a> Lexer<'a> {\n\n pub fn new(input: &'a str) -> Self {\n\n Self {\n\n cursor: cursor::Cursor::new(input),\n\n input,\n\n pos: 0,\n", "file_path": "src/lexer.rs", "rank": 91, "score": 11.926171834487917 }, { "content": " assert_eq!(lexer.parse(), Some(Token::new(Bool(true))));\n\n assert_eq!(lexer.parse(), Some(Token::new(Bool(false))));\n\n assert_eq!(lexer.parse(), None);\n\n }\n\n\n\n #[test]\n\n fn test_parse_number() {\n\n let mut lexer = Lexer::new(\"0.1 100\");\n\n assert_eq!(lexer.parse(), Some(Token::new(Number(0.1))));\n\n assert_eq!(lexer.parse(), Some(Token::new(Number(100.0))));\n\n assert_eq!(lexer.parse(), None);\n\n }\n\n\n\n #[test]\n\n fn test_parse_character() {\n\n let mut lexer = Lexer::new(\"#\\\\a #\\\\space\");\n\n assert_eq!(lexer.parse(), Some(Token::new(Character('a'))));\n\n assert_eq!(lexer.parse(), Some(Token::new(Character(' '))));\n\n assert_eq!(lexer.parse(), None);\n\n }\n", "file_path": 
"src/lexer.rs", "rank": 92, "score": 11.711581534529138 }, { "content": " self.eat_until(is_delimiter);\n\n Some(Number)\n\n }\n\n _ => None,\n\n }\n\n }\n\n\n\n fn eat_while<P: Fn(char) -> bool>(&mut self, pred: P) {\n\n while let Some(c) = self.peek() {\n\n if pred(c) {\n\n self.eat();\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n fn eat_until<P: Fn(char) -> bool>(&mut self, pred: P) {\n\n self.eat_while(|c| !pred(c));\n\n }\n", "file_path": "src/lexer/cursor.rs", "rank": 93, "score": 11.694829489046045 }, { "content": " if tok.kind == TokenKind::CloseParen {\n\n if exprs.is_empty() {\n\n return None;\n\n } else {\n\n return Some((defs, exprs));\n\n }\n\n } else {\n\n exprs.push(self.parse()?);\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn parse_define(&mut self) -> Option<Def> {\n\n self.lexer.next(); // eat '('\n\n self.lexer.next(); // eat \"define\"\n\n let var = self.lexer.next()?.kind.ident()?.var()?;\n\n let expr = Box::new(self.parse()?);\n\n if self.eat_close_paren() {\n\n Some(Def::Variable(var, expr))\n", "file_path": "src/parser.rs", "rank": 94, "score": 11.68169352249493 }, { "content": " #[test]\n\n fn test_parse_string() {\n\n let mut lexer = Cursor::new(\"\\\"string\\\"\\\"\\\\\\\"\\\"\");\n\n assert_eq!(lexer.get_token(), Some(Token::new(Str, 8)));\n\n assert_eq!(lexer.get_token(), Some(Token::new(Str, 4)));\n\n assert_eq!(lexer.get_token(), None);\n\n\n\n let mut lexer = Cursor::new(\"\\\"string\");\n\n assert_eq!(lexer.get_token(), None);\n\n\n\n let mut lexer = Cursor::new(\"\\\\a\");\n\n assert_eq!(lexer.get_token(), None);\n\n }\n\n\n\n #[test]\n\n fn test_parse_character() {\n\n let mut lexer = Cursor::new(\"#\\\\a\");\n\n assert_eq!(lexer.get_token(), Some(Token::new(Character, 3)));\n\n\n\n let mut lexer = Cursor::new(\"#\\\\\");\n", "file_path": "src/lexer/cursor.rs", "rank": 95, "score": 11.54597331615398 }, { "content": " TokenKind::Dot => {\n\n if idents.is_empty() {\n\n return None;\n\n }\n\n let last = 
self.lexer.next()?.kind.ident()?.var()?;\n\n if self.eat_close_paren() {\n\n return Some(Formals::Dot(idents, last));\n\n }\n\n return None;\n\n }\n\n TokenKind::Ident(ident) => {\n\n idents.push(ident.var()?);\n\n }\n\n _ => return None,\n\n }\n\n }\n\n None\n\n }\n\n _ => None,\n\n }\n", "file_path": "src/parser.rs", "rank": 96, "score": 11.382446248909996 }, { "content": " TokenKind::Quote => self.parse_abbrev_list(),\n\n TokenKind::Backquote => self.parse_abbrev_list(),\n\n TokenKind::Comma => self.parse_abbrev_list(),\n\n TokenKind::CommaAt => self.parse_abbrev_list(),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn parse_abbrev_list(&mut self) -> Option<Datum> {\n\n Some(Datum::List(ListDatum::Abbrev(Box::new(\n\n self.parse_datum()?,\n\n ))))\n\n }\n\n\n\n fn parse_list(&mut self) -> Option<Datum> {\n\n let mut data = Vec::new();\n\n while let Some(tok) = self.lexer.first() {\n\n match tok.kind {\n\n TokenKind::CloseParen => {\n\n self.lexer.next();\n", "file_path": "src/parser.rs", "rank": 97, "score": 10.805422604058768 }, { "content": "\n\nimpl<I> Iterator for Lookahead<I>\n\nwhere\n\n I: Iterator,\n\n{\n\n type Item = I::Item;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let mut retval = self.inner.next();\n\n std::mem::swap(&mut retval, &mut self.first);\n\n std::mem::swap(&mut self.first, &mut self.second);\n\n retval\n\n }\n\n}\n\n\n\npub struct Parser<'a> {\n\n lexer: Lookahead<Lexer<'a>>,\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n", "file_path": "src/parser.rs", "rank": 98, "score": 10.486747256872901 }, { "content": " assert_eq!(lexer.get_token(), None);\n\n\n\n let mut lexer = Cursor::new(\"#\\\\newline\");\n\n assert_eq!(lexer.get_token(), Some(Token::new(Character, 9)));\n\n }\n\n\n\n #[test]\n\n fn test_parse_boolean() {\n\n let mut lexer = Cursor::new(\"#t #f\");\n\n assert_eq!(lexer.get_token(), Some(Token::new(Bool, 2)));\n\n assert_eq!(lexer.get_token(), Some(Token::new(Whitespace, 1)));\n\n assert_eq!(lexer.get_token(), Some(Token::new(Bool, 
2)));\n\n }\n\n\n\n #[test]\n\n fn test_parse_peculiar_identifier() {\n\n let mut lexer = Cursor::new(\"+ - ... \");\n\n assert_eq!(lexer.get_token(), Some(Token::new(Ident, 1)));\n\n assert_eq!(lexer.get_token(), Some(Token::new(Whitespace, 1)));\n\n assert_eq!(lexer.get_token(), Some(Token::new(Ident, 1)));\n", "file_path": "src/lexer/cursor.rs", "rank": 99, "score": 9.175788956599776 } ]
Rust
src/network.rs
Aloxaf/mcfly
0f50f2deeed7c8ec75d41711369c2fc927006a4e
#![allow(clippy::unreadable_literal)] use crate::node::Node; use crate::training_sample_generator::TrainingSampleGenerator; use crate::history::Features; use rand::Rng; #[derive(Debug, Copy, Clone)] pub struct Network { pub final_bias: f64, pub final_weights: [f64; 3], pub final_sum: f64, pub final_output: f64, pub hidden_nodes: [Node; 3], pub hidden_node_sums: [f64; 3], pub hidden_node_outputs: [f64; 3], } impl Default for Network { fn default() -> Network { Network { final_bias: -0.3829333755179377, final_weights: [ 0.44656858145177714, -1.9550439349609872, -2.963322601316632 ], final_sum: 0.0, final_output: 0.0, hidden_nodes: [ Node { offset: -0.878184962836099, age: -0.9045522440219468, length: 0.5406937685800283, exit: -0.3472765681766297, recent_failure: -0.05291342121445077, selected_dir: -0.35027519196134, dir: -0.2466069217936986, overlap: 0.4791784213482642, immediate_overlap: 0.5565797758340211, selected_occurrences: -0.3600203296209723, occurrences: 0.15694312742881805 }, Node { offset: -0.04362945902379799, age: -0.25381913331319716, length: 0.4238780143901607, exit: 0.21906785628210726, recent_failure: -0.9510136025685453, selected_dir: -0.04654084670567356, dir: -2.2858050301068693, overlap: -0.562274365705918, immediate_overlap: -0.47252489212451904, selected_occurrences: 0.2446391951417497, occurrences: -1.4846489581676605 }, Node { offset: -0.11992725490486622, age: 0.3759013420273308, length: 1.674601413922965, exit: -0.15529596916772864, recent_failure: -0.7819181782432957, selected_dir: -1.1890532332896768, dir: 0.34723729558743677, overlap: 0.09372412920642742, immediate_overlap: 0.393989158881144, selected_occurrences: -0.2383372126951215, occurrences: -2.196219880265691 } ], hidden_node_sums: [ 0.0, 0.0, 0.0 ], hidden_node_outputs: [ 0.0, 0.0, 0.0 ] } } } impl Network { pub fn random() -> Network { let mut rng = rand::thread_rng(); Network { final_bias: rng.gen_range(-1.0, 1.0), final_weights: [rng.gen_range(-1.0, 1.0), rng.gen_range(-1.0, 
1.0), rng.gen_range(-1.0, 1.0)], hidden_nodes: [Node::random(), Node::random(), Node::random()], hidden_node_sums: [0.0, 0.0, 0.0], hidden_node_outputs: [0.0, 0.0, 0.0], final_sum: 0.0, final_output: 0.0, } } pub fn compute(&mut self, features: &Features) { self.final_sum = self.final_bias; for i in 0..self.hidden_nodes.len() { self.hidden_node_sums[i] = self.hidden_nodes[i].dot(features); self.hidden_node_outputs[i] = self.hidden_node_sums[i].tanh(); self.final_sum += self.hidden_node_outputs[i] * self.final_weights[i]; } self.final_output = self.final_sum.tanh(); } pub fn dot(&self, features: &Features) -> f64 { let mut network_output = self.final_bias; for (node, output_weight) in self.hidden_nodes.iter().zip(self.final_weights.iter()) { let node_output = node.output(features); network_output += node_output * output_weight; } network_output } pub fn output(&self, features: &Features) -> f64 { self.dot(features).tanh() } pub fn average_error(&self, generator: &TrainingSampleGenerator, records: usize) -> f64 { let mut error = 0.0; let mut samples = 0.0; generator.generate(Some(records), |features: &Features, correct: bool| { let target = if correct { 1.0 } else { -1.0 }; let output = self.output(features); error += 0.5 * (target - output).powi(2); samples += 1.0; }); error / samples } }
#![allow(clippy::unreadable_literal)] use crate::node::Node; use crate::training_sample_generator::TrainingSampleGenerator; use crate::history::Features; use rand::Rng; #[derive(Debug, Copy, Clone)] pub struct Network { pub final_bias: f64, pub final_weights: [f64; 3], pub final_sum: f64, pub final_output: f64, pub hidden_nodes: [Node; 3], pub hidden_node_sums: [f64; 3], pub hidden_node_outputs: [f64; 3], } impl Default for Network { fn default() -> Network { Network { final_bias: -0.3829333755179377, final_weights: [ 0.44656858145177714, -1.9550439349609872, -2.963322601316632 ], final_sum: 0.0, final_output: 0.0, hidden_nodes: [ Node { offset: -0.878184962836099, age: -0.9045522440219468, length: 0.5406937685800283, exit: -0.3472765681766297, recent_failure: -0.05291342121445077, selected_dir: -0.35027519196134, dir: -0.2466069217936986, overlap: 0.4791784213482642, immediate_overlap: 0.5565797758340211, selected_occurrences: -0.3600203296209723, occurrences: 0.15694312742881805 }, Node { offset: -0.04362945902379799, age: -0.25381913331319716, length: 0.4238780143901607, exit: 0.21906785628210726, recent_failure: -0.9510136025685453, selected_dir: -0.04654084670567356, dir: -2.2858050301068693, overlap: -0.562274365705918, immediate_overlap: -0.47252489212451904, selected_occurrences: 0.2446391951417497, occurrences: -1.4846489581676605 }, Node { offset: -0.11992725490486622, age: 0.3759013420273308, length: 1.674601413922965, exit: -0.15529596916772864, recent_failure: -0.7819181782432957, selected_dir: -1.1890532332896768, dir: 0.34723729558743677, overlap: 0.09372412920642742, immediate_overlap: 0.393989158881144, selected_occurrences: -0.2383372126951215, occurrences: -2.196219880265691 } ], hidden_node_sums: [ 0.0, 0.0, 0.0 ], hidden_node_outputs: [ 0.0, 0.0, 0.0 ] } } } impl Network { pub fn random() -> Network { let mut rng = rand::thread_rng(); Network { final_bias: rng.gen_range(-1.0, 1.0), final_weights: [rng.gen_range(-1.0, 1.0), rng.gen_range(-1.0, 
1.0), rng.gen_range(-1.0, 1.0)], hidden_nodes: [Node::random(), Node::random(), Node::random()], hidden_node_sums: [0.0, 0.0, 0.0], hidden_node_outputs: [0.0, 0.0, 0.0], final_sum: 0.0, final_output: 0.0, } } pub fn compute(&mut self, features: &Features) { self.final_sum = self.final_bias; for i in 0..self.hidden_nodes.len() { self.hidden_node_sums[i] = self.hidden_nodes[i].dot(features); self.hidden_node_outputs[i] = self.hidden_node_sums[i].tanh(); sel
pub fn dot(&self, features: &Features) -> f64 { let mut network_output = self.final_bias; for (node, output_weight) in self.hidden_nodes.iter().zip(self.final_weights.iter()) { let node_output = node.output(features); network_output += node_output * output_weight; } network_output } pub fn output(&self, features: &Features) -> f64 { self.dot(features).tanh() } pub fn average_error(&self, generator: &TrainingSampleGenerator, records: usize) -> f64 { let mut error = 0.0; let mut samples = 0.0; generator.generate(Some(records), |features: &Features, correct: bool| { let target = if correct { 1.0 } else { -1.0 }; let output = self.output(features); error += 0.5 * (target - output).powi(2); samples += 1.0; }); error / samples } }
f.final_sum += self.hidden_node_outputs[i] * self.final_weights[i]; } self.final_output = self.final_sum.tanh(); }
function_block-function_prefixed
[ { "content": "pub fn use_tiocsti(string: &str) {\n\n for byte in string.as_bytes() {\n\n let a: *const u8 = byte;\n\n if unsafe { ioctl(0, libc::TIOCSTI as u32, a) } < 0 {\n\n panic!(\"Error encountered when calling ioctl\");\n\n }\n\n }\n\n}\n", "file_path": "src/fake_typer.rs", "rank": 0, "score": 89140.51610810748 }, { "content": "fn output_row(writer: &mut Writer<File>, features: &Features, correct: bool) {\n\n writer\n\n .write_record(&[\n\n format!(\"{}\", features.age_factor),\n\n format!(\"{}\", features.length_factor),\n\n format!(\"{}\", features.exit_factor),\n\n format!(\"{}\", features.recent_failure_factor),\n\n format!(\"{}\", features.selected_dir_factor),\n\n format!(\"{}\", features.dir_factor),\n\n format!(\"{}\", features.overlap_factor),\n\n format!(\"{}\", features.immediate_overlap_factor),\n\n format!(\"{}\", features.selected_occurrences_factor),\n\n format!(\"{}\", features.occurrences_factor),\n\n if correct {\n\n String::from(\"t\")\n\n } else {\n\n String::from(\"f\")\n\n },\n\n ])\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Expected to write to CSV ({})\", err)));\n\n writer.flush().unwrap_or_else(|err| panic!(format!(\"McFly error: Expected to flush CSV ({})\", err)));\n\n}\n", "file_path": "src/training_cache.rs", "rank": 1, "score": 77339.90674662417 }, { "content": "pub fn read(cache_path: &PathBuf) -> Vec<(Features, bool)> {\n\n let mut data_set: Vec<(Features, bool)> = Vec::new();\n\n\n\n let mut reader = Reader::from_path(cache_path).unwrap_or_else(|err| panic!(format!(\"McFly error: Expected to be able to read from CSV ({})\", err)));\n\n\n\n for result in reader.records() {\n\n let record = result.unwrap_or_else(|err| panic!(format!(\"McFly error: Expected to be able to unwrap cached result ({})\", err)));\n\n\n\n let features = Features {\n\n age_factor: record[0].parse().unwrap(),\n\n length_factor: record[1].parse().unwrap(),\n\n exit_factor: record[2].parse().unwrap(),\n\n recent_failure_factor: 
record[3].parse().unwrap(),\n\n selected_dir_factor: record[4].parse().unwrap(),\n\n dir_factor: record[5].parse().unwrap(),\n\n overlap_factor: record[6].parse().unwrap(),\n\n immediate_overlap_factor: record[7].parse().unwrap(),\n\n selected_occurrences_factor: record[8].parse().unwrap(),\n\n occurrences_factor: record[9].parse().unwrap()\n\n };\n\n\n\n data_set.push((features, record[10].eq(\"t\")));\n\n }\n\n\n\n data_set\n\n}\n\n\n", "file_path": "src/training_cache.rs", "rank": 2, "score": 72755.75198182782 }, { "content": "pub fn write(data_set: &[(Features, bool)], cache_path: &PathBuf) {\n\n let mut writer = Writer::from_path(cache_path).unwrap_or_else(|err| panic!(format!(\"McFly error: Expected to be able to write a CSV ({})\", err)));\n\n output_header(&mut writer);\n\n\n\n for (features, correct) in data_set {\n\n output_row(&mut writer, features, *correct);\n\n }\n\n}\n\n\n", "file_path": "src/training_cache.rs", "rank": 3, "score": 70728.34004358845 }, { "content": "pub fn migrate(connection: &Connection) {\n\n make_schema_versions_table(connection);\n\n\n\n let current_version: u16 = connection\n\n .query_row::<Option<u16>, _, _>(\n\n \"select max(version) FROM schema_versions ORDER BY version DESC LIMIT 1\",\n\n NO_PARAMS,\n\n |row| row.get(0),\n\n )\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Query to work ({})\", err)))\n\n .unwrap_or(0);\n\n\n\n if current_version < CURRENT_SCHEMA_VERSION {\n\n print!(\n\n \"McFly: Upgrading McFly DB to version {}, please wait...\",\n\n CURRENT_SCHEMA_VERSION\n\n );\n\n io::stdout().flush().unwrap_or_else(|err| panic!(format!(\"McFly error: STDOUT flush should work ({})\", err)));\n\n }\n\n\n", "file_path": "src/history/schema.rs", "rank": 4, "score": 69608.51234032755 }, { "content": "pub fn bash_history_file_path() -> PathBuf {\n\n let path =\n\n PathBuf::from(env::var(\"HISTFILE\").unwrap_or_else(|err| panic!(format!(\"McFly error: Please ensure HISTFILE is set for your shell ({})\", err))));\n\n 
fs::canonicalize(&path).unwrap_or_else(|err| panic!(format!(\"McFly error: The contents of $HISTFILE appear invalid ({})\", err)))\n\n}\n\n\n", "file_path": "src/bash_history.rs", "rank": 5, "score": 67134.50407950423 }, { "content": "pub fn first_time_setup(connection: &Connection) {\n\n make_schema_versions_table(connection);\n\n write_current_schema_version(connection);\n\n}\n\n\n", "file_path": "src/history/schema.rs", "rank": 6, "score": 65676.29177772088 }, { "content": "pub fn add_db_functions(db: &Connection) {\n\n let network = Network::default();\n\n db.create_scalar_function(\"nn_rank\", 10, true, move |ctx| {\n\n let age_factor = ctx.get::<f64>(0)?;\n\n let length_factor = ctx.get::<f64>(1)?;\n\n let exit_factor = ctx.get::<f64>(2)?;\n\n let recent_failure_factor = ctx.get::<f64>(3)?;\n\n let selected_dir_factor = ctx.get::<f64>(4)?;\n\n let dir_factor = ctx.get::<f64>(5)?;\n\n let overlap_factor = ctx.get::<f64>(6)?;\n\n let immediate_overlap_factor = ctx.get::<f64>(7)?;\n\n let selected_occurrences_factor = ctx.get::<f64>(8)?;\n\n let occurrences_factor = ctx.get::<f64>(9)?;\n\n\n\n let features = Features {\n\n age_factor,\n\n length_factor,\n\n exit_factor,\n\n recent_failure_factor,\n\n selected_dir_factor,\n", "file_path": "src/history/db_extensions.rs", "rank": 7, "score": 63960.989204976664 }, { "content": "fn output_header(writer: &mut Writer<File>) {\n\n writer\n\n .write_record(&[\n\n \"age_factor\",\n\n \"length_factor\",\n\n \"exit_factor\",\n\n \"recent_failure_factor\",\n\n \"selected_dir_factor\",\n\n \"dir_factor\",\n\n \"overlap_factor\",\n\n \"immediate_overlap_factor\",\n\n \"selected_occurrences_factor\",\n\n \"occurrences_factor\",\n\n \"correct\",\n\n ])\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Expected to write to CSV ({})\", err)));\n\n writer.flush().unwrap_or_else(|err| panic!(format!(\"McFly error: Expected to flush CSV ({})\", err)));\n\n}\n\n\n", "file_path": "src/training_cache.rs", "rank": 8, "score": 
63941.97095862917 }, { "content": "fn handle_addition(settings: &Settings, history: &mut History) {\n\n if history.should_add(&settings.command) {\n\n history.add(\n\n &settings.command,\n\n &settings.session_id,\n\n &settings.dir,\n\n &settings.when_run,\n\n settings.exit_code,\n\n &settings.old_dir,\n\n );\n\n\n\n if settings.append_to_histfile {\n\n let histfile = PathBuf::from(env::var(\"HISTFILE\")\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Please ensure that HISTFILE is set ({})\", err))));\n\n bash_history::append_history_entry(&settings.command, &histfile)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 61813.14369458346 }, { "content": "fn handle_move(settings: &Settings, history: &mut History) {\n\n history.update_paths(&settings.old_dir.clone().unwrap(), &settings.dir, true);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 61813.14369458346 }, { "content": "fn handle_train(settings: &Settings, history: &mut History) {\n\n Trainer::new(settings, history).train();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 11, "score": 61813.14369458346 }, { "content": "pub fn delete_last_history_entry_if_search(path: &PathBuf) {\n\n let bash_history_contents = read_ignoring_utf_errors(&path);\n\n\n\n let mut lines = bash_history_contents\n\n .split('\\n')\n\n .map(String::from)\n\n .collect::<Vec<String>>();\n\n\n\n if !lines.is_empty() && lines[lines.len() - 1].is_empty() {\n\n lines.pop();\n\n }\n\n\n\n if lines.is_empty() || !lines[lines.len() - 1].starts_with(\"#mcfly:\") {\n\n return; // Abort if empty or the last line isn't a comment.\n\n }\n\n\n\n lines.pop();\n\n\n\n if !lines.is_empty() && has_leading_timestamp(&lines[lines.len() - 1]) {\n\n lines.pop();\n\n }\n\n\n\n lines.push(String::from(\"\")); // New line at end of file expected by bash.\n\n\n\n fs::write(&path, lines.join(\"\\n\")).unwrap_or_else(|_| panic!(\"McFly error: Unable to update {:?}\", &path));\n\n}\n\n\n", "file_path": 
"src/bash_history.rs", "rank": 12, "score": 60933.71035962482 }, { "content": "pub fn normalize_path(incoming_path: &str) -> String {\n\n let expanded_path = shellexpand::full(incoming_path).unwrap_or_else(|err| panic!(format!(\"McFly error: Unable to expand command path ({})\", err)));\n\n\n\n let current_dir = env::var(\"PWD\").unwrap_or_else(|err| panic!(format!(\"McFly error: Unable to determine current directory ({})\", err)));\n\n let current_dir_path = Path::new(&current_dir);\n\n\n\n let path_buf = if expanded_path.starts_with('/') {\n\n RelativePath::new(&expanded_path.into_owned()).normalize().to_path(\"/\")\n\n } else {\n\n let to_current_dir = RelativePath::new(&expanded_path).to_path(current_dir_path);\n\n RelativePath::new(to_current_dir.to_str().unwrap()).normalize().to_path(\"/\")\n\n };\n\n\n\n path_buf.to_str().unwrap_or_else(|| panic!(\"McFly error: Path must be a valid UTF8 string\")).to_string()\n\n}\n\n\n", "file_path": "src/path_update_helpers.rs", "rank": 13, "score": 59847.82575198195 }, { "content": "pub fn full_history(path: &PathBuf) -> Vec<String> {\n\n let bash_history_contents = read_ignoring_utf_errors(&path);\n\n\n\n bash_history_contents\n\n .split('\\n')\n\n .filter(|line| !has_leading_timestamp(line) && !line.is_empty())\n\n .map(String::from)\n\n .collect::<Vec<String>>()\n\n}\n\n\n", "file_path": "src/bash_history.rs", "rank": 14, "score": 57772.13250976625 }, { "content": "pub fn delete_lines(path: &PathBuf, command: &str) {\n\n let history_contents = read_ignoring_utf_errors(&path);\n\n\n\n let lines = history_contents\n\n .split('\\n')\n\n .map(String::from)\n\n .filter(|cmd| !cmd.eq(command))\n\n .collect::<Vec<String>>();\n\n\n\n fs::write(&path, lines.join(\"\\n\")).unwrap_or_else(|_| panic!(\"McFly error: Unable to update {:?}\", &path));\n\n}\n\n\n", "file_path": "src/bash_history.rs", "rank": 15, "score": 57772.13250976625 }, { "content": "pub fn append_history_entry(command: &str, path: &PathBuf) {\n\n let mut file = 
OpenOptions::new()\n\n .write(true)\n\n .append(true)\n\n .open(path)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: please make sure HISTFILE exists ({})\", err)));\n\n\n\n if let Err(e) = writeln!(file, \"{}\", command) {\n\n eprintln!(\"Couldn't append to file {:?}: {}\", &path, e);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::has_leading_timestamp;\n\n\n\n #[test]\n\n fn has_leading_timestamp_works() {\n\n assert_eq!(false, has_leading_timestamp(\"abc\"));\n\n assert_eq!(false, has_leading_timestamp(\"#abc\"));\n\n assert_eq!(false, has_leading_timestamp(\"#123456\"));\n\n assert_eq!(true, has_leading_timestamp(\"#1234567890\"));\n\n assert_eq!(false, has_leading_timestamp(\"#123456789\"));\n\n assert_eq!(false, has_leading_timestamp(\"# 1234567890\"));\n\n assert_eq!(false, has_leading_timestamp(\"1234567890\"));\n\n assert_eq!(false, has_leading_timestamp(\"hello 1234567890\"));\n\n }\n\n}\n", "file_path": "src/bash_history.rs", "rank": 16, "score": 56320.23058608254 }, { "content": "pub fn parse_mv_command(command: &str) -> Vec<String> {\n\n let mut in_double_quote = false;\n\n let mut in_single_quote = false;\n\n let mut escaped = false;\n\n let mut buffer = String::new();\n\n let mut result: Vec<String> = Vec::new();\n\n\n\n for grapheme in command.graphemes(true) {\n\n match grapheme {\n\n \"\\\\\" => {\n\n escaped = true;\n\n }\n\n \"\\\"\" => {\n\n if escaped {\n\n escaped = false;\n\n buffer.push_str(grapheme);\n\n } else if in_double_quote {\n\n in_double_quote = false;\n\n if !buffer.is_empty() {\n\n result.push(buffer);\n", "file_path": "src/path_update_helpers.rs", "rank": 17, "score": 56320.23058608254 }, { "content": "pub fn last_history_line(path: &PathBuf) -> Option<String> {\n\n // Could switch to https://github.com/mikeycgto/rev_lines\n\n full_history(path).last().map(|s| s.trim().to_string())\n\n}\n\n\n", "file_path": "src/bash_history.rs", "rank": 18, "score": 56320.23058608254 }, { "content": "pub fn 
clean(settings: &Settings, history: &History, command: &str) {\n\n // Clean up the database.\n\n history.delete_command(command);\n\n\n\n // Clean up the contents of MCFLY_HISTORY and all other temporary history files in the same\n\n // directory.\n\n clean_temporary_files(&settings.mcfly_history, command);\n\n\n\n // Clean up HISTFILE.\n\n let histfile =\n\n PathBuf::from(env::var(\"HISTFILE\").unwrap_or_else(|err| panic!(format!(\"McFly error: Please ensure that HISTFILE is set ({})\", err))));\n\n bash_history::delete_lines(&histfile, command);\n\n}\n\n\n", "file_path": "src/history_cleaner.rs", "rank": 19, "score": 54579.948368321464 }, { "content": "fn main() {\n\n let settings = Settings::parse_args();\n\n\n\n let mut history = History::load();\n\n\n\n match settings.mode {\n\n Mode::Add => {\n\n handle_addition(&settings, &mut history);\n\n }\n\n Mode::Search => {\n\n handle_search(&settings, &history);\n\n }\n\n Mode::Train => {\n\n handle_train(&settings, &mut history);\n\n }\n\n Mode::Move => {\n\n handle_move(&settings, &mut history);\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 20, "score": 45499.73857536139 }, { "content": "fn make_schema_versions_table(connection: &Connection) {\n\n connection\n\n .execute_batch(\n\n \"CREATE TABLE IF NOT EXISTS schema_versions( \\\n\n id INTEGER PRIMARY KEY AUTOINCREMENT, \\\n\n version INTEGER NOT NULL, \\\n\n when_run INTEGER NOT NULL);\n\n\n\n CREATE UNIQUE INDEX IF NOT EXISTS schema_versions_index ON schema_versions (version);\",\n\n )\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Unable to create schema_versions db table ({})\", err)));\n\n}\n\n\n", "file_path": "src/history/schema.rs", "rank": 21, "score": 33666.39164644902 }, { "content": "fn write_current_schema_version(connection: &Connection) {\n\n let insert = format!(\n\n \"INSERT INTO schema_versions (version, when_run) VALUES ({}, strftime('%s','now'))\",\n\n CURRENT_SCHEMA_VERSION\n\n );\n\n connection\n\n 
.execute_batch(&insert)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Unable to update schema_versions ({})\", err)));\n\n}\n\n\n", "file_path": "src/history/schema.rs", "rank": 22, "score": 33666.39164644902 }, { "content": "#[allow(clippy::if_same_then_else)]\n\nfn has_leading_timestamp(line: &str) -> bool {\n\n let mut matched_chars = 0;\n\n\n\n for (index, c) in line.chars().enumerate() {\n\n if index == 0 && c == '#' {\n\n matched_chars += 1;\n\n } else if index > 0 && index < 11 && (c.is_digit(10)) {\n\n matched_chars += 1;\n\n } else if index > 11 {\n\n break;\n\n }\n\n }\n\n\n\n matched_chars == 11\n\n}\n\n\n", "file_path": "src/bash_history.rs", "rank": 23, "score": 33077.44954993237 }, { "content": "fn handle_search(settings: &Settings, history: &History) {\n\n let result = Interface::new(settings, history).display();\n\n if let Some(cmd) = result.selection {\n\n fake_typer::use_tiocsti(&cmd);\n\n\n\n if result.run {\n\n fake_typer::use_tiocsti(&\"\\n\".to_string());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 31862.54326259064 }, { "content": "fn read_ignoring_utf_errors(path: &PathBuf) -> String {\n\n let mut f = File::open(path).unwrap_or_else(|_| panic!(\"McFly error: {:?} file not found\", &path));\n\n let mut buffer = Vec::new();\n\n f.read_to_end(&mut buffer).unwrap_or_else(|_| panic!(\"McFly error: Unable to read from {:?}\", &path));\n\n String::from_utf8_lossy(&buffer).to_string()\n\n}\n\n\n", "file_path": "src/bash_history.rs", "rank": 25, "score": 30514.204973013842 }, { "content": "use crate::history::Features;\n\nuse rand::Rng;\n\nuse std::f64;\n\n\n\n#[derive(Debug, Copy, Clone, Default)]\n\npub struct Node {\n\n pub offset: f64,\n\n pub age: f64,\n\n pub length: f64,\n\n pub exit: f64,\n\n pub recent_failure: f64,\n\n pub selected_dir: f64,\n\n pub dir: f64,\n\n pub overlap: f64,\n\n pub immediate_overlap: f64,\n\n pub selected_occurrences: f64,\n\n pub occurrences: f64,\n\n}\n\n\n\nimpl Node {\n", 
"file_path": "src/node.rs", "rank": 26, "score": 29912.50903173243 }, { "content": " pub fn random() -> Node {\n\n let mut rng = rand::thread_rng();\n\n\n\n Node {\n\n offset: rng.gen_range(-1.0, 1.0),\n\n age: rng.gen_range(-1.0, 1.0),\n\n length: rng.gen_range(-1.0, 1.0),\n\n exit: rng.gen_range(-1.0, 1.0),\n\n recent_failure: rng.gen_range(-1.0, 1.0),\n\n selected_dir: rng.gen_range(-1.0, 1.0),\n\n dir: rng.gen_range(-1.0, 1.0),\n\n overlap: rng.gen_range(-1.0, 1.0),\n\n immediate_overlap: rng.gen_range(-1.0, 1.0),\n\n selected_occurrences: rng.gen_range(-1.0, 1.0),\n\n occurrences: rng.gen_range(-1.0, 1.0),\n\n }\n\n }\n\n\n\n pub fn dot(&self, features: &Features) -> f64 {\n\n self.offset\n", "file_path": "src/node.rs", "rank": 27, "score": 29904.23671344071 }, { "content": " + features.age_factor * self.age\n\n + features.length_factor * self.length\n\n + features.exit_factor * self.exit\n\n + features.recent_failure_factor * self.recent_failure\n\n + features.selected_dir_factor * self.selected_dir\n\n + features.dir_factor * self.dir\n\n + features.overlap_factor * self.overlap\n\n + features.immediate_overlap_factor * self.immediate_overlap\n\n + features.selected_occurrences_factor * self.selected_occurrences\n\n + features.occurrences_factor * self.occurrences\n\n }\n\n\n\n pub fn output(&self, features: &Features) -> f64 {\n\n self.dot(features).tanh()\n\n }\n\n}\n", "file_path": "src/node.rs", "rank": 28, "score": 29897.688057811858 }, { "content": "fn cmd_strings(connection: &Connection) -> Vec<(i64, String)> {\n\n let query = \"SELECT id, cmd FROM commands ORDER BY id DESC\";\n\n let mut statement = connection.prepare(query).unwrap();\n\n let command_iter = statement\n\n .query_map(NO_PARAMS, |row| (row.get(0), row.get(1)))\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Query Map to work ({})\", err)));\n\n\n\n let mut vec = Vec::new();\n\n for result in command_iter {\n\n if let Ok(command) = result {\n\n vec.push(command);\n\n }\n\n 
}\n\n\n\n vec\n\n}\n", "file_path": "src/history/schema.rs", "rank": 36, "score": 29087.36113972865 }, { "content": "fn clean_temporary_files(mcfly_history: &PathBuf, command: &str) {\n\n let path = mcfly_history.as_path();\n\n if let Some(directory) = path.parent() {\n\n let expanded_path =\n\n fs::canonicalize(directory).unwrap_or_else(|err| panic!(format!(\"McFly error: The contents of $MCFLY_HISTORY appear invalid ({})\", err)));\n\n let paths = fs::read_dir(&expanded_path).unwrap();\n\n\n\n for path in paths {\n\n if let Ok(entry) = path {\n\n if let Some(file_name) = entry.path().file_name() {\n\n if let Some(valid_unicode_str) = file_name.to_str() {\n\n if valid_unicode_str.starts_with(\"mcfly.\") {\n\n bash_history::delete_lines(&entry.path(), command);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/history_cleaner.rs", "rank": 37, "score": 28578.818596897443 }, { "content": "use unicode_segmentation::UnicodeSegmentation;\n\n\n\n#[derive(Debug)]\n\npub struct FixedLengthGraphemeString {\n\n pub string: String,\n\n pub grapheme_length: u16,\n\n pub max_grapheme_length: u16,\n\n}\n\n\n\nimpl FixedLengthGraphemeString {\n\n pub fn empty(max_grapheme_length: u16) -> FixedLengthGraphemeString {\n\n FixedLengthGraphemeString {\n\n string: String::new(),\n\n grapheme_length: 0,\n\n max_grapheme_length,\n\n }\n\n }\n\n\n\n pub fn new<S: Into<String>>(s: S, max_grapheme_length: u16) -> FixedLengthGraphemeString {\n\n let mut fixed_length_grapheme_string =\n", "file_path": "src/fixed_length_grapheme_string.rs", "rank": 38, "score": 25044.323453196896 }, { "content": " FixedLengthGraphemeString::empty(max_grapheme_length);\n\n fixed_length_grapheme_string.push_grapheme_str(s);\n\n fixed_length_grapheme_string\n\n }\n\n\n\n pub fn push_grapheme_str<S: Into<String>>(&mut self, s: S) {\n\n for grapheme in s.into().graphemes(true) {\n\n if self.grapheme_length >= self.max_grapheme_length {\n\n return;\n\n }\n\n self.string.push_str(grapheme);\n\n 
self.grapheme_length += 1;\n\n }\n\n }\n\n\n\n pub fn push_str(&mut self, s: &str) {\n\n self.string.push_str(s);\n\n }\n\n}\n\n\n", "file_path": "src/fixed_length_grapheme_string.rs", "rank": 39, "score": 25041.043346819057 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::FixedLengthGraphemeString;\n\n\n\n #[test]\n\n fn length_works() {\n\n let input = FixedLengthGraphemeString::new(\"こんにちは世界\", 20);\n\n assert_eq!(input.grapheme_length, 7);\n\n }\n\n\n\n #[test]\n\n fn max_length_works() {\n\n let mut input = FixedLengthGraphemeString::new(\"こんにちは世界\", 5);\n\n assert_eq!(input.string, \"こんにちは\");\n\n input.push_grapheme_str(\"世界\");\n\n assert_eq!(input.string, \"こんにちは\");\n\n input.max_grapheme_length = 7;\n\n input.push_grapheme_str(\"世界\");\n\n assert_eq!(input.string, \"こんにちは世界\");\n\n }\n\n}\n", "file_path": "src/fixed_length_grapheme_string.rs", "rank": 40, "score": 25039.03123647981 }, { "content": "\n\n let node0 = network.hidden_nodes[0];\n\n let node1 = network.hidden_nodes[1];\n\n let node2 = network.hidden_nodes[2];\n\n network = Network {\n\n hidden_nodes: [\n\n Node {\n\n offset: node0.offset - node_increments[0].offset,\n\n age: node0.age - node_increments[0].age,\n\n length: node0.length - node_increments[0].length,\n\n exit: node0.exit - node_increments[0].exit,\n\n recent_failure: node0.recent_failure - node_increments[0].recent_failure,\n\n selected_dir: node0.selected_dir - node_increments[0].selected_dir,\n\n dir: node0.dir - node_increments[0].dir,\n\n overlap: node0.overlap - node_increments[0].overlap,\n\n immediate_overlap: node0.immediate_overlap - node_increments[0].immediate_overlap,\n\n selected_occurrences: node0.selected_occurrences - node_increments[0].selected_occurrences,\n\n occurrences: node0.occurrences - node_increments[0].occurrences,\n\n },\n\n Node {\n", "file_path": "src/trainer.rs", "rank": 41, "score": 28.462234456081788 }, { "content": " let d_o_2_d_s_2 = 1.0 - 
network.hidden_node_sums[2].tanh().powi(2);\n\n let d_e_d_s_0 = d_e_d_o_3 * d_o_3_d_s_3 * d_s_3_d_o_0 * d_o_0_d_s_0;\n\n let d_e_d_s_1 = d_e_d_o_3 * d_o_3_d_s_3 * d_s_3_d_o_1 * d_o_1_d_s_1;\n\n let d_e_d_s_2 = d_e_d_o_3 * d_o_3_d_s_3 * d_s_3_d_o_2 * d_o_2_d_s_2;\n\n\n\n node_increments[0].offset = momentum * node_increments[0].offset + lr * d_e_d_s_0 * 1.0;\n\n node_increments[0].age = momentum * node_increments[0].age + lr * d_e_d_s_0 * features.age_factor;\n\n node_increments[0].length = momentum * node_increments[0].length + lr * d_e_d_s_0 * features.length_factor;\n\n node_increments[0].exit = momentum * node_increments[0].exit + lr * d_e_d_s_0 * features.exit_factor;\n\n node_increments[0].recent_failure = momentum * node_increments[0].recent_failure + lr * d_e_d_s_0 * features.recent_failure_factor;\n\n node_increments[0].selected_dir = momentum * node_increments[0].selected_dir + lr * d_e_d_s_0 * features.selected_dir_factor;\n\n node_increments[0].dir = momentum * node_increments[0].dir + lr * d_e_d_s_0 * features.dir_factor;\n\n node_increments[0].overlap = momentum * node_increments[0].overlap + lr * d_e_d_s_0 * features.overlap_factor;\n\n node_increments[0].immediate_overlap = momentum * node_increments[0].immediate_overlap + lr * d_e_d_s_0 * features.immediate_overlap_factor;\n\n node_increments[0].selected_occurrences = momentum * node_increments[0].selected_occurrences + lr * d_e_d_s_0 * features.selected_occurrences_factor;\n\n node_increments[0].occurrences = momentum * node_increments[0].occurrences + lr * d_e_d_s_0 * features.occurrences_factor;\n\n\n\n node_increments[1].offset = momentum * node_increments[1].offset + lr * d_e_d_s_1 * 1.0;\n\n node_increments[1].age = momentum * node_increments[1].age + lr * d_e_d_s_1 * features.age_factor;\n\n node_increments[1].length = momentum * node_increments[1].length + lr * d_e_d_s_1 * features.length_factor;\n", "file_path": "src/trainer.rs", "rank": 42, "score": 27.752867766317337 }, { "content": " 
offset: node1.offset - node_increments[1].offset,\n\n age: node1.age - node_increments[1].age,\n\n length: node1.length - node_increments[1].length,\n\n exit: node1.exit - node_increments[1].exit,\n\n recent_failure: node1.recent_failure - node_increments[1].recent_failure,\n\n selected_dir: node1.selected_dir - node_increments[1].selected_dir,\n\n dir: node1.dir - node_increments[1].dir,\n\n overlap: node1.overlap - node_increments[1].overlap,\n\n immediate_overlap: node1.immediate_overlap - node_increments[1].immediate_overlap,\n\n selected_occurrences: node1.selected_occurrences - node_increments[1].selected_occurrences,\n\n occurrences: node1.occurrences - node_increments[1].occurrences,\n\n },\n\n Node {\n\n offset: node2.offset - node_increments[2].offset,\n\n age: node2.age - node_increments[2].age,\n\n length: node2.length - node_increments[2].length,\n\n exit: node2.exit - node_increments[2].exit,\n\n recent_failure: node2.recent_failure - node_increments[2].recent_failure,\n\n selected_dir: node2.selected_dir - node_increments[2].selected_dir,\n\n dir: node2.dir - node_increments[2].dir,\n", "file_path": "src/trainer.rs", "rank": 43, "score": 27.0498643621335 }, { "content": " node_increments[1].exit = momentum * node_increments[1].exit + lr * d_e_d_s_1 * features.exit_factor;\n\n node_increments[1].recent_failure = momentum * node_increments[1].recent_failure + lr * d_e_d_s_1 * features.recent_failure_factor;\n\n node_increments[1].selected_dir = momentum * node_increments[1].selected_dir + lr * d_e_d_s_1 * features.selected_dir_factor;\n\n node_increments[1].dir = momentum * node_increments[1].dir + lr * d_e_d_s_1 * features.dir_factor;\n\n node_increments[1].overlap = momentum * node_increments[1].overlap + lr * d_e_d_s_1 * features.overlap_factor;\n\n node_increments[1].immediate_overlap = momentum * node_increments[1].immediate_overlap + lr * d_e_d_s_1 * features.immediate_overlap_factor;\n\n node_increments[1].selected_occurrences = momentum * 
node_increments[1].selected_occurrences + lr * d_e_d_s_1 * features.selected_occurrences_factor;\n\n node_increments[1].occurrences = momentum * node_increments[1].occurrences + lr * d_e_d_s_1 * features.occurrences_factor;\n\n\n\n node_increments[2].offset = momentum * node_increments[2].offset + lr * d_e_d_s_2 * 1.0;\n\n node_increments[2].age = momentum * node_increments[2].age + lr * d_e_d_s_2 * features.age_factor;\n\n node_increments[2].length = momentum * node_increments[2].length + lr * d_e_d_s_2 * features.length_factor;\n\n node_increments[2].exit = momentum * node_increments[2].exit + lr * d_e_d_s_2 * features.exit_factor;\n\n node_increments[2].recent_failure = momentum * node_increments[2].recent_failure + lr * d_e_d_s_2 * features.recent_failure_factor;\n\n node_increments[2].selected_dir = momentum * node_increments[2].selected_dir + lr * d_e_d_s_2 * features.selected_dir_factor;\n\n node_increments[2].dir = momentum * node_increments[2].dir + lr * d_e_d_s_2 * features.dir_factor;\n\n node_increments[2].overlap = momentum * node_increments[2].overlap + lr * d_e_d_s_2 * features.overlap_factor;\n\n node_increments[2].immediate_overlap = momentum * node_increments[2].immediate_overlap + lr * d_e_d_s_2 * features.immediate_overlap_factor;\n\n node_increments[2].selected_occurrences = momentum * node_increments[2].selected_occurrences + lr * d_e_d_s_2 * features.selected_occurrences_factor;\n\n node_increments[2].occurrences = momentum * node_increments[2].occurrences + lr * d_e_d_s_2 * features.occurrences_factor;\n", "file_path": "src/trainer.rs", "rank": 44, "score": 25.103203928561193 }, { "content": "#![allow(clippy::module_inception)]\n\nuse crate::bash_history;\n\nuse rusqlite::{Connection, MappedRows, Row, NO_PARAMS};\n\nuse std::{fmt, fs, io};\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\n//use std::time::Instant;\n\nuse crate::history::{db_extensions, schema};\n\nuse crate::simplified_command::SimplifiedCommand;\n\nuse 
crate::path_update_helpers;\n\nuse std::time::{Instant, SystemTime, UNIX_EPOCH};\n\nuse crate::network::Network;\n\nuse crate::settings::Settings;\n\nuse rusqlite::types::ToSql;\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub struct Features {\n\n pub age_factor: f64,\n\n pub length_factor: f64,\n\n pub exit_factor: f64,\n", "file_path": "src/history/history.rs", "rank": 45, "score": 21.192191288838593 }, { "content": " pub recent_failure_factor: f64,\n\n pub selected_dir_factor: f64,\n\n pub dir_factor: f64,\n\n pub overlap_factor: f64,\n\n pub immediate_overlap_factor: f64,\n\n pub selected_occurrences_factor: f64,\n\n pub occurrences_factor: f64,\n\n}\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub struct Command {\n\n pub id: i64,\n\n pub cmd: String,\n\n pub cmd_tpl: String,\n\n pub session_id: String,\n\n pub rank: f64,\n\n pub when_run: Option<i64>,\n\n pub exit_code: Option<i32>,\n\n pub selected: bool,\n\n pub dir: Option<String>,\n", "file_path": "src/history/history.rs", "rank": 46, "score": 21.113948943040203 }, { "content": "use crate::history::History;\n\nuse crate::settings::Settings;\n\nuse crate::history::Features;\n\nuse crate::node::Node;\n\nuse crate::network::Network;\n\nuse crate::training_sample_generator::TrainingSampleGenerator;\n\n\n\n#[derive(Debug)]\n\npub struct Trainer<'a> {\n\n settings: &'a Settings,\n\n history: &'a mut History,\n\n}\n\n\n\nimpl<'a> Trainer<'a> {\n\n pub fn new(settings: &'a Settings, history: &'a mut History) -> Trainer<'a> {\n\n Trainer { settings, history }\n\n }\n\n\n\n pub fn train(&mut self) {\n\n let lr = 0.000005;\n", "file_path": "src/trainer.rs", "rank": 47, "score": 19.456318622194587 }, { "content": " pub features: Features,\n\n}\n\n\n\nimpl fmt::Display for Command {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.cmd.fmt(f)\n\n }\n\n}\n\n\n\nimpl From<Command> for String {\n\n fn from(command: Command) -> Self {\n\n command.cmd\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct History 
{\n\n pub connection: Connection,\n\n pub network: Network,\n\n}\n", "file_path": "src/history/history.rs", "rank": 48, "score": 18.080807443994964 }, { "content": " out.push_str(&format!(\"{}\", color::Fg(color::LightBlue)));\n\n out.push_grapheme_str(format!(\"rnk: {:.*} \", 2, command.rank));\n\n out.push_grapheme_str(format!(\"age: {:.*} \", 2, command.features.age_factor));\n\n out.push_grapheme_str(format!(\"lng: {:.*} \", 2, command.features.length_factor));\n\n out.push_grapheme_str(format!(\"ext: {:.*} \", 0, command.features.exit_factor));\n\n out.push_grapheme_str(format!(\"r_ext: {:.*} \", 0, command.features.recent_failure_factor));\n\n out.push_grapheme_str(format!(\"dir: {:.*} \", 3, command.features.dir_factor));\n\n out.push_grapheme_str(format!(\"s_dir: {:.*} \", 3, command.features.selected_dir_factor));\n\n out.push_grapheme_str(format!(\"ovlp: {:.*} \", 3, command.features.overlap_factor));\n\n out.push_grapheme_str(format!(\"i_ovlp: {:.*} \", 3, command.features.immediate_overlap_factor));\n\n out.push_grapheme_str(format!(\"occ: {:.*}\", 2, command.features.occurrences_factor));\n\n out.push_grapheme_str(format!(\"s_occ: {:.*} \", 2, command.features.selected_occurrences_factor));\n\n out.push_str(&base_color);\n\n }\n\n\n\n out.string\n\n }\n\n}\n\n\n\n// TODO:\n", "file_path": "src/interface.rs", "rank": 49, "score": 14.835927805564841 }, { "content": " overlap: node2.overlap - node_increments[2].overlap,\n\n immediate_overlap: node2.immediate_overlap - node_increments[2].immediate_overlap,\n\n selected_occurrences: node2.selected_occurrences - node_increments[2].selected_occurrences,\n\n occurrences: node2.occurrences - node_increments[2].occurrences,\n\n }\n\n ],\n\n hidden_node_sums: [0.0, 0.0, 0.0],\n\n hidden_node_outputs: [0.0, 0.0, 0.0],\n\n final_bias: network.final_bias - output_increments[0],\n\n final_weights: [network.final_weights[0] - output_increments[1], network.final_weights[1] - output_increments[2], 
network.final_weights[2] - output_increments[3]],\n\n final_sum: 0.0,\n\n final_output: 0.0,\n\n };\n\n });\n\n\n\n if batch_error / batch_samples < best_restart_error {\n\n best_restart_error = batch_error / batch_samples;\n\n best_restart_network = network;\n\n cycles_since_best_restart_error = 0;\n\n } else {\n", "file_path": "src/trainer.rs", "rank": 50, "score": 14.233636049125655 }, { "content": " dir_factor,\n\n overlap_factor,\n\n immediate_overlap_factor,\n\n selected_occurrences_factor,\n\n occurrences_factor,\n\n };\n\n\n\n Ok(network.output(&features))\n\n }).unwrap_or_else(|err| panic!(format!(\"McFly error: Successful create_scalar_function ({})\", err)));\n\n}\n", "file_path": "src/history/db_extensions.rs", "rank": 51, "score": 13.985483677593157 }, { "content": "\n\n self.connection\n\n .execute(\n\n \"UPDATE contextual_commands\n\n SET rank = nn_rank(age_factor, length_factor, exit_factor,\n\n recent_failure_factor, selected_dir_factor, dir_factor,\n\n overlap_factor, immediate_overlap_factor,\n\n selected_occurrences_factor, occurrences_factor);\",\n\n NO_PARAMS,\n\n )\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Ranking of temp table to work ({})\", err)));\n\n\n\n self.connection\n\n .execute(\n\n \"CREATE INDEX temp.MyIndex ON contextual_commands(id);\",\n\n NO_PARAMS,\n\n )\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Creation of index on temp table to work ({})\", err)));\n\n\n\n // println!(\"Seconds: {}\", (beginning_of_execution.elapsed().as_secs() as f64) + (beginning_of_execution.elapsed().subsec_nanos() as f64 / 1000_000_000.0));\n", "file_path": "src/history/history.rs", "rank": 52, "score": 13.663961457337916 }, { "content": " let momentum = 0.0;\n\n let batch_size = 1000;\n\n let plateau_threshold = 3000;\n\n let generator = TrainingSampleGenerator::new(self.settings, self.history);\n\n\n\n println!(\"Evaluating error rate on current {:#?}\", self.history.network);\n\n let mut best_overall_network = 
self.history.network;\n\n let mut best_overall_error = self.history.network.average_error(&generator, batch_size * 10);\n\n println!(\"Current network error rate is {}\", best_overall_error);\n\n\n\n loop {\n\n let mut best_restart_network = Network::random();\n\n let mut best_restart_error = 10000.0;\n\n let mut cycles_since_best_restart_error = 0;\n\n let mut network = Network::random();\n\n let mut node_increments = [Node::default(), Node::default(), Node::default()];\n\n let mut output_increments = [0.0, 0.0, 0.0, 0.0];\n\n\n\n loop {\n\n let mut batch_error = 0.0;\n", "file_path": "src/trainer.rs", "rank": 53, "score": 13.317087559396164 }, { "content": " }\n\n\n\n pub fn commands(&self, session_id: &Option<String>, num: i16, offset: u16, random: bool) -> Vec<Command> {\n\n let order = if random { \"RANDOM()\" } else { \"id\" };\n\n let query = if session_id.is_none() {\n\n format!(\"SELECT id, cmd, cmd_tpl, session_id, when_run, exit_code, selected, dir FROM commands ORDER BY {} DESC LIMIT :limit OFFSET :offset\", order)\n\n } else {\n\n format!(\"SELECT id, cmd, cmd_tpl, session_id, when_run, exit_code, selected, dir FROM commands WHERE session_id = :session_id ORDER BY {} DESC LIMIT :limit OFFSET :offset\", order)\n\n };\n\n\n\n if session_id.is_none() {\n\n self.run_query(&query, &[\n\n (\":limit\", &num),\n\n (\":offset\", &offset),\n\n ])\n\n } else {\n\n self.run_query(&query, &[\n\n (\":session_id\", &session_id.to_owned().unwrap()),\n\n (\":limit\", &num),\n\n (\":offset\", &offset),\n", "file_path": "src/history/history.rs", "rank": 54, "score": 12.39810459733944 }, { "content": " features: Features {\n\n age_factor: row.get_checked(9).unwrap_or_else(|err| panic!(format!(\"McFly error: age_factor to be readable ({})\", err))),\n\n length_factor: row.get_checked(10).unwrap_or_else(|err| panic!(format!(\"McFly error: length_factor to be readable ({})\", err))),\n\n exit_factor: row.get_checked(11).unwrap_or_else(|err| panic!(format!(\"McFly error: 
exit_factor to be readable ({})\", err))),\n\n recent_failure_factor: row.get_checked(12)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: recent_failure_factor to be readable ({})\", err))),\n\n selected_dir_factor: row.get_checked(13)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: selected_dir_factor to be readable ({})\", err))),\n\n dir_factor: row.get_checked(14).unwrap_or_else(|err| panic!(format!(\"McFly error: dir_factor to be readable ({})\", err))),\n\n overlap_factor: row.get_checked(15).unwrap_or_else(|err| panic!(format!(\"McFly error: overlap_factor to be readable ({})\", err))),\n\n immediate_overlap_factor: row.get_checked(16)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: immediate_overlap_factor to be readable ({})\", err))),\n\n selected_occurrences_factor: row.get_checked(17)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: selected_occurrences_factor to be readable ({})\", err))),\n\n occurrences_factor: row.get_checked(18)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: occurrences_factor to be readable ({})\", err))),\n\n },\n\n },\n\n )\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Query Map to work ({})\", err)));\n", "file_path": "src/history/history.rs", "rank": 55, "score": 12.387688984599652 }, { "content": "\n\n println!(\"Done!\");\n\n\n\n data_set\n\n }\n\n\n\n pub fn generate<F>(&self, records: Option<usize>, mut handler: F) where F: FnMut(&Features, bool) {\n\n let mut positive_examples = 0;\n\n let mut negative_examples = 0;\n\n let records = records.unwrap_or_else(|| self.data_set.len());\n\n\n\n loop {\n\n if let Some((features, correct)) = rand::thread_rng().choose(&self.data_set) {\n\n if *correct && positive_examples <= negative_examples {\n\n handler(features, *correct);\n\n positive_examples += 1;\n\n } else if !*correct && negative_examples <= positive_examples {\n\n handler(features, *correct);\n\n negative_examples += 1;\n\n }\n\n }\n\n\n\n if positive_examples + 
negative_examples >= records {\n\n break;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/training_sample_generator.rs", "rank": 56, "score": 11.7534615925689 }, { "content": "use crate::history::Command;\n\nuse crate::history::Features;\n\nuse crate::history::History;\n\nuse crate::settings::Settings;\n\nuse crate::training_cache;\n\nuse rand::Rng;\n\n\n\n#[derive(Debug)]\n\npub struct TrainingSampleGenerator<'a> {\n\n settings: &'a Settings,\n\n history: &'a History,\n\n data_set: Vec<(Features, bool)>,\n\n}\n\n\n\nimpl<'a> TrainingSampleGenerator<'a> {\n\n pub fn new(settings: &'a Settings, history: &'a History) -> TrainingSampleGenerator<'a> {\n\n let cache_path = Settings::mcfly_training_cache_path();\n\n let data_set =\n\n if settings.refresh_training_cache || !cache_path.exists() {\n\n let ds = TrainingSampleGenerator::generate_data_set(history);\n", "file_path": "src/training_sample_generator.rs", "rank": 57, "score": 11.658602882473328 }, { "content": " let mut batch_samples = 0.0;\n\n\n\n // Two node network example:\n\n // (Note: we currently are using a three node version, s_0 to s_2 and o_0 to o_2.)\n\n //\n\n // b_1\n\n // \\\n\n // f_1 --- s_1 -- o_1\n\n // \\ / \\\n\n // x b_3 -- s_3 -> o_3 -> e\n\n // / \\ /\n\n // f_2 --- s_2 -- o_2\n\n // /\n\n // b_2\n\n //\n\n // Error (e) = 0.5(t - o_3)^2\n\n // Final output (o_3) = tanh(s_3)\n\n // Final sum (s_3) = b_3 + w3_1*o_1 + w3_2*o_2\n\n // Hidden node 1 output (o_1) = tanh(s_1)\n\n // Hidden node 1 sum (s_1) = b_1 + w1_1*f_1 + w1_2*f_2\n", "file_path": "src/trainer.rs", "rank": 58, "score": 11.193090577883089 }, { "content": "use rusqlite;\n\nuse rusqlite::Connection;\n\nuse crate::history::history::Features;\n\nuse crate::network::Network;\n\n\n", "file_path": "src/history/db_extensions.rs", "rank": 59, "score": 10.997498576742046 }, { "content": " training_cache::write(&ds, &cache_path);\n\n ds\n\n } else {\n\n training_cache::read(&cache_path)\n\n };\n\n\n\n TrainingSampleGenerator { settings, 
history, data_set }\n\n }\n\n\n\n pub fn generate_data_set(history: &History) -> Vec<(Features, bool)> {\n\n let mut data_set: Vec<(Features, bool)> = Vec::new();\n\n let commands = history.commands(&None, -1, 0, true);\n\n\n\n let mut positive_examples = 0;\n\n let mut negative_examples = 0;\n\n\n\n println!(\"Generating training set for {} commands\", commands.len());\n\n\n\n for (i, command) in commands.iter().enumerate() {\n\n if command.dir.is_none() || command.exit_code.is_none() || command.when_run.is_none() { continue; }\n", "file_path": "src/training_sample_generator.rs", "rank": 60, "score": 10.243557601541902 }, { "content": "\n\n #[allow(unused_variables)]\n\n let beginning_of_execution = Instant::now();\n\n self.connection.execute_named(\n\n \"CREATE TEMP TABLE contextual_commands AS SELECT\n\n id, cmd, cmd_tpl, session_id, when_run, exit_code, selected, dir,\n\n\n\n /* to be filled in later */\n\n 0.0 AS rank,\n\n\n\n /* length of the command string */\n\n LENGTH(c.cmd) / :max_length AS length_factor,\n\n\n\n /* age of the last execution of this command (0.0 is new, 1.0 is old) */\n\n MIN((:when_run_max - when_run) / :history_duration) AS age_factor,\n\n\n\n /* average error state (1: always successful, 0: always errors) */\n\n SUM(CASE WHEN exit_code = 0 THEN 1.0 ELSE 0.0 END) / COUNT(*) as exit_factor,\n\n\n\n /* recent failure (1 if failed recently, 0 if not) */\n", "file_path": "src/history/history.rs", "rank": 61, "score": 10.189970509795302 }, { "content": " if positive_examples <= negative_examples {\n\n if let Some(our_command_index) = results.iter().position(|ref c| c.cmd.eq(&command.cmd)) {\n\n let what_should_have_been_first = &results[our_command_index];\n\n data_set.push((what_should_have_been_first.features.clone(), true));\n\n positive_examples += 1;\n\n }\n\n }\n\n\n\n if negative_examples <= positive_examples {\n\n // Get the features for another command that isn't the correct one.\n\n if let Some(random_command) = 
rand::thread_rng().choose(&results\n\n .iter()\n\n .filter(|c| !c.cmd.eq(&command.cmd))\n\n .collect::<Vec<&Command>>())\n\n {\n\n data_set.push((random_command.features.clone(), false));\n\n negative_examples += 1;\n\n }\n\n }\n\n }\n", "file_path": "src/training_sample_generator.rs", "rank": 62, "score": 10.139210910859843 }, { "content": "pub mod bash_history;\n\npub mod command_input;\n\npub mod fake_typer;\n\npub mod fixed_length_grapheme_string;\n\npub mod history;\n\npub mod history_cleaner;\n\npub mod interface;\n\npub mod path_update_helpers;\n\npub mod settings;\n\npub mod simplified_command;\n\npub mod trainer;\n\npub mod training_cache;\n\npub mod training_sample_generator;\n\npub mod node;\n\npub mod network;\n", "file_path": "src/lib.rs", "rank": 63, "score": 9.887830151362577 }, { "content": " Exact(usize),\n\n}\n\n\n\n#[derive(Debug)]\n\n/// CommandInput data structure\n\npub struct CommandInput {\n\n /// The command itself\n\n pub command: String,\n\n /// The current cursor position\n\n pub cursor: usize,\n\n /// A cache of the length of command in graphemes (not bytes or chars!)\n\n pub len: usize,\n\n}\n\n\n\nimpl fmt::Display for CommandInput {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.command.fmt(f)\n\n }\n\n}\n\n\n", "file_path": "src/command_input.rs", "rank": 64, "score": 9.873555940457477 }, { "content": " MAX(CASE WHEN exit_code != 0 AND :now - when_run < 120 THEN 1.0 ELSE 0.0 END) AS recent_failure_factor,\n\n\n\n /* percentage run in this directory (1: always run in this directory, 0: never run in this directory) */\n\n SUM(CASE WHEN dir = :directory THEN 1.0 ELSE 0.0 END) / COUNT(*) as dir_factor,\n\n\n\n /* percentage of time selected in this directory (1: only selected in this dir, 0: only selected elsewhere) */\n\n SUM(CASE WHEN dir = :directory AND selected = 1 THEN 1.0 ELSE 0.0 END) / (SUM(CASE WHEN selected = 1 THEN 1.0 ELSE 0.0 END) + 1) as selected_dir_factor,\n\n\n\n /* average contextual overlap of 
this command (0: none of the last 3 commands has ever overlapped with this command, 1: all of the last three commands always overlap with this command) */\n\n SUM((\n\n SELECT COUNT(DISTINCT c2.cmd_tpl) FROM commands c2\n\n WHERE c2.id >= c.id - :lookback AND c2.id < c.id AND c2.cmd_tpl IN (:last_commands0, :last_commands1, :last_commands2)\n\n ) / :lookback_f64) / COUNT(*) AS overlap_factor,\n\n\n\n /* average overlap with the last command (0: this command never follows the last command, 1: this command always follows the last command) */\n\n SUM((SELECT COUNT(*) FROM commands c2 WHERE c2.id = c.id - 1 AND c2.cmd_tpl = :last_commands0)) / COUNT(*) AS immediate_overlap_factor,\n\n\n\n /* percentage selected (1: this is the most commonly selected command, 0: this command is never selected) */\n\n SUM(CASE WHEN selected = 1 THEN 1.0 ELSE 0.0 END) / :max_selected_occurrences AS selected_occurrences_factor,\n\n\n", "file_path": "src/history/history.rs", "rank": 65, "score": 9.524113840185743 }, { "content": "use crate::bash_history;\n\nuse clap::{crate_version, crate_authors, value_t};\n\nuse clap::AppSettings;\n\nuse clap::{App, Arg, SubCommand};\n\nuse std::env;\n\nuse std::path::PathBuf;\n\nuse std::time::SystemTime;\n\nuse std::time::UNIX_EPOCH;\n\nuse dirs::home_dir;\n\n\n\n#[derive(Debug)]\n\npub enum Mode {\n\n Add,\n\n Search,\n\n Train,\n\n Move\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum KeyScheme {\n", "file_path": "src/settings.rs", "rank": 66, "score": 9.380028092497094 }, { "content": "pub use self::history::{Command, Features, History};\n\n\n\nmod db_extensions;\n\nmod history;\n\nmod schema;\n", "file_path": "src/history/mod.rs", "rank": 67, "score": 9.057607658097634 }, { "content": " age_factor, length_factor, exit_factor, recent_failure_factor,\n\n selected_dir_factor, dir_factor, overlap_factor, immediate_overlap_factor,\n\n selected_occurrences_factor, occurrences_factor\n\n FROM contextual_commands\n\n WHERE cmd LIKE (:like)\n\n ORDER BY rank DESC 
LIMIT :limit\";\n\n let mut statement = self.connection.prepare(query).unwrap_or_else(|err| panic!(format!(\"McFly error: Prepare to work ({})\", err)));\n\n let command_iter = statement\n\n .query_map_named(\n\n &[(\":like\", &like_query), (\":limit\", &num)],\n\n |row| Command {\n\n id: row.get_checked(0).unwrap_or_else(|err| panic!(format!(\"McFly error: id to be readable ({})\", err))),\n\n cmd: row.get_checked(1).unwrap_or_else(|err| panic!(format!(\"McFly error: cmd to be readable ({})\", err))),\n\n cmd_tpl: row.get_checked(2).unwrap_or_else(|err| panic!(format!(\"McFly error: cmd_tpl to be readable ({})\", err))),\n\n session_id: row.get_checked(3).unwrap_or_else(|err| panic!(format!(\"McFly error: session_id to be readable ({})\", err))),\n\n when_run: row.get_checked(4).unwrap_or_else(|err| panic!(format!(\"McFly error: when_run to be readable ({})\", err))),\n\n exit_code: row.get_checked(5).unwrap_or_else(|err| panic!(format!(\"McFly error: exit_code to be readable ({})\", err))),\n\n selected: row.get_checked(6).unwrap_or_else(|err| panic!(format!(\"McFly error: selected to be readable ({})\", err))),\n\n dir: row.get_checked(7).unwrap_or_else(|err| panic!(format!(\"McFly error: dir to be readable ({})\", err))),\n\n rank: row.get_checked(8).unwrap_or_else(|err| panic!(format!(\"McFly error: rank to be readable ({})\", err))),\n", "file_path": "src/history/history.rs", "rank": 68, "score": 9.04318694062998 }, { "content": " .execute_named(\n\n \"DELETE FROM commands WHERE cmd = :command\",\n\n &[(\":command\", &command)],\n\n )\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: DELETE from commands to work ({})\", err)));\n\n }\n\n\n\n pub fn update_paths(&self, old_path: &str, new_path: &str, print_output: bool) {\n\n let normalized_old_path = path_update_helpers::normalize_path(old_path);\n\n let normalized_new_path = path_update_helpers::normalize_path(new_path);\n\n\n\n if normalized_old_path.len() > 1 && normalized_new_path.len() > 1 {\n\n 
let like_query = normalized_old_path.to_string() + \"/%\";\n\n\n\n let mut dir_update_statement = self.connection.prepare(\n\n \"UPDATE commands SET dir = :new_dir || SUBSTR(dir, :length) WHERE dir = :exact OR dir LIKE (:like)\"\n\n ).unwrap();\n\n\n\n let mut old_dir_update_statement = self.connection.prepare(\n\n \"UPDATE commands SET old_dir = :new_dir || SUBSTR(old_dir, :length) WHERE old_dir = :exact OR old_dir LIKE (:like)\"\n", "file_path": "src/history/history.rs", "rank": 69, "score": 8.871721658695522 }, { "content": "\n\nimpl Default for Settings {\n\n fn default() -> Settings {\n\n Settings {\n\n mode: Mode::Add,\n\n command: String::new(),\n\n session_id: String::new(),\n\n mcfly_history: PathBuf::new(),\n\n dir: String::new(),\n\n when_run: None,\n\n exit_code: None,\n\n old_dir: None,\n\n refresh_training_cache: false,\n\n append_to_histfile: false,\n\n debug: false,\n\n lightmode: false,\n\n key_scheme: KeyScheme::Emacs\n\n }\n\n }\n\n}\n", "file_path": "src/settings.rs", "rank": 70, "score": 8.864538501817716 }, { "content": "impl CommandInput {\n\n pub fn from<S: Into<String>>(s: S) -> CommandInput {\n\n let mut input = CommandInput {\n\n command: s.into(),\n\n cursor: 0,\n\n len: 0,\n\n };\n\n input.recompute_caches();\n\n input.cursor = input.len;\n\n input\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.command.clear();\n\n self.recompute_caches();\n\n }\n\n\n\n pub fn set(&mut self, str: &str) {\n\n self.command = str.to_string();\n\n self.recompute_caches();\n", "file_path": "src/command_input.rs", "rank": 71, "score": 8.83164648518991 }, { "content": " Emacs,\n\n Vim\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Settings {\n\n pub mode: Mode,\n\n pub debug: bool,\n\n pub session_id: String,\n\n pub mcfly_history: PathBuf,\n\n pub command: String,\n\n pub dir: String,\n\n pub when_run: Option<i64>,\n\n pub exit_code: Option<i32>,\n\n pub old_dir: Option<String>,\n\n pub append_to_histfile: bool,\n\n pub refresh_training_cache: bool,\n\n pub 
lightmode: bool,\n\n pub key_scheme: KeyScheme\n\n}\n", "file_path": "src/settings.rs", "rank": 72, "score": 8.639407246926611 }, { "content": " when_run_min -= 60.0 * 60.0;\n\n }\n\n\n\n let max_occurrences: f64 = self.connection\n\n .query_row(\n\n \"SELECT COUNT(*) AS c FROM commands GROUP BY cmd ORDER BY c DESC LIMIT 1\",\n\n NO_PARAMS,\n\n |row| row.get(0),\n\n )\n\n .unwrap_or(1.0);\n\n\n\n let max_selected_occurrences: f64 = self.connection\n\n .query_row(\"SELECT COUNT(*) AS c FROM commands WHERE selected = 1 GROUP BY cmd ORDER BY c DESC LIMIT 1\", NO_PARAMS,\n\n |row| row.get(0)).unwrap_or(1.0);\n\n\n\n let max_length: f64 = self.connection\n\n .query_row(\"SELECT MAX(LENGTH(cmd)) FROM commands\", NO_PARAMS, |row| {\n\n row.get(0)\n\n })\n\n .unwrap_or(100.0);\n", "file_path": "src/history/history.rs", "rank": 73, "score": 8.57908806609486 }, { "content": " return true;\n\n }\n\n !command.eq(&last_command.unwrap().cmd)\n\n }\n\n\n\n pub fn add(\n\n &self,\n\n command: &str,\n\n session_id: &str,\n\n dir: &str,\n\n when_run: &Option<i64>,\n\n exit_code: Option<i32>,\n\n old_dir: &Option<String>,\n\n ) {\n\n self.possibly_update_paths(command, exit_code);\n\n let selected = self.determine_if_selected_from_ui(command, session_id, dir);\n\n let simplified_command = SimplifiedCommand::new(command, true);\n\n self.connection.execute_named(\"INSERT INTO commands (cmd, cmd_tpl, session_id, when_run, exit_code, selected, dir, old_dir) VALUES (:cmd, :cmd_tpl, :session_id, :when_run, :exit_code, :selected, :dir, :old_dir)\",\n\n &[\n\n (\":cmd\", &command.to_owned()),\n", "file_path": "src/history/history.rs", "rank": 74, "score": 8.189390052651643 }, { "content": " } else {\n\n // Don't try to handle non-utf8 filenames, at least for now.\n\n return;\n\n }\n\n }\n\n\n\n let to_pathbuf = PathBuf::from(&normalized_to);\n\n if to_pathbuf.exists() && to_pathbuf.is_dir() {\n\n self.update_paths(&normalized_from, &normalized_to, false);\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub 
fn find_matches(&self, cmd: &str, num: i16) -> Vec<Command> {\n\n let mut like_query = \"%\".to_string();\n\n like_query.push_str(cmd);\n\n like_query.push_str(\"%\");\n\n\n\n let query = \"SELECT id, cmd, cmd_tpl, session_id, when_run, exit_code, selected, dir, rank,\n", "file_path": "src/history/history.rs", "rank": 75, "score": 8.059899648001318 }, { "content": "use crate::command_input::{CommandInput, Move};\n\nuse crate::history::History;\n\n\n\nuse crate::fixed_length_grapheme_string::FixedLengthGraphemeString;\n\nuse crate::history::Command;\n\nuse crate::history_cleaner;\n\nuse crate::settings::Settings;\n\nuse crate::settings::KeyScheme;\n\nuse std::io::{stdin, stdout, Write};\n\nuse termion::color;\n\nuse termion::event::Key;\n\nuse termion::input::TermRead;\n\nuse termion::raw::IntoRawMode;\n\nuse termion::screen::AlternateScreen;\n\nuse termion::{clear, cursor, terminal_size};\n\n\n\npub struct Interface<'a> {\n\n history: &'a History,\n\n settings: &'a Settings,\n\n input: CommandInput,\n", "file_path": "src/interface.rs", "rank": 76, "score": 8.036056323355927 }, { "content": " batch_error += error;\n\n batch_samples += 1.0;\n\n\n\n let d_e_d_o_3 = -(target - network.final_output);\n\n let d_o_3_d_s_3 = 1.0 - network.final_sum.tanh().powi(2);\n\n\n\n // Output bias\n\n output_increments[0] = momentum * output_increments[0] + lr * d_e_d_o_3 * d_o_3_d_s_3 * 1.0;\n\n // Final sum node 1 output weight\n\n output_increments[1] = momentum * output_increments[1] + lr * d_e_d_o_3 * d_o_3_d_s_3 * network.hidden_node_outputs[0];\n\n // Final sum node 2 output weight\n\n output_increments[2] = momentum * output_increments[2] + lr * d_e_d_o_3 * d_o_3_d_s_3 * network.hidden_node_outputs[1];\n\n // Final sum node 3 output weight\n\n output_increments[3] = momentum * output_increments[3] + lr * d_e_d_o_3 * d_o_3_d_s_3 * network.hidden_node_outputs[2];\n\n\n\n let d_s_3_d_o_0 = network.final_weights[0];\n\n let d_s_3_d_o_1 = network.final_weights[1];\n\n let 
d_s_3_d_o_2 = network.final_weights[2];\n\n let d_o_0_d_s_0 = 1.0 - network.hidden_node_sums[0].tanh().powi(2);\n\n let d_o_1_d_s_1 = 1.0 - network.hidden_node_sums[1].tanh().powi(2);\n", "file_path": "src/trainer.rs", "rank": 77, "score": 7.819951137942267 }, { "content": " tmp = 0;\n\n } else if tmp > self.len as isize {\n\n tmp = self.len as isize;\n\n }\n\n self.cursor = tmp as usize;\n\n }\n\n\n\n pub fn delete(&mut self, cmd: Move) {\n\n let mut new_command = String::with_capacity(self.command.len());\n\n let command_copy = self.command.to_owned();\n\n let vec = command_copy.grapheme_indices(true);\n\n\n\n match cmd {\n\n Move::Backward => {\n\n if self.cursor == 0 {\n\n return;\n\n }\n\n self.move_cursor(Move::Backward);\n\n\n\n for (count, (_, item)) in vec.enumerate() {\n", "file_path": "src/command_input.rs", "rank": 78, "score": 7.800533078407302 }, { "content": "use unicode_segmentation::UnicodeSegmentation;\n\n\n\nconst TRUNCATE_TO_N_TOKENS: u16 = 2;\n\n\n\n#[derive(Debug)]\n\npub struct SimplifiedCommand {\n\n pub original: String,\n\n pub result: String,\n\n pub truncate: bool,\n\n}\n\n\n\n#[allow(clippy::collapsible_if)]\n\n/// The goal of SimplifiedCommand is to produce a reduced approximation of the given command for template matching. It may\n\n/// not produce an exact simplification. 
(For example, it does not handle deeply nested escaping, and it drops escape characters.)\n\n/// Possible enhancements:\n\n/// - Sort and expand command line options.\n\n/// - Check to see if unknown strings represent valid local paths in the directory where the command was run.\n\nimpl SimplifiedCommand {\n\n pub fn new<S: Into<String>>(command: S, truncate: bool) -> SimplifiedCommand {\n\n let mut simplified_command = SimplifiedCommand {\n", "file_path": "src/simplified_command.rs", "rank": 79, "score": 7.666363527291704 }, { "content": "\n\n pub fn record_selected_from_ui(&self, command: &str, session_id: &str, dir: &str) {\n\n self.connection.execute_named(\"INSERT INTO selected_commands (cmd, session_id, dir) VALUES (:cmd, :session_id, :dir)\",\n\n &[\n\n (\":cmd\", &command.to_owned()),\n\n (\":session_id\", &session_id.to_owned()),\n\n (\":dir\", &dir.to_owned())\n\n ]).unwrap_or_else(|err| panic!(format!(\"McFly error: Insert into selected_commands to work ({})\", err)));\n\n }\n\n\n\n // Update historical paths in our database if a directory has been renamed or moved.\n\n pub fn possibly_update_paths(&self, command: &str, exit_code: Option<i32>) {\n\n let successful = exit_code.is_none() || exit_code.unwrap() == 0;\n\n let is_move = |c:&str| c.to_lowercase().starts_with(\"mv \")\n\n && !c.contains('*')\n\n && !c.contains('?');\n\n if successful && is_move(command) {\n\n let parts = path_update_helpers::parse_mv_command(command);\n\n if parts.len() == 2 {\n\n let normalized_from = path_update_helpers::normalize_path(&parts[0]);\n", "file_path": "src/history/history.rs", "rank": 80, "score": 7.664434038150662 }, { "content": " /* percentage of time this command is run relative to the most common command (1: this is the most common command, 0: this is the least common command) */\n\n COUNT(*) / :max_occurrences AS occurrences_factor\n\n\n\n FROM commands c WHERE when_run > :start_time AND when_run < :end_time GROUP BY cmd ORDER BY id DESC;\",\n\n &[\n\n 
(\":when_run_max\", &when_run_max),\n\n (\":history_duration\", &(when_run_max - when_run_min)),\n\n (\":directory\", &dir.to_owned()),\n\n (\":max_occurrences\", &max_occurrences),\n\n (\":max_length\", &max_length),\n\n (\":max_selected_occurrences\", &max_selected_occurrences),\n\n (\":lookback\", &lookback),\n\n (\":lookback_f64\", &(lookback as f64)),\n\n (\":last_commands0\", &last_commands[0].to_owned()),\n\n (\":last_commands1\", &last_commands[1].to_owned()),\n\n (\":last_commands2\", &last_commands[2].to_owned()),\n\n (\":start_time\", &start_time.unwrap_or(0).to_owned()),\n\n (\":end_time\", &end_time.unwrap_or(SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_else(|err| panic!(format!(\"McFly error: Time went backwards ({})\", err))).as_secs() as i64).to_owned()),\n\n (\":now\", &now.unwrap_or(SystemTime::now().duration_since(UNIX_EPOCH).unwrap_or_else(|err| panic!(format!(\"McFly error: Time went backwards ({})\", err))).as_secs() as i64).to_owned())\n\n ]).unwrap_or_else(|err| panic!(format!(\"McFly error: Creation of temp table to work ({})\", err)));\n", "file_path": "src/history/history.rs", "rank": 81, "score": 7.517174577274475 }, { "content": "use crate::history::Features;\n\nuse csv::Writer;\n\nuse std::fs::File;\n\nuse csv::Reader;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "src/training_cache.rs", "rank": 82, "score": 7.059579217762275 }, { "content": " if command.cmd.is_empty() { continue; }\n\n\n\n if i % 100 == 0 {\n\n println!(\"Done with {}\", i);\n\n }\n\n\n\n // Setup the cache for the time this command was recorded.\n\n // Unwrap is safe here because we check command.dir.is_none() above.\n\n history.build_cache_table(\n\n &command.dir.to_owned().unwrap(),\n\n &Some(command.session_id.clone()),\n\n None,\n\n command.when_run,\n\n command.when_run,\n\n );\n\n\n\n // Load the entire match set.\n\n let results = history.find_matches(&String::new(), -1);\n\n\n\n // Get the features for this command at the time it was logged.\n", 
"file_path": "src/training_sample_generator.rs", "rank": 83, "score": 6.773572679483965 }, { "content": "use crate::simplified_command::SimplifiedCommand;\n\nuse rusqlite::{Connection, NO_PARAMS};\n\nuse std::io;\n\nuse std::io::Write;\n\n\n\npub const CURRENT_SCHEMA_VERSION: u16 = 3;\n\n\n", "file_path": "src/history/schema.rs", "rank": 84, "score": 6.67718038183284 }, { "content": " query_map_named(params, closure).\n\n unwrap_or_else(|err| panic!(format!(\"McFly error: Query Map to work ({})\", err)));\n\n\n\n let mut vec = Vec::new();\n\n for result in command_iter {\n\n if let Ok(command) = result {\n\n vec.push(command);\n\n }\n\n }\n\n\n\n vec\n\n }\n\n\n\n pub fn last_command(&self, session_id: &Option<String>) -> Option<Command> {\n\n self.commands(session_id, 1, 0, false)\n\n .get(0)\n\n .cloned()\n\n }\n\n\n\n pub fn last_command_templates(\n", "file_path": "src/history/history.rs", "rank": 85, "score": 6.651141141204207 }, { "content": "use core::mem;\n\nuse std::fmt;\n\nuse unicode_segmentation::UnicodeSegmentation;\n\n\n\n#[derive(Debug)]\n\npub enum InputCommand {\n\n Insert(char),\n\n Backspace,\n\n Delete,\n\n Move(Move),\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Move {\n\n BOL,\n\n EOL,\n\n BackwardWord,\n\n ForwardWord,\n\n Backward,\n\n Forward,\n", "file_path": "src/command_input.rs", "rank": 86, "score": 6.647073336406802 }, { "content": " // Hidden node 2 output (o_2) = tanh(s_2)\n\n // Hidden node 2 sum (s_2) = b_2 + w2_1*f_1 + w2_2*f_2\n\n // Derivative of error with respect to o_3 (d_e/d_o_3 0.5(t - o_3)^2): -(t - o_3)\n\n // Derivative of o_3 respect to s_3 (d_o_3/d_s_3 tanh(s_3)): 1.0 - tanh(s_3)^2\n\n // Derivative of s_3 with respect to weight w3_1 (d_s_3/d_w3_1 bias + w3_1*o_1 + w3_2*o_2): o_1\n\n // Derivative of error with respect to weight w3_1 (d_e/d_o_3 * d_o_3/d_s_3 * d_s_3/d_w3_1): -(t - o_3) * (1 - tanh(s_3)^2) * o_1\n\n // Derivative of s_3 with respect to o_1 (d_s_3/d_o_1 b_3 + w3_1*o_1 + w3_2*o_2): w3_1\n\n // Derivative of 
o_1 with respect to s_1 (d_o_1/d_s_1): 1.0 - tanh(s_1)^2\n\n // Derivative of s_1 with respect to weight w1_1 (d_s_1/d_w1_1): f_1\n\n // Full derivation of o_3: tanh(b_3 + w3_1*tanh(b_1 + w1_1*f_1 + w1_2*f_2) + w3_2*tanh(b_2 + w2_1*f_1 + w2_2*f_2))\n\n // Full derivation of s_3: b_3 + w3_1*tanh(b_1 + w1_1*f_1 + w1_2*f_2) + w3_2*tanh(b_2 + w2_1*f_1 + w2_2*f_2)\n\n // Full error derivation: 0.5(t - tanh(b_3 + w3_1*tanh(b_1 + w1_1*f_1 + w1_2*f_2) + w3_2*tanh(b_2 + w2_1*f_1 + w2_2*f_2)))^2\n\n // Full derivative for o_1: -(t - o_3) * (1.0 - tanh(s_3)^2) * w3_1\n\n // Full derivative for w1_1: -(t - o_3) * (1.0 - tanh(s_3)^2) * w3_1 * (1.0 - tanh(s_1)^2) * f_1\n\n // Checked: https://www.wolframcloud.com/objects/617707c2-5016-4fb3-b73d-bd688b884967\n\n generator.generate(Some(batch_size), |features: &Features, correct: bool| {\n\n let target = if correct { 1.0 } else { -1.0 };\n\n network.compute(features);\n\n\n\n let error = 0.5 * (target - network.final_output).powi(2);\n", "file_path": "src/trainer.rs", "rank": 87, "score": 6.404952648403816 }, { "content": "\n\n settings.when_run = Some(\n\n value_t!(add_matches, \"when\", i64).unwrap_or(SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap_or_else(|err| panic!(format!(\"McFly error: Time went backwards ({})\", err)))\n\n .as_secs()\n\n as i64),\n\n );\n\n\n\n settings.append_to_histfile = add_matches.is_present(\"append_to_histfile\");\n\n\n\n if add_matches.value_of(\"exit\").is_some() {\n\n settings.exit_code =\n\n Some(value_t!(add_matches, \"exit\", i32).unwrap_or_else(|e| e.exit()));\n\n }\n\n\n\n if let Some(dir) = add_matches.value_of(\"directory\") {\n\n settings.dir = dir.to_string();\n\n } else {\n\n settings.dir = env::var(\"PWD\").unwrap_or_else(|err| panic!(format!(\"McFly error: Unable to determine current directory ({})\", err)));\n", "file_path": "src/settings.rs", "rank": 88, "score": 6.25501773511613 }, { "content": "\n\n let mut names = Vec::new();\n\n for command in command_iter {\n\n 
names.push(command.unwrap_or_else(|err| panic!(format!(\"McFly error: Unable to load command from DB ({})\", err))));\n\n }\n\n\n\n names\n\n }\n\n\n\n pub fn build_cache_table(\n\n &self,\n\n dir: &str,\n\n session_id: &Option<String>,\n\n start_time: Option<i64>,\n\n end_time: Option<i64>,\n\n now: Option<i64>,\n\n ) {\n\n let lookback: u16 = 3;\n\n\n\n let mut last_commands = self.last_command_templates(session_id, lookback as i16, 0);\n", "file_path": "src/history/history.rs", "rank": 89, "score": 6.2018861291648495 }, { "content": " debug: settings.debug,\n\n run: false,\n\n menu_mode: MenuMode::Normal,\n\n in_vim_insert_mode: false\n\n }\n\n }\n\n\n\n pub fn display(&mut self) -> SelectionResult {\n\n self.build_cache_table();\n\n self.select();\n\n\n\n let command = self.input.command.to_owned();\n\n\n\n if command.chars().any(|c| !c.is_whitespace()) {\n\n self.history.record_selected_from_ui(\n\n &command,\n\n &self.settings.session_id,\n\n &self.settings.dir,\n\n );\n\n SelectionResult {\n", "file_path": "src/interface.rs", "rank": 90, "score": 6.180962845679868 }, { "content": " original: command.into().clone(),\n\n result: String::new(),\n\n truncate,\n\n };\n\n simplified_command.simplify();\n\n simplified_command\n\n }\n\n\n\n fn simplify(&mut self) {\n\n let mut in_double_quote = false;\n\n let mut in_single_quote = false;\n\n let mut escaped = false;\n\n let mut buffer = String::new();\n\n let mut tokens = 0;\n\n\n\n for grapheme in self.original.graphemes(true) {\n\n match grapheme {\n\n \"\\\\\" => {\n\n escaped = true;\n\n }\n", "file_path": "src/simplified_command.rs", "rank": 91, "score": 6.148870267309356 }, { "content": "use libc;\n\n\n\n// Should we be using https://docs.rs/libc/0.2.44/libc/fn.ioctl.html instead?\n\nextern \"C\" {\n\n pub fn ioctl(fd: i8, request: u32, arg: *const u8) -> i8;\n\n}\n\n\n", "file_path": "src/fake_typer.rs", "rank": 92, "score": 6.088269763932789 }, { "content": " /// the cursor.\n\n fn 
previous_word_boundary(&self) -> usize {\n\n if self.cursor == 0 {\n\n return 0;\n\n }\n\n\n\n let mut word_boundaries = self.command\n\n .split_word_bound_indices()\n\n .map(|(i, _)| i)\n\n .collect::<Vec<usize>>();\n\n\n\n word_boundaries.push(self.command.len().to_owned());\n\n\n\n let mut word_index: usize = 0;\n\n let mut found_word: bool = false;\n\n let command_copy = self.command.to_owned();\n\n let vec = command_copy\n\n .grapheme_indices(true)\n\n .enumerate()\n\n .collect::<Vec<(usize, (usize, &str))>>();\n", "file_path": "src/command_input.rs", "rank": 93, "score": 6.068105683603115 }, { "content": " (\":cmd_tpl\", &simplified_command.result.to_owned()),\n\n (\":session_id\", &session_id.to_owned()),\n\n (\":when_run\", &when_run.to_owned()),\n\n (\":exit_code\", &exit_code.to_owned()),\n\n (\":selected\", &selected),\n\n (\":dir\", &dir.to_owned()),\n\n (\":old_dir\", &old_dir.to_owned()),\n\n ]).unwrap_or_else(|err| panic!(format!(\"McFly error: Insert into commands to work ({})\", err)));\n\n }\n\n\n\n fn determine_if_selected_from_ui(\n\n &self,\n\n command: &str,\n\n session_id: &str,\n\n dir: &str,\n\n ) -> bool {\n\n let rows_affected = self.connection\n\n .execute_named(\n\n \"DELETE FROM selected_commands \\\n\n WHERE cmd = :cmd \\\n", "file_path": "src/history/history.rs", "rank": 94, "score": 6.063705514004625 }, { "content": "[![Build Status](https://travis-ci.org/cantino/mcfly.svg?branch=master)](https://travis-ci.org/cantino/mcfly)\n\n[![](https://img.shields.io/crates/v/mcfly.svg)](https://crates.io/crates/mcfly)\n\n\n\n# McFly - fly through your shell history\n\n\n\n<img src=\"/docs/screenshot.png\" alt=\"screenshot\" width=\"400\">\n\n\n\nMcFly replaces your default `ctrl-r` Bash history search with an intelligent search engine that takes into account\n\nyour working directory and the context of recently executed commands. 
McFly's suggestions are prioritized\n\nin real time with a small neural network.\n\n \n\nTL;DR: an upgraded `ctrl-r` for Bash whose history results make sense for what you're working on right now.\n\n\n\n## Features\n\n\n\n* Rebinds `ctrl-r` to bring up a full-screen reverse history search prioritized with a small neural network.\n\n* Augments your shell history to track command exit status, timestamp, and execution directory in a SQLite database.\n\n* Maintains your normal Bash history file as well so that you can stop using McFly whenever you want.\n\n* Unicode support throughout.\n\n* Includes a simple action to scrub any history item from the McFly database and your shell history files.\n\n* Designed to be extensible for other shells in the future.\n\n* Written in Rust, so it's fast and safe.\n\n\n\n## Prioritization\n\n\n\nThe key feature of McFly is smart command prioritization powered by a small neural network that runs\n\nin real time. The goal is for the command you want to run to always be one of the top suggestions.\n\n\n\nWhen suggesting a command, McFly takes into consideration:\n\n\n\n* The directory where you ran the command. You're likely to run that command in the same directory in the future.\n\n* What commands you typed before the command (e.g., the command's execution context).\n\n* How often you run the command.\n\n* When you last ran the command.\n\n* If you've selected the command in McFly before.\n\n* The command's historical exit status. 
You probably don't want to run old failed commands.\n\n\n\n## Installation\n\n\n", "file_path": "README.md", "rank": 95, "score": 6.027897906553508 }, { "content": " let mut next_word_index: usize = 0;\n\n let mut found_word: bool = false;\n\n\n\n for (count, (offset, item)) in grapheme_indices.enumerate() {\n\n if count >= self.cursor {\n\n if !found_word && item == \" \" {\n\n continue; // Ignore leading spaces\n\n } else if found_word {\n\n if offset == word_boundaries[next_word_index] {\n\n // We've found the next word boundary.\n\n return count;\n\n }\n\n } else {\n\n found_word = true;\n\n\n\n while word_boundaries[next_word_index] <= offset {\n\n next_word_index += 1;\n\n }\n\n }\n\n }\n", "file_path": "src/command_input.rs", "rank": 96, "score": 6.004041514126592 }, { "content": " fn truncate_for_display(\n\n command: &Command,\n\n search: &str,\n\n width: u16,\n\n highlight_color: String,\n\n base_color: String,\n\n debug: bool,\n\n ) -> String {\n\n let mut prev = 0;\n\n let debug_space = if debug { 90 } else { 0 };\n\n let max_grapheme_length = if width > debug_space {\n\n width - debug_space\n\n } else {\n\n 2\n\n };\n\n let mut out = FixedLengthGraphemeString::empty(max_grapheme_length);\n\n\n\n let lowercase_search = search.to_lowercase();\n\n let lowercase_cmd = command.cmd.to_lowercase();\n\n let search_len = search.len();\n", "file_path": "src/interface.rs", "rank": 97, "score": 5.863551028101107 }, { "content": " None => false\n\n };\n\n settings.key_scheme = match env::var(\"MCFLY_KEY_SCHEME\").as_ref().map(String::as_ref) {\n\n Ok(\"vim\") => KeyScheme::Vim,\n\n _ => KeyScheme::Emacs\n\n };\n\n\n\n settings\n\n }\n\n\n\n pub fn mcfly_training_cache_path() -> PathBuf {\n\n Settings::storage_dir_path().join(PathBuf::from(\"training-cache.v1.csv\"))\n\n }\n\n\n\n pub fn storage_dir_path() -> PathBuf {\n\n home_dir()\n\n .unwrap_or_else(|| panic!(\"McFly error: Unable to access home directory\"))\n\n .join(PathBuf::from(\".mcfly\"))\n\n }\n\n\n\n 
pub fn mcfly_db_path() -> PathBuf {\n\n Settings::storage_dir_path().join(PathBuf::from(\"history.db\"))\n\n }\n\n}\n", "file_path": "src/settings.rs", "rank": 98, "score": 5.8498547598953845 }, { "content": " }\n\n }\n\n\n\n (\"train\", Some(train_matches)) => {\n\n settings.mode = Mode::Train;\n\n settings.refresh_training_cache = train_matches.is_present(\"refresh_cache\");\n\n }\n\n\n\n (\"move\", Some(move_matches)) => {\n\n settings.mode = Mode::Move;\n\n settings.old_dir = Some(String::from(move_matches.value_of(\"old_dir_path\").unwrap_or_else(|| panic!(\"McFly error: Expected value for old_dir_path\"))));\n\n settings.dir = String::from(move_matches.value_of(\"new_dir_path\").unwrap_or_else(|| panic!(\"McFly error: Expected value for new_dir_path\")));\n\n }\n\n\n\n (\"\", None) => println!(\"No subcommand was used\"), // If no subcommand was used it'll match the tuple (\"\", None)\n\n _ => unreachable!(), // If all subcommands are defined above, anything else is unreachable!()\n\n }\n\n\n\n settings.lightmode = match env::var_os(\"MCFLY_LIGHT\") {\n\n Some(_val) => true,\n", "file_path": "src/settings.rs", "rank": 99, "score": 5.616000332968991 } ]
Rust
src/node_state/leader/follower.rs
yuezato/raftlog
12315643c559118dbe9c20625ff9e3c415bf9bb3
use futures::{Async, Future}; use std::collections::BTreeMap; use std::mem; use trackable::error::ErrorKindExt; use super::super::Common; use crate::cluster::ClusterConfig; use crate::log::{Log, LogIndex}; use crate::message::{AppendEntriesReply, SequenceNumber}; use crate::node::NodeId; use crate::{ErrorKind, Io, Result}; pub struct FollowersManager<IO: Io> { followers: BTreeMap<NodeId, Follower>, config: ClusterConfig, latest_hearbeat_ack: SequenceNumber, last_broadcast_seq_no: SequenceNumber, tasks: BTreeMap<NodeId, IO::LoadLog>, } impl<IO: Io> FollowersManager<IO> { pub fn new(config: ClusterConfig) -> Self { let followers = config .members() .map(|n| (n.clone(), Follower::new())) .collect(); FollowersManager { followers, config, tasks: BTreeMap::new(), latest_hearbeat_ack: SequenceNumber::new(0), last_broadcast_seq_no: SequenceNumber::new(0), } } pub fn run_once(&mut self, common: &mut Common<IO>) -> Result<()> { let mut dones = Vec::new(); for (follower, task) in &mut self.tasks { if let Async::Ready(log) = track!(task.poll())? 
{ dones.push((follower.clone(), log)); } } for (follower, log) in dones { let rpc = common.rpc_caller(); match log { Log::Prefix(snapshot) => rpc.send_install_snapshot(&follower, snapshot), Log::Suffix(slice) => rpc.send_append_entries(&follower, slice), } self.tasks.remove(&follower); } Ok(()) } pub fn latest_hearbeat_ack(&self) -> SequenceNumber { self.latest_hearbeat_ack } pub fn committed_log_tail(&self) -> LogIndex { self.config.consensus_value(|node_id| { let f = &self.followers[node_id]; if f.synced { f.log_tail } else { LogIndex::new(0) } }) } pub fn joint_committed_log_tail(&self) -> LogIndex { self.config.full_consensus_value(|node_id| { let f = &self.followers[node_id]; if f.synced { f.log_tail } else { LogIndex::new(0) } }) } pub fn handle_append_entries_reply( &mut self, common: &Common<IO>, reply: &AppendEntriesReply, ) -> bool { let updated = self.update_follower_state(common, reply); if self.latest_hearbeat_ack < reply.header.seq_no { self.latest_hearbeat_ack = self .config .consensus_value(|node_id| self.followers[node_id].last_seq_no); } updated } pub fn set_last_broadcast_seq_no(&mut self, seq_no: SequenceNumber) { self.last_broadcast_seq_no = seq_no; } pub fn log_sync(&mut self, common: &mut Common<IO>, reply: &AppendEntriesReply) -> Result<()> { if reply.busy || self.tasks.contains_key(&reply.header.sender) { return Ok(()); } let follower = track!(self .followers .get_mut(&reply.header.sender) .ok_or_else(|| ErrorKind::InconsistentState.error()))?; if reply.header.seq_no <= follower.obsolete_seq_no { return Ok(()); } follower.obsolete_seq_no = self.last_broadcast_seq_no; if common.log().tail().index <= follower.log_tail { return Ok(()); } let end = if follower.synced { common.log().tail().index } else { follower.log_tail }; let future = common.load_log(follower.log_tail, Some(end)); self.tasks.insert(reply.header.sender.clone(), future); Ok(()) } pub fn handle_config_updated(&mut self, config: &ClusterConfig) { for id in config.members() { if 
!self.followers.contains_key(id) { self.followers.insert(id.clone(), Follower::new()); } } self.followers = mem::take(&mut self.followers) .into_iter() .filter(|&(ref id, _)| config.is_known_node(id)) .collect(); self.config = config.clone(); } fn update_follower_state(&mut self, common: &Common<IO>, reply: &AppendEntriesReply) -> bool { let follower = &mut self .followers .get_mut(&reply.header.sender) .expect("Never fails"); if follower.last_seq_no < reply.header.seq_no { follower.last_seq_no = reply.header.seq_no; } match *reply { AppendEntriesReply { busy: true, .. } => false, AppendEntriesReply { log_tail, .. } if follower.synced => { let updated = follower.log_tail < log_tail.index; if updated { follower.log_tail = log_tail.index; } else if log_tail.index.as_u64() == 0 && follower.log_tail.as_u64() != 0 { follower.synced = false; } updated } AppendEntriesReply { log_tail, .. } => { let leader_term = common .log() .get_record(log_tail.index) .map(|r| r.head.prev_term); follower.synced = leader_term == Some(log_tail.prev_term); if follower.synced { follower.log_tail = log_tail.index; } else { follower.log_tail = log_tail.index.as_u64().saturating_sub(1).into(); } follower.synced } } } } #[derive(Debug)] struct Follower { pub obsolete_seq_no: SequenceNumber, pub log_tail: LogIndex, pub last_seq_no: SequenceNumber, pub synced: bool, } impl Follower { pub fn new() -> Self { Follower { obsolete_seq_no: SequenceNumber::new(0), log_tail: LogIndex::new(0), last_seq_no: SequenceNumber::new(0), synced: false, } } }
use futures::{Async, Future}; use std::collections::BTreeMap; use std::mem; use trackable::error::ErrorKindExt; use super::super::Common; use crate::cluster::ClusterConfig; use crate::log::{Log, LogIndex}; use crate::message::{AppendEntriesReply, SequenceNumber}; use crate::node::NodeId; use crate::{ErrorKind, Io, Result}; pub struct FollowersManager<IO: Io> { followers: BTreeMap<NodeId, Follower>, config: ClusterConfig, latest_hearbeat_ack: SequenceNumber, last_broadcast_seq_no: SequenceNumber, tasks: BTreeMap<NodeId, IO::LoadLog>, } impl<IO: Io> FollowersManager<IO> { pub fn new(config: ClusterConfig) -> Self { let followers = config .members() .map(|n| (n.clone(), Follower::new())) .collect(); FollowersManager { followers, config, tasks: BTreeMap::new(), latest_hearbeat_ack: SequenceNumber::new(0), last_broadcast_seq_no: SequenceNumber::new(0), } } pub fn run_once(&mut self, common: &mut Common<IO>) -> Result<()> { let mut dones = Vec::new(); for (follower, task) in &mut self.tasks { if let Async::Ready(log) = track!(task.poll())? 
{ dones.push((follower.clone(), log)); } } for (follower, log) in dones { let rpc = common.rpc_caller(); match log { Log::Prefix(snapshot) => rpc.send_install_snapshot(&follower, snapshot), Log::Suffix(slice) => rpc.send_append_entries(&follower, slice), } self.tasks.remove(&follower); } Ok(()) } pub fn latest_hearbeat_ack(&self) -> SequenceNumber { self.latest_hearbeat_ack } pub fn committed_log_tail(&self) -> LogIndex { self.config.consensus_value(|node_id| { let f = &self.followers[node_id]; if f.synced { f.log_tail } else { LogIndex::new(0) } }) } pub fn joint_committed_log_tail(&self) -> LogIndex { self.config.full_consensus_value(|node_id| { let f = &self.followers[node_id]; if f.synced { f.log_tail } else { LogIndex::new(0) } }) } pub fn handle_append_entries_reply( &mut self, common: &Common<IO>, reply: &AppendEntriesReply, ) -> bool { let updated = self.update_follower_state(common, reply); if self.latest_hearbeat_ack < reply.header.seq_no { self.latest_hearbeat_ack = self .config .consensus_value(|node_id| self.followers[node_id].last_seq_no); } updated } pub fn set_last_broadcast_seq_no(&mut self, seq_no: SequenceNumber) { self.last_broadcast_seq_no = seq_no; } pub fn log_sync(&mut self, common: &mut Common<IO>, reply: &AppendEntriesReply) -> Result<()> { if reply.busy || self.tasks.contains_key(&reply.header.sender) { return Ok(()); }
if reply.header.seq_no <= follower.obsolete_seq_no { return Ok(()); } follower.obsolete_seq_no = self.last_broadcast_seq_no; if common.log().tail().index <= follower.log_tail { return Ok(()); } let end = if follower.synced { common.log().tail().index } else { follower.log_tail }; let future = common.load_log(follower.log_tail, Some(end)); self.tasks.insert(reply.header.sender.clone(), future); Ok(()) } pub fn handle_config_updated(&mut self, config: &ClusterConfig) { for id in config.members() { if !self.followers.contains_key(id) { self.followers.insert(id.clone(), Follower::new()); } } self.followers = mem::take(&mut self.followers) .into_iter() .filter(|&(ref id, _)| config.is_known_node(id)) .collect(); self.config = config.clone(); } fn update_follower_state(&mut self, common: &Common<IO>, reply: &AppendEntriesReply) -> bool { let follower = &mut self .followers .get_mut(&reply.header.sender) .expect("Never fails"); if follower.last_seq_no < reply.header.seq_no { follower.last_seq_no = reply.header.seq_no; } match *reply { AppendEntriesReply { busy: true, .. } => false, AppendEntriesReply { log_tail, .. } if follower.synced => { let updated = follower.log_tail < log_tail.index; if updated { follower.log_tail = log_tail.index; } else if log_tail.index.as_u64() == 0 && follower.log_tail.as_u64() != 0 { follower.synced = false; } updated } AppendEntriesReply { log_tail, .. 
} => { let leader_term = common .log() .get_record(log_tail.index) .map(|r| r.head.prev_term); follower.synced = leader_term == Some(log_tail.prev_term); if follower.synced { follower.log_tail = log_tail.index; } else { follower.log_tail = log_tail.index.as_u64().saturating_sub(1).into(); } follower.synced } } } } #[derive(Debug)] struct Follower { pub obsolete_seq_no: SequenceNumber, pub log_tail: LogIndex, pub last_seq_no: SequenceNumber, pub synced: bool, } impl Follower { pub fn new() -> Self { Follower { obsolete_seq_no: SequenceNumber::new(0), log_tail: LogIndex::new(0), last_seq_no: SequenceNumber::new(0), synced: false, } } }
let follower = track!(self .followers .get_mut(&reply.header.sender) .ok_or_else(|| ErrorKind::InconsistentState.error()))?;
assignment_statement
[ { "content": "struct InstallSnapshot<IO: Io> {\n\n future: IO::SaveLog,\n\n summary: SnapshotSummary,\n\n}\n\nimpl<IO: Io> InstallSnapshot<IO> {\n\n pub fn new(common: &mut Common<IO>, prefix: LogPrefix) -> Self {\n\n let summary = SnapshotSummary {\n\n tail: prefix.tail,\n\n config: prefix.config.clone(),\n\n };\n\n let future = common.io.save_log_prefix(prefix);\n\n InstallSnapshot { future, summary }\n\n }\n\n}\n\nimpl<IO: Io> Future for InstallSnapshot<IO> {\n\n type Item = SnapshotSummary;\n\n type Error = Error;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n Ok(track!(self.future.poll())?.map(|()| self.summary.clone()))\n\n }\n", "file_path": "src/node_state/common/mod.rs", "rank": 0, "score": 151795.74627884332 }, { "content": "fn median<F, T>(members: &ClusterMembers, f: F) -> T\n\nwhere\n\n F: Fn(&NodeId) -> T,\n\n T: Ord + Copy + Default,\n\n{\n\n let mut values = members.iter().map(|n| f(n)).collect::<Vec<_>>();\n\n values.sort();\n\n values.reverse();\n\n if values.is_empty() {\n\n T::default()\n\n } else {\n\n values[members.len() / 2]\n\n }\n\n}\n", "file_path": "src/cluster.rs", "rank": 1, "score": 127262.75425114289 }, { "content": "fn over_write(now: &mut LogSuffix, new: &LogSuffix) {\n\n /*\n\n * 次のような上書き不能な形をしていないか検査する\n\n * now = [...)\n\n * [...) 
= new\n\n */\n\n assert!(new.head.index <= now.tail().index);\n\n\n\n let (offset, entries_offset) = if now.head.index <= new.head.index {\n\n /*\n\n * [ self ...\n\n * [ next ...\n\n * ^--offset\n\n */\n\n\n\n (new.head.index - now.head.index, 0)\n\n } else {\n\n /*\n\n * Strange case:\n\n * [ self ...\n", "file_path": "src/test_dsl/impl_io.rs", "rank": 2, "score": 126114.77761290272 }, { "content": "/// 引数`rlog`で表される特定のノードが、述語`pred`を満たすかどうかを調べる\n\n/// `pred`を満足する場合は`true`を返し、\n\n/// そうでない場合は`false`を返す。\n\nfn check(rlog: &ReplicatedLog<TestIo>, pred: Pred) -> bool {\n\n use Pred::*;\n\n\n\n match pred {\n\n Not(pred) => !check(rlog, *pred),\n\n IsLeader => rlog.local_node().role == Role::Leader,\n\n IsFollower => rlog.local_node().role == Role::Follower,\n\n LogTermConsistency => {\n\n let mut valid_glue = true;\n\n\n\n // snapshotとrawlogが両方ある場合は、\n\n // snapshotに続く形でrawlogが存在することを確認する\n\n if let Some(snapshot) = &rlog.io().snapshot() {\n\n if let Some(rawlog) = &rlog.io().rawlog() {\n\n valid_glue = snapshot.tail.prev_term == rawlog.head.prev_term;\n\n }\n\n }\n\n\n\n // rawlogが存在する場合は、termが昇順になっていることを確認する\n\n let is_sorted = if let Some(rawlog) = &rlog.io().rawlog() {\n", "file_path": "src/test_dsl/dsl.rs", "rank": 3, "score": 115262.26735373196 }, { "content": "fn make_role_change_histogram(builder: &mut HistogramBuilder) -> Result<Histogram> {\n\n builder\n\n .bucket(0.001)\n\n .bucket(0.005)\n\n .bucket(0.01)\n\n .bucket(0.05)\n\n .bucket(0.1)\n\n .bucket(0.2)\n\n .bucket(0.4)\n\n .bucket(0.6)\n\n .bucket(0.8)\n\n .bucket(1.0)\n\n .bucket(2.0)\n\n .bucket(4.0)\n\n .bucket(6.0)\n\n .bucket(8.0)\n\n .bucket(10.0)\n\n .bucket(20.0)\n\n .bucket(50.0)\n\n .bucket(80.0)\n\n .bucket(320.0)\n\n .bucket(640.0)\n\n .finish()\n\n .map_err(|e| track!(Error::from(e)))\n\n}\n", "file_path": "src/metrics.rs", "rank": 4, "score": 107261.07587930831 }, { "content": "/// Raftの実行に必要なI/O機能を提供するためのトレイト.\n\n///\n\n/// 機能としてはおおまかに以下の三つに区分される:\n\n///\n\n/// - **ストレージ**\n\n/// - 
ローカルノードの状態やログを保存するための永続ストレージ\n\n/// - Raftが完全に正しく動作するためには、このストレージは完全に信頼できるものである必要がある\n\n/// - 一度書き込まれたデータは(明示的に削除されない限り)失われたり、壊れたりすることは無い\n\n/// - 実際には、それを達成するのは困難なので、信頼性とストレージコストのトレードオフとなる\n\n/// - **チャンネル**\n\n/// - ノード間通信(RPC)用のメッセージ送受信チャンネル\n\n/// - このチャンネルの信頼性はある程度低くても良い\n\n/// - メッセージ群の順番の入れ替わりや、欠損、重複配送、は許容される\n\n/// - ただし、メッセージの改竄や捏造、はNG\n\n/// - **タイマー**\n\n/// - タイムアウト管理用のタイマー\n\npub trait Io {\n\n /// ローカルノードの投票状況を保存するための`Future`.\n\n type SaveBallot: Future<Item = (), Error = Error>;\n\n\n\n /// ノーカルノードの投票情報を取得ための`Future`.\n\n type LoadBallot: Future<Item = Option<Ballot>, Error = Error>;\n\n\n\n /// ローカルログを保存するための`Future`.\n\n type SaveLog: Future<Item = (), Error = Error>;\n\n\n\n /// ローカルログを取得するための`Future`.\n\n type LoadLog: Future<Item = Log, Error = Error>;\n\n\n\n /// タイムアウトを表現するための`Future`.\n\n type Timeout: Future<Item = (), Error = Error>;\n\n\n\n /// ローカルノードに対して送信されたメッセージの受信を試みる.\n\n ///\n\n /// # 注意\n\n ///\n", "file_path": "src/io.rs", "rank": 5, "score": 107135.49306210967 }, { "content": "/// DSLのコマンド列(プログラム)を実行する関数\n\npub fn interpret(cs: &[Command], service: &mut Service) {\n\n for c in cs {\n\n interpret_command(c.clone(), service);\n\n }\n\n}\n\n\n", "file_path": "src/test_dsl/dsl.rs", "rank": 6, "score": 107110.84573639274 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SnapshotSummary {\n\n tail: LogPosition,\n\n config: ClusterConfig,\n\n}\n\n\n", "file_path": "src/node_state/common/mod.rs", "rank": 7, "score": 105196.14643878718 }, { "content": "#[derive(Default, Serialize, Deserialize)]\n\nstruct Config {\n\n simulator: raftlog_simu::SimulatorConfig,\n\n}\n\n\n", "file_path": "raftlog_simu/src/main.rs", "rank": 8, "score": 99189.68730412977 }, { "content": "/// ノードネームの列が与えられた時に\n\n/// 丁度それらを構成要素として含むようなraft clusterを構成する。\n\n///\n\n/// 全点間通信ができる状態にしてあるので\n\n/// complete graph(w.r.t. 
ネットワークトポロジー)という語を用いている。\n\npub fn build_complete_graph(names: &[NodeName]) -> (Service, ClusterMembers) {\n\n let nodes: BTreeSet<NodeId> = names.iter().map(|s| NodeId::new(s.to_string())).collect();\n\n\n\n let mut ios = BTreeMap::new();\n\n let mut service = BTreeMap::new();\n\n\n\n for node in &nodes {\n\n ios.insert(node.clone(), TestIo::new(node.clone(), false));\n\n }\n\n\n\n for src in &nodes {\n\n let mut io_src = ios.remove(src).unwrap();\n\n for dst in &nodes {\n\n if src != dst {\n\n let io_dst = ios.get(dst).unwrap();\n\n io_src.set_channel(dst.clone(), io_dst.copy_sender());\n\n }\n\n }\n\n ios.insert(src.clone(), io_src);\n\n }\n", "file_path": "src/test_dsl/dsl.rs", "rank": 9, "score": 91484.87596065007 }, { "content": "struct Channel {\n\n clock: u64,\n\n queue: BinaryHeap<DelayedMessage>,\n\n}\n\nimpl Channel {\n\n pub fn new() -> Self {\n\n Channel {\n\n clock: 0,\n\n queue: BinaryHeap::new(),\n\n }\n\n }\n\n pub fn try_recv_message(&mut self) -> Option<Message> {\n\n self.clock += 1;\n\n if let Some(m) = self.queue.pop() {\n\n if m.arrival_time <= self.clock {\n\n Some(m.message)\n\n } else {\n\n self.queue.push(m);\n\n None\n\n }\n", "file_path": "raftlog_simu/src/io/transport.rs", "rank": 11, "score": 77140.3932382438 }, { "content": "struct DelayedMessage {\n\n arrival_time: u64,\n\n message: Message,\n\n}\n\nimpl Ord for DelayedMessage {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n other.arrival_time.cmp(&self.arrival_time)\n\n }\n\n}\n\nimpl PartialOrd for DelayedMessage {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n other.arrival_time.partial_cmp(&self.arrival_time)\n\n }\n\n}\n\nimpl Eq for DelayedMessage {}\n\nimpl PartialEq for DelayedMessage {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.arrival_time == other.arrival_time\n\n }\n\n}\n", "file_path": "raftlog_simu/src/io/transport.rs", "rank": 12, "score": 74826.60828906487 }, { "content": "/// DSLの一つのコマンドを実行する関数\n\nfn interpret_command(c: Command, service: 
&mut Service) {\n\n use futures::Stream;\n\n use Command::*;\n\n\n\n println!(\"\\n Now executing {:?} ...\", &c);\n\n\n\n match c {\n\n TakeSnapshot(node) => {\n\n let rlog = service.get_mut(&node).unwrap();\n\n let index = rlog.local_history().tail().index;\n\n rlog.install_snapshot(index, Vec::new()).unwrap();\n\n }\n\n Check(node, pred) => {\n\n let rlog = service.get_mut(&node).unwrap();\n\n assert!(check(rlog, pred));\n\n }\n\n Heartbeat(node) => {\n\n let rlog = service.get_mut(&node).unwrap();\n\n rlog.heartbeat().unwrap();\n\n }\n", "file_path": "src/test_dsl/dsl.rs", "rank": 13, "score": 74175.63251036819 }, { "content": "fn message_to_string(m: &Message) -> String {\n\n use Message::*;\n\n match m {\n\n RequestVoteCall(vcall) => {\n\n format!(\n\n \"[Vcall] {:?} {:?} {:?}\",\n\n vcall.header.seq_no, vcall.header.term, vcall.log_tail\n\n )\n\n }\n\n RequestVoteReply(vreply) => {\n\n format!(\n\n \"[Vrep] {:?} {:?} voted={:?}\",\n\n vreply.header.seq_no, vreply.header.term, vreply.voted\n\n )\n\n }\n\n AppendEntriesCall(ecall) => {\n\n format!(\n\n \"[AEcall] {:?} {:?} commited={:?}, suffix={:?}\",\n\n ecall.header.seq_no, ecall.header.term, ecall.committed_log_tail, ecall.suffix\n\n )\n", "file_path": "src/test_dsl/impl_io.rs", "rank": 14, "score": 65949.2841820146 }, { "content": "#[allow(dead_code)]\n\nfn ballot_to_str(b: &Ballot) -> String {\n\n format!(\n\n \"ballot(term: {}, for: {})\",\n\n b.term.as_u64(),\n\n b.voted_for.as_str()\n\n )\n\n}\n\n\n", "file_path": "src/test_dsl/impl_io.rs", "rank": 15, "score": 65949.2841820146 }, { "content": " _phantom: PhantomData<IO>,\n\n}\n\nimpl<IO: Io> FollowerSnapshot<IO> {\n\n pub fn new() -> Self {\n\n FollowerSnapshot {\n\n _phantom: PhantomData,\n\n }\n\n }\n\n pub fn handle_message(\n\n &mut self,\n\n common: &mut Common<IO>,\n\n message: Message,\n\n ) -> Result<NextState<IO>> {\n\n if let Message::AppendEntriesCall(m) = message {\n\n common.rpc_callee(&m.header).reply_busy();\n\n }\n\n Ok(None)\n\n 
}\n\n pub fn run_once(&mut self, common: &mut Common<IO>) -> Result<NextState<IO>> {\n\n if common.is_snapshot_installing() {\n\n Ok(None)\n\n } else {\n\n let next = Follower::Idle(FollowerIdle::new());\n\n Ok(Some(RoleState::Follower(next)))\n\n }\n\n }\n\n}\n", "file_path": "src/node_state/follower/snapshot.rs", "rank": 16, "score": 64125.30242690405 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse super::super::{Common, NextState, RoleState};\n\nuse super::{Follower, FollowerIdle};\n\nuse crate::message::Message;\n\nuse crate::{Io, Result};\n\n\n\n/// ローカルログへのスナップショット保存を処理するためのフォロワーのサブ状態.\n\n///\n\n/// `InstallSnapshotCast`で送られてきたスナップショットを処理する.\n\n///\n\n/// 正確には、スナップショットの処理自体は共通モジュールで行われるため、\n\n/// ここでの目的は「スナップショットの保存中に、新たなログ追記等が行われないようにする」\n\n/// ということになる.\n\n///\n\n/// なお、既にコミット済みの地点に対するスナップショットのインストール中に、\n\n/// 新規ログ追加が行われても問題は発生しないので、\n\n/// このサブ状態が使われるのは「未コミット地点に対するスナップショット」の\n\n/// インストールをリーダから指示された場合、のみである.\n\npub struct FollowerSnapshot<IO: Io> {\n", "file_path": "src/node_state/follower/snapshot.rs", "rank": 17, "score": 64113.76162346708 }, { "content": "//! 
`Future`トレイトの実装群.\n\nuse futures::{Async, Future, Poll};\n\nuse raftlog::election::Ballot;\n\nuse raftlog::log::Log;\n\n\n\nuse crate::types::LogicalDuration;\n\nuse crate::Error;\n\n\n\npub type Timeout = DelayedResult<(), Error>;\n\npub type SaveBallot = DelayedResult<(), Error>;\n\npub type LoadBallot = DelayedResult<Option<Ballot>, Error>;\n\npub type SaveLog = DelayedResult<(), Error>;\n\npub type LoadLog = DelayedResult<Log, Error>;\n\n\n\n/// 結果を得られるまでに、生成時に指定された論理時間の経過が必要となる`Result`型.\n\n#[derive(Debug)]\n\npub struct DelayedResult<T, E> {\n\n result: Option<Result<T, E>>,\n\n delay: LogicalDuration,\n\n}\n", "file_path": "raftlog_simu/src/io/futures.rs", "rank": 18, "score": 63428.23016583829 }, { "content": "impl<T, E> DelayedResult<T, E> {\n\n /// `value`を値とする、遅延された成功結果を返す.\n\n pub fn ok(value: T, delay: LogicalDuration) -> Self {\n\n DelayedResult::done(Ok(value), delay)\n\n }\n\n\n\n /// `error`を失敗理由とする、遅延された結果を返す.\n\n pub fn err(error: E, delay: LogicalDuration) -> Self {\n\n DelayedResult::done(Err(error), delay)\n\n }\n\n\n\n /// 遅延された結果を返す.\n\n pub fn done(result: Result<T, E>, delay: LogicalDuration) -> Self {\n\n DelayedResult {\n\n result: Some(result),\n\n delay,\n\n }\n\n }\n\n}\n\nimpl<T, E> Future for DelayedResult<T, E> {\n", "file_path": "raftlog_simu/src/io/futures.rs", "rank": 19, "score": 63425.25241986479 }, { "content": " type Item = T;\n\n type Error = E;\n\n fn poll(&mut self) -> Poll<Self::Item, Self::Error> {\n\n if self.delay == 0 {\n\n let value = self\n\n .result\n\n .take()\n\n .expect(\"Cannot poll DelayedResult twice\")?;\n\n Ok(Async::Ready(value))\n\n } else {\n\n self.delay -= 1;\n\n Ok(Async::NotReady)\n\n }\n\n }\n\n}\n", "file_path": "raftlog_simu/src/io/futures.rs", "rank": 20, "score": 63422.31080905979 }, { "content": "impl Default for StorageConfig {\n\n fn default() -> Self {\n\n StorageConfig {\n\n save_ballot_time: StorageConfig::default_save_ballot_time(),\n\n load_ballot_time: 
StorageConfig::default_load_ballot_time(),\n\n save_log_entry_time: StorageConfig::default_save_log_entry_time(),\n\n load_log_entry_time: StorageConfig::default_load_log_entry_time(),\n\n save_log_snapshot_time: StorageConfig::default_save_log_snapshot_time(),\n\n load_log_snapshot_time: StorageConfig::default_load_log_snapshot_time(),\n\n }\n\n }\n\n}\n\n\n\n/// 通信チャンネルの構成設定.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct ChannelConfig {\n\n /// メッセージの消失率.\n\n ///\n\n /// `1.0`なら全てのメッセージが相手に届くことなく消失する.\n\n #[serde(default = \"ChannelConfig::default_drop\")]\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 21, "score": 63222.64857854171 }, { "content": "\n\n /// スナップショットの保存に要する時間.\n\n #[serde(default = \"StorageConfig::default_save_log_snapshot_time\")]\n\n pub save_log_snapshot_time: Range<LogicalDuration>,\n\n\n\n /// スナップショットの読み込みに要する時間.\n\n #[serde(default = \"StorageConfig::default_load_log_snapshot_time\")]\n\n pub load_log_snapshot_time: Range<LogicalDuration>,\n\n}\n\nimpl StorageConfig {\n\n /// `1...5`\n\n pub fn default_save_ballot_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n\n /// `1...5`\n\n pub fn default_load_ballot_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 22, "score": 63221.720309960634 }, { "content": " /// `1...5`\n\n pub fn default_save_log_entry_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n\n /// `1...5`\n\n pub fn default_load_log_entry_time() -> Range<LogicalDuration> {\n\n Range { min: 1, max: 5 }\n\n }\n\n\n\n /// `100...500`\n\n pub fn default_save_log_snapshot_time() -> Range<LogicalDuration> {\n\n Range { min: 100, max: 500 }\n\n }\n\n\n\n /// `100...500`\n\n pub fn default_load_log_snapshot_time() -> Range<LogicalDuration> {\n\n Range { min: 100, max: 500 }\n\n }\n\n}\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 23, "score": 63217.561454083894 
}, { "content": "pub struct StorageConfig {\n\n /// 投票状況の保存に要する時間.\n\n #[serde(default = \"StorageConfig::default_save_ballot_time\")]\n\n pub save_ballot_time: Range<LogicalDuration>,\n\n\n\n /// 投票状況の復元に要する時間.\n\n #[serde(default = \"StorageConfig::default_load_ballot_time\")]\n\n pub load_ballot_time: Range<LogicalDuration>,\n\n\n\n /// 個々のログエントリの保存に要する時間.\n\n ///\n\n /// 対象のエントリ数がNの場合には、時間はN倍になる.\n\n #[serde(default = \"StorageConfig::default_save_log_entry_time\")]\n\n pub save_log_entry_time: Range<LogicalDuration>,\n\n\n\n /// 個々のログエントリの読み込みに要する時間.\n\n ///\n\n /// 対象のエントリ数がNの場合には、時間はN倍になる.\n\n #[serde(default = \"StorageConfig::default_load_log_entry_time\")]\n\n pub load_log_entry_time: Range<LogicalDuration>,\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 24, "score": 63217.12335804883 }, { "content": " Probability { prob: 0.05 }\n\n }\n\n\n\n /// `0.01`\n\n pub fn default_duplicate() -> Probability {\n\n Probability { prob: 0.01 }\n\n }\n\n}\n\nimpl Default for ChannelConfig {\n\n fn default() -> Self {\n\n ChannelConfig {\n\n delay: ChannelConfig::default_delay(),\n\n drop: ChannelConfig::default_drop(),\n\n duplicate: ChannelConfig::default_duplicate(),\n\n }\n\n }\n\n}\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 25, "score": 63216.68517447422 }, { "content": " pub fn default_election_timeout() -> LogicalDuration {\n\n 1000\n\n }\n\n\n\n /// `heartbeat_interval`フィールドのデフォルト値 (`100`).\n\n pub fn default_heartbeat_interval() -> LogicalDuration {\n\n 100\n\n }\n\n}\n\nimpl Default for TimerConfig {\n\n fn default() -> Self {\n\n TimerConfig {\n\n election_timeout: TimerConfig::default_election_timeout(),\n\n heartbeat_interval: TimerConfig::default_heartbeat_interval(),\n\n }\n\n }\n\n}\n\n\n\n/// `Storage`用の構成設定.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 26, "score": 63216.62835729769 }, { "content": "//! 
I/O関連の構成設定を集めたモジュール.\n\nuse crate::types::{LogicalDuration, Probability, Range};\n\n\n\n/// `Timer`用の構成設定.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct TimerConfig {\n\n /// 一つの選挙期間のタイムアウト尺.\n\n ///\n\n /// リーダからのハートビートを受信しない期間が、\n\n /// ここで指定された尺を超えた場合には、\n\n /// リーダがダウンしたものと判断されて、次の選挙が始まる.\n\n #[serde(default = \"TimerConfig::default_election_timeout\")]\n\n pub election_timeout: LogicalDuration,\n\n\n\n /// リーダがハートビートを発行する間隔.\n\n #[serde(default = \"TimerConfig::default_heartbeat_interval\")]\n\n pub heartbeat_interval: LogicalDuration,\n\n}\n\nimpl TimerConfig {\n\n /// `election_timeout`フィールドのデフォルト値 (`1000`).\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 27, "score": 63216.32751880202 }, { "content": " pub drop: Probability,\n\n\n\n /// メッセージの重複率.\n\n ///\n\n /// `1.0`なら(消失しなかった)全てのメッセージが複製される.\n\n #[serde(default = \"ChannelConfig::default_duplicate\")]\n\n pub duplicate: Probability,\n\n\n\n /// メッセージ遅延.\n\n #[serde(default = \"ChannelConfig::default_delay\")]\n\n pub delay: Range<LogicalDuration>,\n\n}\n\nimpl ChannelConfig {\n\n /// `10..50`\n\n pub fn default_delay() -> Range<LogicalDuration> {\n\n Range { min: 10, max: 50 }\n\n }\n\n\n\n /// `0.05`\n\n pub fn default_drop() -> Probability {\n", "file_path": "raftlog_simu/src/io/configs.rs", "rank": 28, "score": 63214.58544422495 }, { "content": "use super::Common;\n\nuse crate::log::{LogPosition, LogPrefix, LogSuffix};\n\nuse crate::message::{self, AppendEntriesReply, Message, MessageHeader, SequenceNumber};\n\nuse crate::node::NodeId;\n\nuse crate::Io;\n\n\n\n/// RPC要求メッセージの送信を補助するためのビルダ.\n\npub struct RpcCaller<'a, IO: 'a + Io> {\n\n common: &'a mut Common<IO>,\n\n}\n\nimpl<'a, IO: 'a + Io> RpcCaller<'a, IO> {\n\n pub fn new(common: &'a mut Common<IO>) -> Self {\n\n RpcCaller { common }\n\n }\n\n pub fn broadcast_request_vote(mut self) {\n\n let header = self.make_header(&NodeId::new(String::new())); // ブロードキャストノード時に空文字列を宛先に指定\n\n let log_tail = 
self.common.history.tail();\n\n let request = message::RequestVoteCall {\n\n header: header.clone(),\n\n log_tail,\n", "file_path": "src/node_state/common/rpc_builder.rs", "rank": 29, "score": 61473.956702074654 }, { "content": " if do_self_reply {\n\n self.common.unread_message = Some(self_reply);\n\n }\n\n }\n\n}\n\n\n\n/// RPC応答メッセージの送信を補助するためのビルダ.\n\npub struct RpcCallee<'a, IO: 'a + Io> {\n\n common: &'a mut Common<IO>,\n\n caller: &'a MessageHeader,\n\n}\n\nimpl<'a, IO: 'a + Io> RpcCallee<'a, IO> {\n\n pub fn new(common: &'a mut Common<IO>, caller: &'a MessageHeader) -> Self {\n\n RpcCallee { common, caller }\n\n }\n\n pub fn reply_request_vote(self, voted: bool) {\n\n let header = self.make_header();\n\n let message = message::RequestVoteReply { header, voted }.into();\n\n self.common.io.send_message(message);\n\n }\n", "file_path": "src/node_state/common/rpc_builder.rs", "rank": 30, "score": 61472.71678830534 }, { "content": " fn make_header(&mut self, destination: &NodeId) -> MessageHeader {\n\n let seq_no = self.common.seq_no;\n\n self.common.seq_no = SequenceNumber::new(seq_no.as_u64() + 1);\n\n MessageHeader {\n\n sender: self.common.local_node.id.clone(),\n\n destination: destination.clone(),\n\n seq_no,\n\n term: self.common.local_node.ballot.term,\n\n }\n\n }\n\n fn broadcast(&mut self, mut message: Message, self_reply: Message) {\n\n let mut do_self_reply = false;\n\n for peer in self.common.history.config().members() {\n\n if *peer == self.common.local_node.id {\n\n do_self_reply = true;\n\n } else {\n\n message.set_destination(peer);\n\n self.common.io.send_message(message.clone());\n\n }\n\n }\n", "file_path": "src/node_state/common/rpc_builder.rs", "rank": 31, "score": 61466.44635224692 }, { "content": " busy: false,\n\n }\n\n .into();\n\n self.broadcast(request, self_reply);\n\n }\n\n pub fn send_append_entries(mut self, peer: &NodeId, suffix: LogSuffix) {\n\n let message = message::AppendEntriesCall {\n\n header: self.make_header(peer),\n\n 
committed_log_tail: self.common.history.committed_tail().index,\n\n suffix,\n\n }\n\n .into();\n\n self.common.io.send_message(message);\n\n }\n\n pub fn send_install_snapshot(mut self, peer: &NodeId, prefix: LogPrefix) {\n\n let header = self.make_header(peer);\n\n let message = message::InstallSnapshotCast { header, prefix }.into();\n\n self.common.io.send_message(message);\n\n }\n\n\n", "file_path": "src/node_state/common/rpc_builder.rs", "rank": 32, "score": 61466.36622844293 }, { "content": " pub fn reply_append_entries(self, log_tail: LogPosition) {\n\n let message = AppendEntriesReply {\n\n header: self.make_header(),\n\n log_tail,\n\n busy: false,\n\n }\n\n .into();\n\n self.common.io.send_message(message);\n\n }\n\n pub fn reply_busy(self) {\n\n let message = AppendEntriesReply {\n\n header: self.make_header(),\n\n log_tail: self.common.history.tail(),\n\n busy: true,\n\n }\n\n .into();\n\n self.common.io.send_message(message);\n\n }\n\n\n\n fn make_header(&self) -> MessageHeader {\n\n MessageHeader {\n\n sender: self.common.local_node.id.clone(),\n\n destination: self.caller.sender.clone(),\n\n seq_no: self.caller.seq_no,\n\n term: self.common.local_node.ballot.term,\n\n }\n\n }\n\n}\n", "file_path": "src/node_state/common/rpc_builder.rs", "rank": 33, "score": 61462.59395543706 }, { "content": " }\n\n .into();\n\n let self_reply = message::RequestVoteReply {\n\n header,\n\n voted: true,\n\n }\n\n .into();\n\n self.broadcast(request, self_reply);\n\n }\n\n pub fn broadcast_append_entries(mut self, suffix: LogSuffix) {\n\n let header = self.make_header(&NodeId::new(String::new())); // ブロードキャストノード時に空文字列を宛先に指定\n\n let request = message::AppendEntriesCall {\n\n header: header.clone(),\n\n committed_log_tail: self.common.history.committed_tail().index,\n\n suffix,\n\n }\n\n .into();\n\n let self_reply = AppendEntriesReply {\n\n header,\n\n log_tail: self.common.history.tail(),\n", "file_path": "src/node_state/common/rpc_builder.rs", "rank": 34, "score": 
61460.73999155762 }, { "content": "/// 次に遷移する状態.\n\n///\n\n/// `None`の場合には、遷移はせずに同じ状態が維持される.\n\ntype NextState<IO> = Option<RoleState<IO>>;\n\n\n\n/// ローカルノード用の状態(状態機械).\n\npub struct NodeState<IO: Io> {\n\n pub common: Common<IO>,\n\n pub role: RoleState<IO>,\n\n started_at: Instant,\n\n pub metrics: NodeStateMetrics,\n\n}\n\nimpl<IO: Io> NodeState<IO> {\n\n pub fn load(node_id: NodeId, config: ClusterConfig, io: IO, metrics: NodeStateMetrics) -> Self {\n\n let mut common = Common::new(node_id, io, config, metrics.clone());\n\n let role = RoleState::Loader(Loader::new(&mut common));\n\n let started_at = Instant::now();\n\n NodeState {\n\n common,\n\n role,\n\n started_at,\n\n metrics,\n\n }\n", "file_path": "src/node_state/mod.rs", "rank": 35, "score": 52101.20098796432 }, { "content": "struct Down {\n\n machine: MachineState,\n\n io: DeterministicIo,\n\n restart: LogicalDuration,\n\n}\n\nimpl Down {\n\n pub fn new(machine: MachineState, io: DeterministicIo, restart: LogicalDuration) -> Self {\n\n Down {\n\n machine,\n\n io,\n\n restart,\n\n }\n\n }\n\n pub fn propose_command(&mut self, _command: Command) -> Result<()> {\n\n track_panic!(ErrorKind::NotLeader, \"This process is down\");\n\n }\n\n pub fn propose_config(&mut self, _: ClusterMembers) -> Result<()> {\n\n track_panic!(ErrorKind::NotLeader, \"This process is down\");\n\n }\n\n pub fn heartbeat(&mut self) -> Result<()> {\n", "file_path": "raftlog_simu/src/process.rs", "rank": 36, "score": 49891.97055460813 }, { "content": "struct Alive {\n\n logger: Logger,\n\n machine: MachineState,\n\n next_commit: LogIndex,\n\n rlog: ReplicatedLog<DeterministicIo>,\n\n proposals: Vec<ProposalId>,\n\n heartbeats: VecDeque<SequenceNumber>,\n\n}\n\nimpl Alive {\n\n /// 新しい`Alive`インスタンスを生成する.\n\n pub fn new(\n\n logger: Logger,\n\n node_id: NodeId,\n\n members: ClusterMembers,\n\n io: DeterministicIo,\n\n ) -> Self {\n\n let metric_builder = MetricBuilder::new();\n\n let machine = MachineState::new();\n\n let rlog = 
ReplicatedLog::new(node_id, members, io, &metric_builder).expect(\"Never fails\");\n\n Alive {\n", "file_path": "raftlog_simu/src/process.rs", "rank": 37, "score": 48509.77928443576 }, { "content": "#[derive(PartialEq, Eq)]\n\nstruct Committed {\n\n entry: LogEntry,\n\n state: MachineState,\n\n}\n", "file_path": "raftlog_simu/src/simulator.rs", "rank": 38, "score": 48509.77928443576 }, { "content": "fn main() {\n\n let matches = app_from_crate!()\n\n .arg(\n\n Arg::with_name(\"CONFIG_FILE\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .takes_value(true),\n\n ).arg(Arg::with_name(\"RANDOM_SEED\").long(\"seed\").takes_value(true))\n\n .arg(\n\n Arg::with_name(\"LOOP_COUNT\")\n\n .long(\"loop-count\")\n\n .takes_value(true),\n\n ).get_matches();\n\n\n\n //\n\n // 1. Load Configuration\n\n //\n\n let mut config = if let Some(config_file) = matches.value_of(\"CONFIG_FILE\") {\n\n track_try_unwrap!(serdeconv::from_toml_file(config_file))\n\n } else {\n", "file_path": "raftlog_simu/src/main.rs", "rank": 39, "score": 48144.619339775345 }, { "content": "use futures::Future;\n\n\n\nuse crate::election::{Ballot, Role};\n\nuse crate::log::{Log, LogIndex, LogPrefix, LogSuffix};\n\nuse crate::message::Message;\n\nuse crate::{Error, Result};\n\n\n\n/// Raftの実行に必要なI/O機能を提供するためのトレイト.\n\n///\n\n/// 機能としてはおおまかに以下の三つに区分される:\n\n///\n\n/// - **ストレージ**\n\n/// - ローカルノードの状態やログを保存するための永続ストレージ\n\n/// - Raftが完全に正しく動作するためには、このストレージは完全に信頼できるものである必要がある\n\n/// - 一度書き込まれたデータは(明示的に削除されない限り)失われたり、壊れたりすることは無い\n\n/// - 実際には、それを達成するのは困難なので、信頼性とストレージコストのトレードオフとなる\n\n/// - **チャンネル**\n\n/// - ノード間通信(RPC)用のメッセージ送受信チャンネル\n\n/// - このチャンネルの信頼性はある程度低くても良い\n\n/// - メッセージ群の順番の入れ替わりや、欠損、重複配送、は許容される\n\n/// - ただし、メッセージの改竄や捏造、はNG\n\n/// - **タイマー**\n\n/// - タイムアウト管理用のタイマー\n", "file_path": "src/io.rs", "rank": 40, "score": 35538.65422440527 }, { "content": " fn load_log(&mut self, start: LogIndex, end: Option<LogIndex>) -> Self::LoadLog;\n\n\n\n /// 選挙における役割に応じた時間のタイムアウトオブジェクトを生成する.\n\n fn create_timeout(&mut self, 
role: Role) -> Self::Timeout;\n\n\n\n /// I/O処理を行う余裕があるかどうかを返す.\n\n ///\n\n /// これが`true`を返している間は、フォロワーの同期処理は実施されない.\n\n fn is_busy(&mut self) -> bool {\n\n false\n\n }\n\n}\n", "file_path": "src/io.rs", "rank": 41, "score": 35538.235272256396 }, { "content": " /// 保存に成功した場合は、それ以前のログ領域は破棄してしまって構わない.\n\n fn save_log_prefix(&mut self, prefix: LogPrefix) -> Self::SaveLog;\n\n\n\n /// ローカルログの末尾部分を保存(追記)する.\n\n ///\n\n /// `suffix`の開始位置が、現在のログの末尾よりも前方の場合は、\n\n /// 新しい開始位置よりも後ろの古いエントリは削除してしまって構わない.\n\n /// (リーダの入れ替えにより、ログの未コミット部分で競合が発生したことを示している)\n\n fn save_log_suffix(&mut self, suffix: &LogSuffix) -> Self::SaveLog;\n\n\n\n /// ローカルログの指定範囲のエントリを取得する.\n\n ///\n\n /// 範囲は`start`から始まり、`end`を含まない最後のエントリまでを取得する.\n\n /// `end`の値が`None`の場合には、ログの末端までを取得する.\n\n ///\n\n /// なお、`end`が指定されているにも関わらず、指定よりも少ないエントリしか\n\n /// 取得できなかった場合には「取得できたエントリのみを返す」ないし「エラーを返す」の\n\n /// どちらの挙動も許容される.\n\n ///\n\n /// ただし、`start`とは異なる位置から、エントリの取得を開始することは許可されない.\n", "file_path": "src/io.rs", "rank": 42, "score": 35536.336327575984 }, { "content": " /// このメソッドが`Err`を返した場合には、ローカルのRaftノードが\n\n /// 停止してしまうので、時間経過によって自動的には回復しない\n\n /// 致命的なものを除いては、`Err`は返さないことが望ましい.\n\n fn try_recv_message(&mut self) -> Result<Option<Message>>;\n\n\n\n /// メッセージを送信する.\n\n ///\n\n /// もしメッセージ送信に何らかの理由で失敗した場合でも、単に無視される.\n\n /// 仮にチャンネルの致命的な問題が発生している場合には、次の`try_recv_message`メソッドの\n\n /// 呼び出しで`Err`を返すこと.\n\n fn send_message(&mut self, message: Message);\n\n\n\n /// ローカルノードの投票状況を保存する.\n\n fn save_ballot(&mut self, ballot: Ballot) -> Self::SaveBallot;\n\n\n\n /// ローカルノードの前回の投票状況を取得する.\n\n fn load_ballot(&mut self) -> Self::LoadBallot;\n\n\n\n /// ローカルログの前半部分(i.e., スナップショット)を保存する.\n\n ///\n", "file_path": "src/io.rs", "rank": 43, "score": 35534.93172938488 }, { "content": " /// 新メンバのみの構成への移行完了を把握したい場合には、後続のコミットイベントの\n\n /// 追跡を行う必要がある.\n\n ///\n\n /// もし返り値の`LogPosition`とは分岐した`Event::Committed`が返された場合には、\n\n /// この提案が棄却されたことを示している.\n\n ///\n\n /// 複数の構成変更を並行して実施することは可能だが、\n\n /// その場合は、最後に提案されたものが最終的な構成として採用される.\n\n ///\n\n /// # 
Errors\n\n ///\n\n /// 非リーダノードに対して、このメソッドが実行された場合には、\n\n /// `ErrorKind::NotLeader`を理由としたエラーが返される.\n\n pub fn propose_config(&mut self, new_members: ClusterMembers) -> Result<ProposalId> {\n\n if let RoleState::Leader(ref mut leader) = self.node.role {\n\n let config = self.node.common.config().start_config_change(new_members);\n\n let term = self.node.common.term();\n\n let entry = LogEntry::Config { term, config };\n\n let proposal_id = leader.propose(&mut self.node.common, entry);\n\n Ok(proposal_id)\n", "file_path": "src/replicated_log.rs", "rank": 44, "score": 34270.66912043306 }, { "content": " ///\n\n /// なお、ノードの再起動時を除いて、`node_id`には対象クラスタの歴史の中でユニークなIDを\n\n /// 割り当てるのが望ましい.\n\n /// (レアケースではあるが、新規追加ノードを、以前に存在したノードと誤認識されてしまうと、\n\n /// 分散ログの整合性が壊れてしまう危険性があるため)\n\n ///\n\n /// また、以前のノードを再起動したい場合でも、もし永続ストレージが壊れている等の理由で、\n\n /// 前回の状態を正確に復元できないのであれば、\n\n /// ノード名を変更して、新規ノード追加扱いにした方が安全である.\n\n #[allow(clippy::new_ret_no_self)]\n\n pub fn new(\n\n node_id: NodeId,\n\n members: ClusterMembers,\n\n io: IO,\n\n metric_builder: &MetricBuilder,\n\n ) -> Result<Self> {\n\n let config = ClusterConfig::new(members);\n\n let mut metric_builder = metric_builder.clone();\n\n metric_builder.namespace(\"raftlog\");\n\n let metrics = track!(RaftlogMetrics::new(&metric_builder))?;\n", "file_path": "src/replicated_log.rs", "rank": 45, "score": 34266.54895619708 }, { "content": " /// 既にローカルログに対するスナップショットのインストールが進行中の場合には、\n\n /// `ErrorKind::Busy`を理由としてエラーが返される.\n\n ///\n\n /// また現在のログの先頭よりも前の地点のスナップショットをインストールしようとした場合には、\n\n /// `ErrorKind::InvalidInput`を理由としたエラーが返される.\n\n pub fn install_snapshot(&mut self, new_head: LogIndex, snapshot: Vec<u8>) -> Result<()> {\n\n track_assert!(\n\n !self.node.is_loading(),\n\n ErrorKind::Busy,\n\n \"Loading node state\"\n\n );\n\n\n\n let (prev_term, config) = {\n\n let record = track!(\n\n self.node\n\n .common\n\n .log()\n\n .get_record(new_head)\n\n .ok_or_else(|| ErrorKind::InvalidInput.error()),\n\n \"Too old log position: new_head={:?}, 
current_head={:?}, node={:?}\",\n", "file_path": "src/replicated_log.rs", "rank": 46, "score": 34265.626708879674 }, { "content": "use futures::{Poll, Stream};\n\nuse prometrics::metrics::MetricBuilder;\n\nuse std::sync::Arc;\n\nuse trackable::error::ErrorKindExt;\n\n\n\nuse crate::cluster::{ClusterConfig, ClusterMembers};\n\nuse crate::election::{Ballot, Role};\n\nuse crate::io::Io;\n\nuse crate::log::{LogEntry, LogHistory, LogIndex, LogPosition, LogPrefix, ProposalId};\n\nuse crate::message::SequenceNumber;\n\nuse crate::metrics::RaftlogMetrics;\n\nuse crate::node::{Node, NodeId};\n\nuse crate::node_state::{NodeState, RoleState};\n\nuse crate::{Error, ErrorKind, Result};\n\n\n\n/// Raftアルゴリズムに基づく分散複製ログ.\n\n///\n\n/// 利用者は`propose_command`メソッドを使って、コマンドをログに複製保存し、\n\n/// 発生する`Event`をハンドリングすることで、\n\n/// 整合性のある複製状態機械を実現することが可能となる.\n", "file_path": "src/replicated_log.rs", "rank": 47, "score": 34263.64583465007 }, { "content": "///\n\n/// `ReplicatedLog`は`Stream`トレイトを実装しているが、\n\n/// これは無限ストリームであり、エラー時を除いて終了することはない.\n\n///\n\n/// ただし、構成変更によりノードがクラスタから切り離された場合は、\n\n/// 最終的には、イベントが生成されることは無くなる.\n\n/// `this.local_history().config().is_known_node()`メソッドを使うことで、\n\n/// クラスタ内に属しているかどうかは判定可能なので、利用者側が明示的に確認して、\n\n/// 不要になった`ReplicatedLog`インスタンスを回収することは可能.\n\npub struct ReplicatedLog<IO: Io> {\n\n node: NodeState<IO>,\n\n metrics: Arc<RaftlogMetrics>,\n\n}\n\nimpl<IO: Io> ReplicatedLog<IO> {\n\n /// `members`で指定されたクラスタに属する`ReplicatedLog`のローカルインスタンス(ノード)を生成する.\n\n ///\n\n /// ローカルノードのIDは`node_id`で指定するが、これが`members`の含まれている必要は必ずしもない.\n\n /// 例えば、クラスタの構成変更に伴い、新規ノードを追加したい場合には、\n\n /// `members`に現行構成を指定することが望ましいが、このケースでは、\n\n /// `node_id`は`members`の中には含まれないことになる.\n", "file_path": "src/replicated_log.rs", "rank": 48, "score": 34263.2415772414 }, { "content": " /// # Errors\n\n ///\n\n /// 非リーダノードに対して、このメソッドが実行された場合には、\n\n /// `ErrorKind::NotLeader`を理由としたエラーが返される.\n\n pub fn heartbeat(&mut self) -> Result<SequenceNumber> {\n\n if let RoleState::Leader(ref mut leader) = self.node.role 
{\n\n let seq_no = leader.heartbeat_syn(&mut self.node.common);\n\n Ok(seq_no)\n\n } else {\n\n track_panic!(ErrorKind::NotLeader);\n\n }\n\n }\n\n\n\n /// ローカルログにスナップショットをインストールする.\n\n ///\n\n /// `new_head`が新しいローカルログの先頭位置となり、\n\n /// `snapshot`はその地点までのコマンド群が適用済みの状態機械のスナップショット、となる.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/replicated_log.rs", "rank": 49, "score": 34262.66923943766 }, { "content": " self.node.common.release_io()\n\n }\n\n}\n\nimpl<IO: Io> Stream for ReplicatedLog<IO> {\n\n type Item = Event;\n\n type Error = Error;\n\n fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {\n\n track!(self.node.poll(), \"node={:?}\", self.local_node())\n\n }\n\n}\n\n\n\n/// `ReplicatedLog`から発生するイベント一覧.\n\n#[derive(Debug, PartialEq, Eq)]\n\n#[allow(missing_docs)]\n\npub enum Event {\n\n /// ローカルノードの役割が変わった.\n\n RoleChanged { new_role: Role },\n\n\n\n /// 新しい選挙期間に移った.\n\n TermChanged { new_ballot: Ballot },\n", "file_path": "src/replicated_log.rs", "rank": 50, "score": 34262.59552161272 }, { "content": "use std::collections::VecDeque;\n\nuse trackable::error::ErrorKindExt;\n\n\n\nuse crate::cluster::ClusterConfig;\n\nuse crate::log::{LogEntry, LogIndex, LogPosition, LogPrefix, LogSuffix};\n\nuse crate::{ErrorKind, Result};\n\n\n\n/// ローカルログの歴史(要約)を保持するためのデータ構造.\n\n///\n\n/// スナップショット地点以降のローカルログに関して発生した、\n\n/// 重要な出来事(i.g., `Term`の変更)が記録されている.\n\n///\n\n/// それ以外に「ログの末尾(log_tail)」および「ログのコミット済み末尾(log_committed_tail)」、\n\n/// 「ログの消費済み末尾(log_consumed_tail)」の三つの地点を保持している.\n\n///\n\n/// それらの関しては`log_consumed_tail <= log_committed_tail <= log_tail`の不変項が維持される.\n\n#[derive(Debug, Clone)]\n\npub struct LogHistory {\n\n appended_tail: LogPosition,\n\n committed_tail: LogPosition,\n", "file_path": "src/log/history.rs", "rank": 51, "score": 34262.561864374664 }, { "content": " ///\n\n /// `new_tail`が`LogSuffix`が保持する範囲の外の場合には、\n\n /// `ErrorKind::InvalidInput`を理由としたエラーが返される.\n\n pub fn truncate(&mut self, new_tail: LogIndex) -> Result<()> {\n\n 
track_assert!(self.head.index <= new_tail, ErrorKind::InvalidInput);\n\n track_assert!(new_tail <= self.tail().index, ErrorKind::InvalidInput);\n\n let delta = self.tail().index - new_tail;\n\n let new_len = self.entries.len() - delta;\n\n self.entries.truncate(new_len);\n\n Ok(())\n\n }\n\n\n\n /// 指定された範囲のログ領域を切り出して返す.\n\n ///\n\n /// # Errors\n\n ///\n\n /// `self`が指定範囲を包含していない場合には、\n\n /// `ErrorKind::InvalidInput`を理由としてエラーが返される.\n\n pub fn slice(&self, start: LogIndex, end: LogIndex) -> Result<Self> {\n\n track_assert!(self.head.index <= start, ErrorKind::InvalidInput);\n", "file_path": "src/log/mod.rs", "rank": 52, "score": 34262.489747051775 }, { "content": " /// `new_head`は、現在のログの末尾を超えていても良いが、\n\n /// 現在のログの先頭以前のものは許容されない.\n\n /// (スナップショット地点から現在までの歴史が消失してしまうため)\n\n ///\n\n /// なお、`head`以前の記録は歴史から削除される.\n\n pub fn record_snapshot_installed(\n\n &mut self,\n\n new_head: LogPosition,\n\n config: ClusterConfig,\n\n ) -> Result<()> {\n\n track_assert!(\n\n self.head().index <= new_head.index,\n\n ErrorKind::InconsistentState,\n\n \"self.head={:?}, new_head={:?}\",\n\n self.head(),\n\n new_head\n\n );\n\n\n\n // スナップショット地点までの歴史は捨てる\n\n while self\n", "file_path": "src/log/history.rs", "rank": 53, "score": 34262.47510815974 }, { "content": " /// スナップショットが読み込まれたことを記録する.\n\n ///\n\n /// ローカルログ内のスナップショット地点までのエントリは、消費されたものとして扱われる.\n\n pub fn record_snapshot_loaded(&mut self, snapshot: &LogPrefix) -> Result<()> {\n\n if self.consumed_tail.index < snapshot.tail.index {\n\n track_assert!(\n\n snapshot.tail.index <= self.committed_tail.index,\n\n ErrorKind::InconsistentState,\n\n \"snapshot.tail.index={:?}, self.committed_tail.index={:?}\",\n\n snapshot.tail.index,\n\n self.committed_tail.index\n\n );\n\n self.consumed_tail = snapshot.tail;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n/// `LogHistory`に保持されるレコード.\n\n#[derive(Debug, Clone)]\n", "file_path": "src/log/history.rs", "rank": 54, "score": 34262.4161360622 }, { "content": " /// # Errors\n\n ///\n\n /// 
非リーダノードに対して、このメソッドが実行された場合には、\n\n /// `ErrorKind::NotLeader`を理由としたエラーが返される.\n\n pub fn propose_command(&mut self, command: Vec<u8>) -> Result<ProposalId> {\n\n if let RoleState::Leader(ref mut leader) = self.node.role {\n\n let term = self.node.common.term();\n\n let entry = LogEntry::Command { term, command };\n\n let proposal_id = leader.propose(&mut self.node.common, entry);\n\n Ok(proposal_id)\n\n } else {\n\n track_panic!(ErrorKind::NotLeader)\n\n }\n\n }\n\n\n\n /// 新しいクラスタ構成(新メンバ群)を提案する.\n\n ///\n\n /// 提案が承認(コミット)された場合には、返り値の`LogPosition`を含む\n\n /// `Event::Committed`イベントが返される.\n\n /// ただし、承認された場合であっても、それは新旧混合状態の構成が承認されただけであり、\n", "file_path": "src/replicated_log.rs", "rank": 55, "score": 34262.16914503528 }, { "content": "//! ノードローカルなログ関連の構成要素群.\n\nuse std::ops::{Add, AddAssign, Sub, SubAssign};\n\n\n\npub use self::history::{HistoryRecord, LogHistory};\n\n\n\nuse crate::cluster::ClusterConfig;\n\nuse crate::election::Term;\n\nuse crate::{ErrorKind, Result};\n\n\n\nmod history;\n\n\n\n/// ローカルログ.\n\n#[derive(Debug)]\n\npub enum Log {\n\n /// ログの前半部分 (i.e., スナップショット).\n\n Prefix(LogPrefix),\n\n\n\n /// ログの後半部分>\n\n Suffix(LogSuffix),\n\n}\n", "file_path": "src/log/mod.rs", "rank": 56, "score": 34262.079309011286 }, { "content": " new_head,\n\n self.local_history().head(),\n\n self.local_node()\n\n )?;\n\n (record.head.prev_term, record.config.clone())\n\n };\n\n let prefix = LogPrefix {\n\n tail: LogPosition {\n\n prev_term,\n\n index: new_head,\n\n },\n\n config,\n\n snapshot,\n\n };\n\n track!(self.node.common.install_snapshot(prefix))?;\n\n Ok(())\n\n }\n\n\n\n /// 新しい選挙を開始する.\n\n ///\n", "file_path": "src/replicated_log.rs", "rank": 57, "score": 34261.67656120324 }, { "content": " }\n\n\n\n /// I/O実装に対する参照を返す.\n\n pub fn io(&self) -> &IO {\n\n self.node.common.io()\n\n }\n\n\n\n /// I/O実装に対する破壊的な参照を返す.\n\n ///\n\n /// # Safety\n\n /// 破壊的な操作は、Raftの管理外の挙動となり、\n\n /// 整合性を崩してしまう可能性もあるので、\n\n /// 注意を喚起する意味で`unsafe`と設定されている.\n\n pub unsafe fn io_mut(&mut 
self) -> &mut IO {\n\n self.node.common.io_mut()\n\n }\n\n\n\n /// このReplicatedLogの所有権を放棄し、\n\n /// その代わりに内部で使われているIOインスタンスを取得する.\n\n pub fn release_io(self) -> IO {\n", "file_path": "src/replicated_log.rs", "rank": 58, "score": 34260.85681905093 }, { "content": "pub struct HistoryRecord {\n\n /// 記録地点.\n\n pub head: LogPosition,\n\n\n\n /// 記録時のクラスタ構成.\n\n pub config: ClusterConfig,\n\n}\n\nimpl HistoryRecord {\n\n fn new(head: LogPosition, config: ClusterConfig) -> Self {\n\n HistoryRecord { head, config }\n\n }\n\n}\n", "file_path": "src/log/history.rs", "rank": 59, "score": 34260.376056602894 }, { "content": " pub config: ClusterConfig,\n\n\n\n /// 前半部分に含まれるコマンド群の適用後の状態機械のスナップショット.\n\n pub snapshot: Vec<u8>,\n\n}\n\n\n\n/// ログの後半部分.\n\n///\n\n/// 厳密には、常に\"後半部分\"、つまり「ある地点より後ろの全てのエントリ」を\n\n/// 含んでいる訳ではない.\n\n///\n\n/// ただし、このデータ構造自体は、常に追記的なアクセスのために利用され、\n\n/// \"ログの途中の一部だけを更新する\"といった操作は発生しないので、\n\n/// \"常にログの末尾に対して適用される\"的な意味合いで`Suffix`と付けている.\n\n#[derive(Debug, Clone)]\n\npub struct LogSuffix {\n\n /// ログの開始位置.\n\n ///\n\n /// `entries`のサイズが1以上の場合には、\n\n /// その最初のエントリの位置となる.\n", "file_path": "src/log/mod.rs", "rank": 60, "score": 34259.9436534265 }, { "content": " /// 何らかの手段で現在のリーダのダウンを検知した場合に呼び出される.\n\n pub fn start_election(&mut self) {\n\n self.node.start_election();\n\n }\n\n\n\n /// ローカルノードの情報を返す.\n\n pub fn local_node(&self) -> &Node {\n\n self.node.common.local_node()\n\n }\n\n\n\n /// ローカルログの履歴を返す.\n\n pub fn local_history(&self) -> &LogHistory {\n\n self.node.common.log()\n\n }\n\n\n\n /// ローカルログへの書き込み待ちの状態の提案群の数を返す.\n\n ///\n\n /// この値は、ローカルストレージの詰まり具合を把握するために有用である.\n\n ///\n\n /// 「ローカルログへは追記完了 and コミット待ち」の個数は\n", "file_path": "src/replicated_log.rs", "rank": 61, "score": 34259.70990628072 }, { "content": " let node = NodeState::load(node_id, config, io, metrics.node_state.clone());\n\n Ok(ReplicatedLog {\n\n node,\n\n metrics: Arc::new(metrics),\n\n })\n\n }\n\n\n\n /// `raftlog` のメトリクスを返す。\n\n pub fn metrics(&self) -> &Arc<RaftlogMetrics> {\n\n 
&self.metrics\n\n }\n\n\n\n /// 新しいコマンドを提案する.\n\n ///\n\n /// 提案が承認(コミット)された場合には、返り値の`LogPosition`を含む\n\n /// `Event::Committed`イベントが返される.\n\n ///\n\n /// もし返り値の`LogPosition`とは分岐した`Event::Committed`が返された場合には、\n\n /// この提案が棄却されたことを示している.\n\n ///\n", "file_path": "src/replicated_log.rs", "rank": 62, "score": 34259.70951913113 }, { "content": " pub fn record_appended(&mut self, suffix: &LogSuffix) -> Result<()> {\n\n let entries_offset = if self.appended_tail.index <= suffix.head.index {\n\n 0\n\n } else {\n\n // NOTE:\n\n // 追記中にスナップショットがインストールされた場合に、\n\n // 両者の先頭位置がズレることがあるので調整する\n\n self.appended_tail.index - suffix.head.index\n\n };\n\n for (i, e) in suffix.entries.iter().enumerate().skip(entries_offset) {\n\n let tail = LogPosition {\n\n prev_term: e.term(),\n\n index: suffix.head.index + i + 1,\n\n };\n\n if let LogEntry::Config { ref config, .. } = *e {\n\n if self.last_record().config != *config {\n\n // クラスタ構成が変更された\n\n let record = HistoryRecord::new(tail, config.clone());\n\n self.records.push_back(record);\n\n }\n", "file_path": "src/log/history.rs", "rank": 63, "score": 34259.1390156538 }, { "content": " /// 知りたい場合には`local_history`メソッド経由で取得可能.\n\n ///\n\n /// ローカルノードが非リーダである場合には、常に`0`が返される.\n\n pub fn proposal_queue_len(&self) -> usize {\n\n if let RoleState::Leader(ref leader) = self.node.role {\n\n leader.proposal_queue_len(&self.node.common)\n\n } else {\n\n 0\n\n }\n\n }\n\n\n\n /// スナップショットをインストール中の場合には`true`を返す.\n\n ///\n\n /// このメソッドが`true`を返している間は、\n\n /// 新しいスナップショットのインストールを行うことはできない.\n\n pub fn is_snapshot_installing(&self) -> bool {\n\n // 起動後のロードフェーズの間もスナップショットのインストールは行えないので、\n\n // その場合も`true`を返しておく.\n\n self.node.is_loading() || self.node.common.is_snapshot_installing()\n\n }\n", "file_path": "src/replicated_log.rs", "rank": 64, "score": 34259.00493786795 }, { "content": " ///\n\n /// リーダ選出時には、最初にこのエントリがログに追加され、\n\n /// `Term`が変わったことを記録する.\n\n Noop { term: Term },\n\n\n\n /// クラスタ構成の変更を共有するためのエントリ.\n\n Config { term: Term, config: 
ClusterConfig },\n\n\n\n /// 状態機械の入力となるコマンドを格納したエントリ.\n\n Command { term: Term, command: Vec<u8> },\n\n}\n\nimpl LogEntry {\n\n /// このエントリが発行された`Term`を返す.\n\n pub fn term(&self) -> Term {\n\n match *self {\n\n LogEntry::Noop { term } => term,\n\n LogEntry::Config { term, .. } => term,\n\n LogEntry::Command { term, .. } => term,\n\n }\n\n }\n", "file_path": "src/log/mod.rs", "rank": 65, "score": 34258.610196698566 }, { "content": " consumed_tail: LogPosition,\n\n records: VecDeque<HistoryRecord>,\n\n}\n\nimpl LogHistory {\n\n /// 初期クラスタ構成を与えて、新しい`LogHistory`インスタンスを生成する.\n\n pub fn new(config: ClusterConfig) -> Self {\n\n let initial = HistoryRecord::new(LogPosition::default(), config);\n\n LogHistory {\n\n appended_tail: LogPosition::default(),\n\n committed_tail: LogPosition::default(),\n\n consumed_tail: LogPosition::default(),\n\n records: vec![initial].into(),\n\n }\n\n }\n\n\n\n /// ローカルログの先端位置を返す.\n\n pub fn head(&self) -> LogPosition {\n\n self.records[0].head\n\n }\n\n\n", "file_path": "src/log/history.rs", "rank": 66, "score": 34258.593878833264 }, { "content": " }\n\n if tail.prev_term != self.last_record().head.prev_term {\n\n // 新しい選挙期間(`Term`)に移った\n\n track_assert!(\n\n self.last_record().head.prev_term < tail.prev_term,\n\n ErrorKind::Other,\n\n \"last_record.head={:?}, tail={:?}\",\n\n self.last_record().head,\n\n tail\n\n );\n\n let record = HistoryRecord::new(tail, self.last_record().config.clone());\n\n self.records.push_back(record);\n\n }\n\n }\n\n self.appended_tail = suffix.tail();\n\n Ok(())\n\n }\n\n\n\n /// `new_tail_index`までコミット済み地点が進んだことを記録する.\n\n pub fn record_committed(&mut self, new_tail_index: LogIndex) -> Result<()> {\n", "file_path": "src/log/history.rs", "rank": 67, "score": 34258.092115432504 }, { "content": " Ok(())\n\n }\n\n\n\n /// `new_tail`までのログに含まれるコマンドが消費されたことを記録する.\n\n ///\n\n /// ここでの\"消費\"とは「状態機械に入力として渡されて実行された」ことを意味する.\n\n pub fn record_consumed(&mut self, new_tail_index: LogIndex) -> Result<()> {\n\n 
track_assert!(self.consumed_tail.index <= new_tail_index, ErrorKind::Other);\n\n track_assert!(\n\n new_tail_index <= self.committed_tail.index,\n\n ErrorKind::Other\n\n );\n\n\n\n let prev_term =\n\n track!(self.get_record(new_tail_index).ok_or_else(\n\n || ErrorKind::Other.cause(format!(\"Too old index: {:?}\", new_tail_index))\n\n ))?\n\n .head\n\n .prev_term;\n\n self.consumed_tail = LogPosition {\n", "file_path": "src/log/history.rs", "rank": 68, "score": 34257.671393677396 }, { "content": "impl Default for LogSuffix {\n\n fn default() -> Self {\n\n LogSuffix {\n\n head: LogPosition::default(),\n\n entries: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\n/// `LogSuffix`に含まれるログの位置を走査するための`Iterator`実装.\n\n#[derive(Debug)]\n\npub struct LogPositions<'a> {\n\n suffix: &'a LogSuffix,\n\n offset: usize,\n\n}\n\nimpl<'a> Iterator for LogPositions<'a> {\n\n type Item = LogPosition;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.suffix.entries.len() < self.offset {\n\n None\n", "file_path": "src/log/mod.rs", "rank": 69, "score": 34257.63386995779 }, { "content": "\n\n /// 後半部分に含まれるエントリの位置を走査するためのイテレータを返す.\n\n pub fn positions(&self) -> LogPositions {\n\n LogPositions {\n\n suffix: self,\n\n offset: 0,\n\n }\n\n }\n\n\n\n /// `new_head`のまでスキップする.\n\n ///\n\n /// 現在の先頭から`new_head`までのエントリは破棄され、`new_head`が新しい先頭になる.\n\n ///\n\n /// # Errors\n\n ///\n\n /// 以下のいずれかの場合には`ErrorKind::InvalidInput`が返される:\n\n ///\n\n /// - `new_head < self.head.index`\n\n /// - `self.tail().index < new_head`\n\n pub fn skip_to(&mut self, new_head: LogIndex) -> Result<()> {\n", "file_path": "src/log/mod.rs", "rank": 70, "score": 34257.56506588406 }, { "content": " /// ローカルログの終端位置を返す.\n\n pub fn tail(&self) -> LogPosition {\n\n self.appended_tail\n\n }\n\n\n\n /// ローカルログのコミット済みの終端位置を返す.\n\n ///\n\n /// 「コミット済みの終端」==「未コミットの始端」\n\n pub fn committed_tail(&self) -> LogPosition {\n\n self.committed_tail\n\n }\n\n\n\n /// ローカルログの適用済みの終端位置を返す.\n\n pub fn consumed_tail(&self) -> LogPosition {\n\n 
self.consumed_tail\n\n }\n\n\n\n /// ローカルログに記録された最新のクラスタ構成を返す.\n\n pub fn config(&self) -> &ClusterConfig {\n\n &self.last_record().config\n", "file_path": "src/log/history.rs", "rank": 71, "score": 34257.35553214644 }, { "content": "impl From<LogPrefix> for Log {\n\n fn from(f: LogPrefix) -> Self {\n\n Log::Prefix(f)\n\n }\n\n}\n\nimpl From<LogSuffix> for Log {\n\n fn from(f: LogSuffix) -> Self {\n\n Log::Suffix(f)\n\n }\n\n}\n\n\n\n/// ログの前半部分 (i.e., スナップショット).\n\n#[derive(Debug, Clone)]\n\npub struct LogPrefix {\n\n /// 前半部分の終端位置.\n\n ///\n\n /// \"終端位置\" = \"前半部分に含まれない最初の位置\".\n\n pub tail: LogPosition,\n\n\n\n /// 前半部分に含まれる中で、最新の構成情報.\n", "file_path": "src/log/mod.rs", "rank": 72, "score": 34257.00438148588 }, { "content": " /// 新しい`LogIndex`インスタンスを生成する.\n\n pub fn new(index: u64) -> Self {\n\n LogIndex(index)\n\n }\n\n\n\n /// インデックスの値を返す.\n\n pub fn as_u64(self) -> u64 {\n\n self.0\n\n }\n\n}\n\nimpl From<u64> for LogIndex {\n\n fn from(f: u64) -> Self {\n\n LogIndex::new(f)\n\n }\n\n}\n\nimpl Add<usize> for LogIndex {\n\n type Output = Self;\n\n fn add(self, rhs: usize) -> Self::Output {\n\n LogIndex(self.0 + rhs as u64)\n\n }\n", "file_path": "src/log/mod.rs", "rank": 73, "score": 34256.94995746031 }, { "content": "}\n\nimpl AddAssign<usize> for LogIndex {\n\n fn add_assign(&mut self, rhs: usize) {\n\n self.0 += rhs as u64;\n\n }\n\n}\n\nimpl Sub for LogIndex {\n\n type Output = usize;\n\n fn sub(self, rhs: Self) -> Self::Output {\n\n (self.0 - rhs.0) as usize\n\n }\n\n}\n\nimpl Sub<usize> for LogIndex {\n\n type Output = Self;\n\n fn sub(self, rhs: usize) -> Self::Output {\n\n LogIndex(self.0 - rhs as u64)\n\n }\n\n}\n\nimpl SubAssign<usize> for LogIndex {\n\n fn sub_assign(&mut self, rhs: usize) {\n", "file_path": "src/log/mod.rs", "rank": 74, "score": 34256.26600747565 }, { "content": " let suffix = LogSuffix {\n\n head: LogPosition {\n\n prev_term: 0.into(),\n\n index: 30.into(),\n\n },\n\n entries: vec![noop(0), noop(2), noop(2)],\n\n };\n\n 
assert_eq!(\n\n suffix.positions().collect::<Vec<_>>(),\n\n [id(0, 30), id(0, 31), id(2, 32), id(2, 33)]\n\n );\n\n assert_eq!(suffix.entries.len(), 3);\n\n\n\n let slice = suffix.slice(31.into(), 33.into()).unwrap();\n\n assert_eq!(\n\n slice.positions().collect::<Vec<_>>(),\n\n [id(0, 31), id(2, 32), id(2, 33)]\n\n );\n\n assert_eq!(slice.entries.len(), 2);\n\n }\n\n}\n", "file_path": "src/log/mod.rs", "rank": 75, "score": 34256.1753938002 }, { "content": " prev_term,\n\n index: new_tail_index,\n\n };\n\n Ok(())\n\n }\n\n\n\n /// 「追記済み and 未コミット」な末尾領域がロールバック(破棄)されたことを記録する.\n\n ///\n\n /// ログの新しい終端は`new_tail`となる.\n\n pub fn record_rollback(&mut self, new_tail: LogPosition) -> Result<()> {\n\n track_assert!(new_tail.index <= self.appended_tail.index, ErrorKind::Other);\n\n track_assert!(\n\n self.committed_tail.index <= new_tail.index,\n\n ErrorKind::Other,\n\n \"old={:?}, new={:?}\",\n\n self.committed_tail,\n\n new_tail\n\n );\n\n track_assert_eq!(\n\n self.get_record(new_tail.index).map(|r| r.head.prev_term),\n", "file_path": "src/log/history.rs", "rank": 76, "score": 34256.11570078021 }, { "content": "\n\n /// 過半数以上の応答を得られた最新のハートビート(i.e., AppendEntriesCall) のシーケンス番号を返す.\n\n ///\n\n /// この値は、同じ選挙期間に関しては減少することはないことが保証されている.\n\n ///\n\n /// # 注意\n\n ///\n\n /// ハートビートを行うのはリーダノードのみなので、それ以外のノードに関しては、\n\n /// このメソッドが返す値は意味を持たない.\n\n pub fn last_heartbeat_ack(&self) -> SequenceNumber {\n\n if let RoleState::Leader(ref leader) = self.node.role {\n\n leader.last_heartbeat_ack()\n\n } else {\n\n SequenceNumber::new(0)\n\n }\n\n }\n\n\n\n /// 現在のクラスタ構成を返す.\n\n pub fn cluster_config(&self) -> &ClusterConfig {\n\n self.node.common.config()\n", "file_path": "src/replicated_log.rs", "rank": 77, "score": 34255.59717826611 }, { "content": " track_assert!(start <= end, ErrorKind::InvalidInput);\n\n track_assert!(end <= self.tail().index, ErrorKind::InvalidInput);\n\n let slice_start = start - self.head.index;\n\n let slice_end = end - self.head.index;\n\n let slice_head = if 
start == self.head.index {\n\n self.head\n\n } else {\n\n let prev_term = self.entries[slice_start - 1].term();\n\n LogPosition {\n\n prev_term,\n\n index: start,\n\n }\n\n };\n\n let slice_entries = self.entries[slice_start..slice_end].into();\n\n Ok(LogSuffix {\n\n head: slice_head,\n\n entries: slice_entries,\n\n })\n\n }\n\n}\n", "file_path": "src/log/mod.rs", "rank": 78, "score": 34255.20345067375 }, { "content": " };\n\n assert_eq!(\n\n suffix.positions().collect::<Vec<_>>(),\n\n [id(0, 30), id(0, 31), id(2, 32), id(2, 33)]\n\n );\n\n }\n\n #[test]\n\n fn log_suffix_skip_to() {\n\n let mut suffix = LogSuffix {\n\n head: LogPosition {\n\n prev_term: 0.into(),\n\n index: 30.into(),\n\n },\n\n entries: vec![noop(0), noop(2), noop(2)],\n\n };\n\n assert_eq!(\n\n suffix.positions().collect::<Vec<_>>(),\n\n [id(0, 30), id(0, 31), id(2, 32), id(2, 33)]\n\n );\n\n assert_eq!(suffix.entries.len(), 3);\n", "file_path": "src/log/mod.rs", "rank": 79, "score": 34254.69927823274 }, { "content": "\n\n suffix.skip_to(31.into()).unwrap();\n\n assert_eq!(\n\n suffix.positions().collect::<Vec<_>>(),\n\n [id(0, 31), id(2, 32), id(2, 33)]\n\n );\n\n assert_eq!(suffix.entries.len(), 2);\n\n\n\n suffix.skip_to(33.into()).unwrap();\n\n assert_eq!(suffix.positions().collect::<Vec<_>>(), [id(2, 33)]);\n\n assert_eq!(suffix.entries.len(), 0);\n\n\n\n suffix.skip_to(33.into()).unwrap();\n\n assert_eq!(suffix.positions().collect::<Vec<_>>(), [id(2, 33)]);\n\n assert_eq!(suffix.entries.len(), 0);\n\n }\n\n #[test]\n\n fn log_suffix_truncate() {\n\n let mut suffix = LogSuffix {\n\n head: LogPosition {\n", "file_path": "src/log/mod.rs", "rank": 80, "score": 34254.46706470018 }, { "content": " /// let b = LogPosition { prev_term: 10.into(), index: 3.into() };\n\n /// assert!(a.is_newer_or_equal_than(b));\n\n /// assert!(!b.is_newer_or_equal_than(a));\n\n ///\n\n /// // `a`の方がインデックスは大きいが、`b`の方が`Term`は大きい\n\n /// // => 順序が確定できない\n\n /// let a = LogPosition { prev_term: 5.into(), index: 
10.into() };\n\n /// let b = LogPosition { prev_term: 10.into(), index: 3.into() };\n\n /// assert!(!a.is_newer_or_equal_than(b));\n\n /// assert!(!b.is_newer_or_equal_than(a));\n\n /// ```\n\n pub fn is_newer_or_equal_than(&self, other: LogPosition) -> bool {\n\n self.prev_term >= other.prev_term && self.index >= other.index\n\n }\n\n}\n\n\n\n/// あるログエントリのインデックス.\n\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct LogIndex(u64);\n\nimpl LogIndex {\n", "file_path": "src/log/mod.rs", "rank": 81, "score": 34254.29765847175 }, { "content": " prev_term: 0.into(),\n\n index: 30.into(),\n\n },\n\n entries: vec![noop(0), noop(2), noop(2)],\n\n };\n\n assert_eq!(\n\n suffix.positions().collect::<Vec<_>>(),\n\n [id(0, 30), id(0, 31), id(2, 32), id(2, 33)]\n\n );\n\n assert_eq!(suffix.entries.len(), 3);\n\n\n\n suffix.truncate(31.into()).unwrap();\n\n assert_eq!(\n\n suffix.positions().collect::<Vec<_>>(),\n\n [id(0, 30), id(0, 31)]\n\n );\n\n assert_eq!(suffix.entries.len(), 1);\n\n }\n\n #[test]\n\n fn log_suffix_slice() {\n", "file_path": "src/log/mod.rs", "rank": 82, "score": 34253.75957121981 }, { "content": "\n\n /// 新しいリーダーが選出された.\n\n NewLeaderElected,\n\n\n\n /// 新しいログエントリがコミットされた.\n\n ///\n\n /// エントリの内容がコマンドの場合には、\n\n /// `ReplicatedLog`の利用者は、自身が管理する状態機械に、\n\n /// `command`を適用する必要がある.\n\n ///\n\n /// ログエントリはインデックスの昇順でコミットされ,\n\n /// インデックスは常に一ずつ増加する.\n\n Committed { index: LogIndex, entry: LogEntry },\n\n\n\n /// スナップショットがロードされた.\n\n ///\n\n /// `ReplicatedLog`の利用者は、自身が管理する状態機械を、\n\n /// `snapshot`の状態にリセットする必要がある.\n\n SnapshotLoaded {\n\n new_head: LogPosition,\n", "file_path": "src/replicated_log.rs", "rank": 83, "score": 34253.342411234924 }, { "content": " snapshot: Vec<u8>,\n\n },\n\n\n\n /// スナップショットのインストールが行われた.\n\n ///\n\n /// もし`new_head`の位置が、最新のコミット済み地点よりも\n\n /// 新しい場合には、これとは別に`SnapshotLoaded`イベントが発行される.\n\n SnapshotInstalled { new_head: LogPosition },\n\n}\n", "file_path": "src/replicated_log.rs", "rank": 84, 
"score": 34252.8017413101 }, { "content": "}\n\n\n\n/// 提案ID.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct ProposalId {\n\n /// 提案が発行された時の`Term`.\n\n pub term: Term,\n\n\n\n /// 提案を保持するエントリのログ内でのインデックス.\n\n pub index: LogIndex,\n\n}\n\n\n\n/// ログの特定位置を識別するためのデータ構造.\n\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct LogPosition {\n\n /// 一つ前のインデックスのエントリの`Term`.\n\n pub prev_term: Term,\n\n\n\n /// この位置のインデックス.\n\n pub index: LogIndex,\n", "file_path": "src/log/mod.rs", "rank": 85, "score": 34252.79125566706 }, { "content": " pub head: LogPosition,\n\n\n\n /// 後半部分に属するエントリ群.\n\n pub entries: Vec<LogEntry>,\n\n}\n\nimpl LogSuffix {\n\n /// ログの終端位置を返す.\n\n ///\n\n /// \"終端位置\" = \"entriesに含まれない最初のエントリの位置\".\n\n ///\n\n /// `entries`の最後の要素が、ログ全体の最後の要素と一致している場合には、\n\n /// \"終端位置\"は「次にログに追加される位置(= ログの末端)」となる.\n\n pub fn tail(&self) -> LogPosition {\n\n let prev_term = self\n\n .entries\n\n .last()\n\n .map_or(self.head.prev_term, LogEntry::term);\n\n let index = self.head.index + self.entries.len();\n\n LogPosition { prev_term, index }\n\n }\n", "file_path": "src/log/mod.rs", "rank": 86, "score": 34252.49948547952 }, { "content": "}\n\nimpl LogPosition {\n\n /// `self`がログ上で、`other`と等しい、あるいは、より後方に位置している場合に`true`が返る.\n\n ///\n\n /// なお`self`と`other`が、それぞれ分岐したログ上に位置しており、\n\n /// 前後関係が判断できない場合には`false`が返される.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use raftlog::log::LogPosition;\n\n ///\n\n /// // `a`の方がインデックスが大きい\n\n /// let a = LogPosition { prev_term: 10.into(), index: 5.into() };\n\n /// let b = LogPosition { prev_term: 10.into(), index: 3.into() };\n\n /// assert!(a.is_newer_or_equal_than(b));\n\n /// assert!(!b.is_newer_or_equal_than(a));\n\n ///\n\n /// // `a`の方が`Term`が大きい\n\n /// let a = LogPosition { prev_term: 20.into(), index: 3.into() };\n", "file_path": "src/log/mod.rs", "rank": 87, "score": 34252.16254650308 }, { "content": " self.0 -= rhs as u64;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use 
super::*;\n\n\n\n fn id(prev_term: u64, index: u64) -> LogPosition {\n\n LogPosition {\n\n prev_term: prev_term.into(),\n\n index: index.into(),\n\n }\n\n }\n\n fn noop(term: u64) -> LogEntry {\n\n LogEntry::Noop { term: term.into() }\n\n }\n\n\n\n #[test]\n\n fn log_suffix_end() {\n", "file_path": "src/log/mod.rs", "rank": 88, "score": 34251.8484899255 }, { "content": " let suffix = LogSuffix::default();\n\n assert_eq!(suffix.tail().index.as_u64(), 0);\n\n\n\n let suffix = LogSuffix {\n\n head: LogPosition::default(),\n\n entries: vec![noop(0), noop(1)],\n\n };\n\n assert_eq!(suffix.tail().index.as_u64(), 2);\n\n }\n\n #[test]\n\n fn log_suffix_positions() {\n\n let suffix = LogSuffix::default();\n\n assert_eq!(suffix.positions().collect::<Vec<_>>(), [id(0, 0)]);\n\n\n\n let suffix = LogSuffix {\n\n head: LogPosition {\n\n prev_term: 0.into(),\n\n index: 30.into(),\n\n },\n\n entries: vec![noop(0), noop(2), noop(2)],\n", "file_path": "src/log/mod.rs", "rank": 89, "score": 34251.51950467414 }, { "content": " }\n\n\n\n /// 最後に追加された`HistoryRecord`を返す.\n\n pub fn last_record(&self) -> &HistoryRecord {\n\n self.records.back().expect(\"Never fails\")\n\n }\n\n\n\n /// 指定されたインデックスが属するレコードを返す.\n\n ///\n\n /// 既に削除された領域が指定された場合には`None`が返される.\n\n pub fn get_record(&self, index: LogIndex) -> Option<&HistoryRecord> {\n\n for r in self.records.iter().rev() {\n\n if r.head.index <= index {\n\n return Some(r);\n\n }\n\n }\n\n None\n\n }\n\n\n\n /// `suffix`がローカルログに追記されたことを記録する.\n", "file_path": "src/log/history.rs", "rank": 90, "score": 34251.03482395725 }, { "content": " } else {\n\n let id = if self.offset == 0 {\n\n self.suffix.head\n\n } else {\n\n let i = self.offset - 1;\n\n let prev_term = self.suffix.entries[i].term();\n\n let index = self.suffix.head.index + self.offset;\n\n LogPosition { prev_term, index }\n\n };\n\n self.offset += 1;\n\n Some(id)\n\n }\n\n }\n\n}\n\n\n\n/// ログに格納されるエントリ.\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\n#[allow(missing_docs)]\n\npub enum 
LogEntry {\n\n /// 特に内容を持たないエントリ.\n", "file_path": "src/log/mod.rs", "rank": 91, "score": 34250.84713780836 }, { "content": " Some(new_tail.prev_term),\n\n ErrorKind::InconsistentState\n\n );\n\n self.appended_tail = new_tail;\n\n\n\n if let Some(new_len) = self\n\n .records\n\n .iter()\n\n .position(|r| r.head.index > new_tail.index)\n\n {\n\n self.records.truncate(new_len);\n\n }\n\n Ok(())\n\n }\n\n\n\n /// スナップショットがインストールされたことを記録する.\n\n ///\n\n /// `new_head`はスナップショットに含まれない最初のエントリのIDで、\n\n /// `config`はスナップショット取得時のクラスタ構成、を示す.\n\n ///\n", "file_path": "src/log/history.rs", "rank": 92, "score": 34250.664110564445 }, { "content": " .records\n\n .front()\n\n .map_or(false, |r| r.head.index <= new_head.index)\n\n {\n\n self.records.pop_front();\n\n }\n\n\n\n // 新しいログの先頭をセット\n\n let record = HistoryRecord::new(new_head, config);\n\n self.records.push_front(record);\n\n\n\n if self.appended_tail.index < new_head.index {\n\n self.appended_tail = new_head;\n\n }\n\n if self.committed_tail.index < new_head.index {\n\n self.committed_tail = new_head;\n\n }\n\n Ok(())\n\n }\n\n\n", "file_path": "src/log/history.rs", "rank": 93, "score": 34250.05097646147 }, { "content": " track_assert!(\n\n self.committed_tail.index <= new_tail_index,\n\n ErrorKind::Other\n\n );\n\n track_assert!(\n\n new_tail_index <= self.appended_tail.index,\n\n ErrorKind::Other,\n\n \"new_tail_index={:?}, self.appended_tail.index={:?}\",\n\n new_tail_index,\n\n self.appended_tail.index\n\n );\n\n let prev_term = track!(self\n\n .get_record(new_tail_index,)\n\n .ok_or_else(|| ErrorKind::Other.error(),))?\n\n .head\n\n .prev_term;\n\n self.committed_tail = LogPosition {\n\n prev_term,\n\n index: new_tail_index,\n\n };\n", "file_path": "src/log/history.rs", "rank": 94, "score": 34249.264323838026 }, { "content": " track_assert!(self.head.index <= new_head, ErrorKind::InvalidInput);\n\n track_assert!(new_head <= self.tail().index, ErrorKind::InvalidInput);\n\n let count = new_head - self.head.index;\n\n if 
count == 0 {\n\n return Ok(());\n\n }\n\n let prev_term = self\n\n .entries\n\n .drain(0..count)\n\n .last()\n\n .expect(\"Never fails\")\n\n .term();\n\n self.head.prev_term = prev_term;\n\n self.head.index += count;\n\n Ok(())\n\n }\n\n\n\n /// 終端を`new_tail`の位置まで切り詰める.\n\n ///\n\n /// # Errors\n", "file_path": "src/log/mod.rs", "rank": 95, "score": 34248.24152689339 }, { "content": " } else {\n\n track_panic!(ErrorKind::NotLeader)\n\n }\n\n }\n\n\n\n /// 強制的にハートビートメッセージ(i.e., AppendEntriesCall)をブロードキャストする.\n\n ///\n\n /// 返り値は、送信メッセージのシーケンス番号.\n\n ///\n\n /// `last_heartbeat_ack`メソッドを用いることで、\n\n /// このハートビートに対して、過半数以上の応答を得られた\n\n /// タイミングを把握することが可能.\n\n ///\n\n /// また、リーダのコミットを即座にフォロワーに伝えたい場合にも、\n\n /// このメソッドが活用可能。\n\n /// (`Event::Committed`をリーダが生成した直後に`heartbeat`メソッドを呼び出せば良い)\n\n ///\n\n /// なおノードの役割が非リーダに変わった場合には、\n\n /// 応答待機中のハートビートは全て破棄されるので注意が必要.\n\n ///\n", "file_path": "src/replicated_log.rs", "rank": 96, "score": 34244.77040871494 }, { "content": "use raftlog::cluster::ClusterMembers;\n\nuse raftlog::node::NodeId;\n\nuse rand::{Rng, SeedableRng, StdRng};\n\nuse std::collections::BTreeSet;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n\nuse crate::io::IoConfig;\n\nuse crate::types::{LogicalDuration, Probability, Range, SharedRng};\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct SimulatorConfig {\n\n /// シミュレータで使われる乱数のシード.\n\n ///\n\n /// この値によって、シミュレータ上で発生するイベントが全て決定される.\n\n #[serde(default = \"SimulatorConfig::default_seed\")]\n\n pub seed: u64,\n\n\n\n #[serde(default = \"SimulatorConfig::default_loop_count\")]\n\n pub loop_count: usize,\n\n\n", "file_path": "raftlog_simu/src/simulator_config.rs", "rank": 97, "score": 32090.618713773903 }, { "content": " .map(NodeId::new)\n\n .collect::<Vec<_>>();\n\n rng.shuffle(&mut candidates[..]);\n\n candidates.truncate(self.cluster_size.choose(rng));\n\n candidates.into_iter().collect()\n\n }\n\n}\n\nimpl Default for SimulatorConfig {\n\n fn default() -> Self {\n\n SimulatorConfig {\n\n 
seed: SimulatorConfig::default_seed(),\n\n loop_count: SimulatorConfig::default_loop_count(),\n\n propose_command: SimulatorConfig::default_propose_command(),\n\n manual_heartbeat: SimulatorConfig::default_manual_heartbeat(),\n\n take_snapshot: SimulatorConfig::default_take_snapshot(),\n\n node_down: SimulatorConfig::default_node_down(),\n\n node_restart_interval: SimulatorConfig::default_node_restart_interval(),\n\n cluster_size: SimulatorConfig::default_cluster_size(),\n\n change_cluster: SimulatorConfig::default_change_cluster(),\n\n io: IoConfig::default(),\n\n nodes: SimulatorConfig::default_nodes(),\n\n }\n\n }\n\n}\n", "file_path": "raftlog_simu/src/simulator_config.rs", "rank": 98, "score": 32087.848252646214 }, { "content": " pub fn default_nodes() -> BTreeSet<String> {\n\n [\"foo\", \"bar\", \"baz\", \"qux\", \"quux\", \"corge\", \"grault\"]\n\n .iter()\n\n .map(|n| n.to_string())\n\n .collect()\n\n }\n\n\n\n pub fn make_rng(&self) -> SharedRng {\n\n let mut seed = [0; 32];\n\n for (i, v) in seed.iter_mut().enumerate().take(8) {\n\n *v = (self.seed >> (i * 8)) as u8;\n\n }\n\n let inner = StdRng::from_seed(seed);\n\n SharedRng::new(inner)\n\n }\n\n pub fn choose_members<R: Rng>(&self, rng: &mut R) -> ClusterMembers {\n\n let mut candidates = self\n\n .nodes\n\n .iter()\n\n .cloned()\n", "file_path": "raftlog_simu/src/simulator_config.rs", "rank": 99, "score": 32084.732176897018 } ]
Rust
state/api/src/chain_state.rs
xielong/starcoin
14faf39ea7231d6a24780a87f847584824e7ff18
use anyhow::{ensure, format_err, Result}; use merkle_tree::{blob::Blob, proof::SparseMerkleProof, RawKey}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use starcoin_crypto::HashValue; use starcoin_types::state_set::AccountStateSet; use starcoin_types::write_set::WriteSet; use starcoin_types::{ access_path::AccessPath, account_address::AccountAddress, account_config::{AccountResource, BalanceResource}, account_state::AccountState, language_storage::TypeTag, state_set::ChainStateSet, }; use starcoin_vm_types::account_config::{genesis_address, STC_TOKEN_CODE}; use starcoin_vm_types::genesis_config::ChainId; use starcoin_vm_types::language_storage::ModuleId; use starcoin_vm_types::on_chain_resource::{Epoch, EpochData, EpochInfo, GlobalTimeOnChain}; use starcoin_vm_types::sips::SIP; use starcoin_vm_types::token::token_code::TokenCode; use starcoin_vm_types::{ move_resource::MoveResource, on_chain_config::OnChainConfig, state_view::StateView, }; use std::convert::TryFrom; use std::sync::Arc; #[derive(Debug, Default, Eq, PartialEq, Clone, Serialize, Deserialize)] pub struct StateProof { pub account_state: Option<Blob>, pub account_proof: SparseMerkleProof, pub account_state_proof: SparseMerkleProof, } impl StateProof { pub fn new( account_state: Option<Vec<u8>>, account_proof: SparseMerkleProof, account_state_proof: SparseMerkleProof, ) -> Self { Self { account_state: account_state.map(Blob::from), account_proof, account_state_proof, } } pub fn verify( &self, expected_root_hash: HashValue, access_path: AccessPath, access_resource_blob: Option<&[u8]>, ) -> Result<()> { let (account_address, data_path) = access_path.into_inner(); match self.account_state.as_ref() { None => { ensure!( access_resource_blob.is_none(), "accessed resource should not exists" ); } Some(s) => { let account_state = AccountState::try_from(s.as_ref())?; match account_state.storage_roots()[data_path.data_type().storage_index()] { None => { ensure!( 
access_resource_blob.is_none(), "accessed resource should not exists" ); } Some(expected_hash) => { let blob = access_resource_blob.map(|data| Blob::from(data.to_vec())); self.account_state_proof.verify( expected_hash, data_path.key_hash(), blob.as_ref(), )?; } } } } self.account_proof.verify( expected_root_hash, account_address.key_hash(), self.account_state.as_ref(), ) } } #[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] pub struct StateWithProof { pub state: Option<Vec<u8>>, pub proof: StateProof, } impl StateWithProof { pub fn new(state: Option<Vec<u8>>, proof: StateProof) -> Self { Self { state, proof } } pub fn get_state(&self) -> &Option<Vec<u8>> { &self.state } } pub trait ChainStateReader: StateView { fn get_with_proof(&self, access_path: &AccessPath) -> Result<StateWithProof>; fn get_account_state(&self, address: &AccountAddress) -> Result<Option<AccountState>>; fn get_account_state_set(&self, address: &AccountAddress) -> Result<Option<AccountStateSet>>; fn exist_account(&self, address: &AccountAddress) -> Result<bool> { self.get_account_state(address).map(|state| state.is_some()) } fn state_root(&self) -> HashValue; fn dump(&self) -> Result<ChainStateSet>; } pub trait ChainStateWriter { fn set(&self, access_path: &AccessPath, value: Vec<u8>) -> Result<()>; fn remove(&self, access_path: &AccessPath) -> Result<()>; fn apply(&self, state_set: ChainStateSet) -> Result<()>; fn apply_write_set(&self, write_set: WriteSet) -> Result<()>; fn commit(&self) -> Result<HashValue>; fn flush(&self) -> Result<()>; } pub trait IntoSuper<Super: ?Sized> { fn as_super(&self) -> &Super; fn as_super_mut(&mut self) -> &mut Super; fn into_super(self: Box<Self>) -> Box<Super>; fn into_super_arc(self: Arc<Self>) -> Arc<Super>; } pub trait ChainState: ChainStateReader + ChainStateWriter + StateView + IntoSuper<dyn StateView> + IntoSuper<dyn ChainStateReader> + IntoSuper<dyn ChainStateWriter> { } impl<'a, T: 'a + ChainStateReader> IntoSuper<dyn ChainStateReader + 'a> 
for T { fn as_super(&self) -> &(dyn ChainStateReader + 'a) { self } fn as_super_mut(&mut self) -> &mut (dyn ChainStateReader + 'a) { self } fn into_super(self: Box<Self>) -> Box<dyn ChainStateReader + 'a> { self } fn into_super_arc(self: Arc<Self>) -> Arc<dyn ChainStateReader + 'a> { self } } impl<'a, T: 'a + ChainStateWriter> IntoSuper<dyn ChainStateWriter + 'a> for T { fn as_super(&self) -> &(dyn ChainStateWriter + 'a) { self } fn as_super_mut(&mut self) -> &mut (dyn ChainStateWriter + 'a) { self } fn into_super(self: Box<Self>) -> Box<dyn ChainStateWriter + 'a> { self } fn into_super_arc(self: Arc<Self>) -> Arc<dyn ChainStateWriter + 'a> { self } } impl<'a, T: 'a + StateView> IntoSuper<dyn StateView + 'a> for T { fn as_super(&self) -> &(dyn StateView + 'a) { self } fn as_super_mut(&mut self) -> &mut (dyn StateView + 'a) { self } fn into_super(self: Box<Self>) -> Box<dyn StateView + 'a> { self } fn into_super_arc(self: Arc<Self>) -> Arc<dyn StateView + 'a> { self } } impl<T: ?Sized> StateReaderExt for T where T: ChainStateReader {} pub trait StateReaderExt: ChainStateReader { fn get_account_resource(&self, address: AccountAddress) -> Result<Option<AccountResource>> { self.get_resource::<AccountResource>(address) } fn get_resource<R>(&self, address: AccountAddress) -> Result<Option<R>> where R: MoveResource + DeserializeOwned, { let access_path = AccessPath::new(address, R::resource_path()); let r = self.get(&access_path).and_then(|state| match state { Some(state) => Ok(Some(bcs_ext::from_bytes::<R>(state.as_slice())?)), None => Ok(None), })?; Ok(r) } fn get_sequence_number(&self, address: AccountAddress) -> Result<u64> { self.get_account_resource(address)? 
.map(|resource| resource.sequence_number()) .ok_or_else(|| format_err!("Can not find account by address:{}", address)) } fn get_on_chain_config<C>(&self) -> Result<Option<C>> where C: OnChainConfig, Self: Sized, { C::fetch_config(self) } fn get_balance(&self, address: AccountAddress) -> Result<Option<u128>> { self.get_balance_by_token_code(address, STC_TOKEN_CODE.clone()) } fn get_balance_by_type( &self, address: AccountAddress, type_tag: TypeTag, ) -> Result<Option<u128>> { Ok(self .get(&AccessPath::new( address, BalanceResource::access_path_for(type_tag), )) .and_then(|bytes| match bytes { Some(bytes) => Ok(Some(bcs_ext::from_bytes::<BalanceResource>( bytes.as_slice(), )?)), None => Ok(None), })? .map(|resource| resource.token())) } fn get_balance_by_token_code( &self, address: AccountAddress, token_code: TokenCode, ) -> Result<Option<u128>> { self.get_balance_by_type(address, token_code.into()) } fn get_epoch(&self) -> Result<Epoch> { self.get_resource::<Epoch>(genesis_address())? .ok_or_else(|| format_err!("Epoch is none.")) } fn get_epoch_info(&self) -> Result<EpochInfo> { let epoch = self .get_resource::<Epoch>(genesis_address())? .ok_or_else(|| format_err!("Epoch is none."))?; let epoch_data = self .get_resource::<EpochData>(genesis_address())? .ok_or_else(|| format_err!("Epoch is none."))?; Ok(EpochInfo::new(epoch, epoch_data)) } fn get_timestamp(&self) -> Result<GlobalTimeOnChain> { self.get_resource(genesis_address())? .ok_or_else(|| format_err!("Timestamp resource should exist.")) } fn get_chain_id(&self) -> Result<ChainId> { self.get_resource::<ChainId>(genesis_address())? .ok_or_else(|| format_err!("ChainId resource should exist at genesis address. 
")) } fn get_code(&self, module_id: ModuleId) -> Result<Option<Vec<u8>>> { self.get(&AccessPath::from(&module_id)) } fn is_activated(&self, sip: SIP) -> Result<bool> { self.get_code(sip.module_id()).map(|code| code.is_some()) } } pub struct AccountStateReader<'a, Reader> { reader: &'a Reader, } impl<'a, Reader> AccountStateReader<'a, Reader> where Reader: ChainStateReader, { pub fn new(reader: &'a Reader) -> Self { Self { reader } } pub fn get_account_resource( &self, address: &AccountAddress, ) -> Result<Option<AccountResource>> { self.reader.get_account_resource(*address) } pub fn get_resource<R>(&self, address: AccountAddress) -> Result<Option<R>> where R: MoveResource + DeserializeOwned, { self.reader.get_resource(address) } pub fn get_sequence_number(&self, address: AccountAddress) -> Result<u64> { self.reader.get_sequence_number(address) } pub fn get_on_chain_config<C>(&self) -> Result<Option<C>> where C: OnChainConfig, { self.reader.get_on_chain_config() } pub fn get_balance(&self, address: &AccountAddress) -> Result<Option<u128>> { self.reader.get_balance(*address) } pub fn get_balance_by_type( &self, address: &AccountAddress, type_tag: TypeTag, ) -> Result<Option<u128>> { self.reader.get_balance_by_type(*address, type_tag) } pub fn get_balance_by_token_code( &self, address: &AccountAddress, token_code: TokenCode, ) -> Result<Option<u128>> { self.reader.get_balance_by_token_code(*address, token_code) } pub fn get_epoch(&self) -> Result<Epoch> { self.reader.get_epoch() } pub fn get_epoch_info(&self) -> Result<EpochInfo> { self.reader.get_epoch_info() } pub fn get_timestamp(&self) -> Result<GlobalTimeOnChain> { self.reader.get_timestamp() } pub fn get_chain_id(&self) -> Result<ChainId> { self.reader.get_chain_id() } }
use anyhow::{ensure, format_err, Result}; use merkle_tree::{blob::Blob, proof::SparseMerkleProof, RawKey}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use starcoin_crypto::HashValue; use starcoin_types::state_set::AccountStateSet; use starcoin_types::write_set::WriteSet; use starcoin_types::{ access_path::AccessPath, account_address::AccountAddress, account_config::{AccountResource, BalanceResource}, account_state::AccountState, language_storage::TypeTag, state_set::ChainStateSet, }; use starcoin_vm_types::account_config::{genesis_address, STC_TOKEN_CODE}; use starcoin_vm_types::genesis_config::ChainId; use starcoin_vm_types::language_storage::ModuleId; use starcoin_vm_types::on_chain_resource::{Epoch, EpochData, EpochInfo, GlobalTimeOnChain}; use starcoin_vm_types::sips::SIP; use starcoin_vm_types::token::token_code::TokenCode; use starcoin_vm_types::{ move_resource::MoveResource, on_chain_config::OnChainConfig, state_view::StateView, }; use std::convert::TryFrom; use std::sync::Arc; #[derive(Debug, Default, Eq, PartialEq, Clone, Serialize, Deserialize)] pub struct StateProof { pub account_state: Option<Blob>, pub account_proof: SparseMerkleProof, pub account_state_proof: SparseMerkleProof, } impl StateProof { pub fn new( account_state: Option<Vec<u8>>, account_proof: SparseMerkleProof, account_state_proof: SparseMerkleProof, ) -> Self { Self { account_state: account_state.map(Blob::from), account_proof, account_state_proof, } } pub fn verify( &self, expected_root_hash: HashValue, access_path: AccessPath, access_resource_blob: Option<&[u8]>, ) -> Result<()> { let (account_address, data_path) = access_path.into_inner(); match self.account_state.as_ref() { None => { ensure!( access_resource_blob.is_none(), "accessed resource should not exists" ); } Some(s) => { let account_state = AccountState::try_from(s.as_ref())?; match account_state.storage_roots()[data_path.data_type().storage_index()] { None => { ensure!( 
access_resource_blob.is_none(), "accessed resource should not exists" ); } Some(expected_hash) => { let blob = access_resource_blob.map(|data| Blob::from(data.to_vec())); self.account_state_proof.verify( expected_hash, data_path.key_hash(), blob.as_ref(), )?; } } } } self.account_proof.verify( expected_root_hash, account_address.key_hash(), self.account_state.as_ref(), ) } } #[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)] pub struct StateWithProof { pub state: Option<Vec<u8>>, pub proof: StateProof, } impl StateWithProof { pub fn new(state: Option<Vec<u8>>, proof: StateProof) -> Self { Self { state, proof } } pub fn get_state(&self) -> &Option<Vec<u8>> { &self.state } } pub trait ChainStateReader: StateView { fn get_with_proof(&self, access_path: &AccessPath) -> Result<StateWithProof>; fn get_account_state(&self, address: &AccountAddress) -> Result<Option<AccountState>>; fn get_account_state_set(&self, address: &AccountAddress) -> Result<Option<AccountStateSet>>; fn exist_account(&self, address: &AccountAddress) -> Result<bool> { self.get_account_state(address).map(|state| state.is_some()) } fn state_root(&self) -> HashValue; fn dump(&self) -> Result<ChainStateSet>; } pub trait ChainStateWriter { fn set(&self, access_path: &AccessPath, value: Vec<u8>) -> Result<()>; fn remove(&self, access_path: &AccessPath) -> Result<()>; fn apply(&self, state_set: ChainStateSet) -> Result<()>; fn apply_write_set(&self, write_set: WriteSet) -> Result<()>; fn commit(&self) -> Result<HashValue>; fn flush(&self) -> Result<()>; } pub trait IntoSuper<Super: ?Sized> { fn as_super(&self) -> &Super; fn as_super_mut(&mut self) -> &mut Super; fn into_super(self: Box<Self>) -> Box<Super>; fn into_super_arc(self: Arc<Self>) -> Arc<Super>; } pub trait ChainState: ChainStateReader + ChainStateWriter + StateView + IntoSuper<dyn StateView> + IntoSuper<dyn ChainStateReader> + IntoSuper<dyn ChainStateWriter> { } impl<'a, T: 'a + ChainStateReader> IntoSuper<dyn ChainStateReader + 'a> 
for T { fn as_super(&self) -> &(dyn ChainStateReader + 'a) { self } fn as_super_mut(&mut self) -> &mut (dyn ChainStateReader + 'a) { self } fn into_super(self: Box<Self>) -> Box<dyn ChainStateReader + 'a> { self } fn into_super_arc(self: Arc<Self>) -> Arc<dyn ChainStateReader + 'a> { self } } impl<'a, T: 'a + ChainStateWriter> IntoSuper<dyn ChainStateWriter + 'a> for T { fn as_super(&self) -> &(dyn ChainStateWriter + 'a) { self } fn as_super_mut(&mut self) -> &mut (dyn ChainStateWriter + 'a) { self } fn into_super(self: Box<Self>) -> Box<dyn ChainStateWriter + 'a> { self } fn into_super_arc(self: Arc<Self>) -> Arc<dyn ChainStateWriter + 'a> { self } } impl<'a, T: 'a + StateView> IntoSuper<dyn StateView + 'a> for T { fn as_super(&self) -> &(dyn StateView + 'a) { self } fn as_super_mut(&mut self) -> &mut (dyn StateView + 'a) { self } fn into_super(self: Box<Self>) -> Box<dyn StateView + 'a> { self } fn into_super_arc(self: Arc<Self>) -> Arc<dyn StateView + 'a> { self } } impl<T: ?Sized> StateReaderExt for T where T: ChainStateReader {} pub trait StateReaderExt: ChainStateReader { fn get_account_resource(&self, address: AccountAddress) -> Result<Option<AccountResource>> { self.get_resource::<AccountResource>(address) } fn get_resource<R>(&self, address: AccountAddress) -> Result<Option<R>> where R: MoveResource + DeserializeOwned, { let access_path = AccessPath::new(address, R::resource_path()); let r = self.get(&access_path).and_then(|state| match state { Some(state) => Ok(Some(bcs_ext::from_bytes::<R>(state.as_slice())?)), None => Ok(None), })?; Ok(r) } fn get_sequence_number(&self, address: AccountAddress) -> Result<u64> { self.get_account_resource(address)? 
.map(|resource| resource.sequence_number()) .ok_or_else(|| format_err!("Can not find account by address:{}", address)) } fn get_on_chain_config<C>(&self) -> Result<Option<C>> where C: OnChainConfig, Self: Sized, { C::fetch_config(self) } fn get_balance(&self, address: AccountAddress) -> Result<Option<u128>> { self.get_balance_by_token_code(address, STC_TOKEN_CODE.clone()) } fn get_balance_by_type( &self, address: AccountAddress, type_tag: TypeTag, ) -> Result<Option<u128>> { Ok(self .get(&AccessPath::new( address, BalanceResource::access_path_for(type_tag), )) .and_then(|bytes| match bytes { Some(bytes) => Ok(Some(bcs_ext::from_bytes::<BalanceResource>( bytes.as_slice(), )?)), None => Ok(None), })? .map(|resource| resource.token())) } fn get_balance_by_token_code( &self, address: AccountAddress, token_code: TokenCode, ) -> Result<Option<u128>> { self.get_balance_by_type(address, token_code.into()) } fn get_epoch(&self) -> Result<Epoch> { self.get_resource::<Epoch>(genesis_address())? .ok_or_else(|| format_err!("Epoch is none.")) } fn get_epoch_info(&self) -> Result<EpochInfo> { let epoch = self .get_resource::<Epoch>(genesis_address())? .ok_or_else(|| format_err!("Epoch is none."))?; let epoch_data = self .get_resource::<EpochData>(genesis_address())? .ok_or_else(|| format_err!("Epoch is none."))?; Ok(EpochInfo::new(epoch, epoch_data)) } fn get_timestamp(&self) -> Result<GlobalTimeOnChain> { self.get_resource(genesis_address())? .ok_or_else(|| format_err!("Timestamp resource should exist.")) } fn get_chain_id(&self) -> Result<ChainId> { self.get_resource::<ChainId>(genesis_address())? .ok_or_else(|| format_err!("ChainId resource should exist at genesis address. 
")) } fn get_code(&self, module_id: ModuleId) -> Result<Option<Vec<u8>>> { self.get(&AccessPath::from(&module_id)) } fn is_activated(&self, sip: SIP) -> Result<bool> { self.get_code(sip.module_id()).map(|code| code.is_some()) } } pub struct AccountStateReader<'a, Reader> { reader: &'a Reader, } impl<'a, Reader> AccountStateReader<'a, Reader> where Reader: ChainStateReader, { pub fn new(reader: &'a Reader) -> Self { Self { reader } } pub fn get_account_resource( &self, address: &AccountAddress, ) -> Result<Option<AccountResource>> { self.reader.get_account_resource(*address) } pub fn get_resource<R>(&self, address: AccountAddress) -> Result<Option<R>> where R: MoveResource + DeserializeOwned, { self.reader.get_resource(address) } pub fn get_sequence_number(&self, address: AccountAddress) -> Result<u64> { self.reader.get_sequence_number(address) }
pub fn get_balance(&self, address: &AccountAddress) -> Result<Option<u128>> { self.reader.get_balance(*address) } pub fn get_balance_by_type( &self, address: &AccountAddress, type_tag: TypeTag, ) -> Result<Option<u128>> { self.reader.get_balance_by_type(*address, type_tag) } pub fn get_balance_by_token_code( &self, address: &AccountAddress, token_code: TokenCode, ) -> Result<Option<u128>> { self.reader.get_balance_by_token_code(*address, token_code) } pub fn get_epoch(&self) -> Result<Epoch> { self.reader.get_epoch() } pub fn get_epoch_info(&self) -> Result<EpochInfo> { self.reader.get_epoch_info() } pub fn get_timestamp(&self) -> Result<GlobalTimeOnChain> { self.reader.get_timestamp() } pub fn get_chain_id(&self) -> Result<ChainId> { self.reader.get_chain_id() } }
pub fn get_on_chain_config<C>(&self) -> Result<Option<C>> where C: OnChainConfig, { self.reader.get_on_chain_config() }
function_block-full_function
[ { "content": "pub fn access_path_for_module_upgrade_strategy(address: AccountAddress) -> AccessPath {\n\n AccessPath::resource_access_path(address, ModuleUpgradeStrategy::struct_tag())\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct TwoPhaseUpgradeV2Resource {\n\n config: TwoPhaseUpgradeConfigResource,\n\n plan: Option<UpgradePlanV2Resource>,\n\n version_cap: ModifyConfigCapabilityResource,\n\n upgrade_event: EventHandle,\n\n}\n\nimpl TwoPhaseUpgradeV2Resource {\n\n pub fn enforced(&self) -> bool {\n\n match &self.plan {\n\n Some(plan) => plan.enforced,\n\n None => false,\n\n }\n\n }\n\n}\n\nimpl MoveResource for TwoPhaseUpgradeV2Resource {\n\n const MODULE_NAME: &'static str = \"PackageTxnManager\";\n\n const STRUCT_NAME: &'static str = \"TwoPhaseUpgradeV2\";\n\n}\n\n\n", "file_path": "vm/types/src/account_config/resources/module_upgrade_strategy.rs", "rank": 0, "score": 417676.32461471495 }, { "content": "pub fn access_path_for_two_phase_upgrade_v2(address: AccountAddress) -> AccessPath {\n\n AccessPath::resource_access_path(address, TwoPhaseUpgradeV2Resource::struct_tag())\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct TwoPhaseUpgradeConfigResource {\n\n min_time_limit: u64,\n\n}\n\nimpl MoveResource for TwoPhaseUpgradeConfigResource {\n\n const MODULE_NAME: &'static str = \"PackageTxnManager\";\n\n const STRUCT_NAME: &'static str = \"TwoPhaseUpgradeConfig\";\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct UpgradePlanV2Resource {\n\n package_hash: Vec<u8>,\n\n active_after_time: u64,\n\n version: u64,\n\n enforced: bool,\n\n}\n\nimpl MoveResource for UpgradePlanV2Resource {\n\n const MODULE_NAME: &'static str = \"PackageTxnManager\";\n\n const STRUCT_NAME: &'static str = \"UpgradePlanV2\";\n\n}\n\n\n", "file_path": "vm/types/src/account_config/resources/module_upgrade_strategy.rs", "rank": 1, "score": 413162.00266730215 }, { "content": "pub fn get_balance(address: AccountAddress, chain_state: &dyn 
ChainState) -> u128 {\n\n chain_state\n\n .get_balance(address)\n\n .expect(\"read balance resource should ok\")\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 2, "score": 394306.63406488404 }, { "content": "pub fn genesis_address() -> AccountAddress {\n\n CORE_CODE_ADDRESS\n\n}\n", "file_path": "vm/types/src/account_config/constants/addresses.rs", "rank": 4, "score": 363150.7785622754 }, { "content": "#[test]\n\npub fn test_state_proof() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH);\n\n\n\n let hash_value = HashValue::random().into();\n\n\n\n let account1 = update_nibble(&hash_value, 0, 1);\n\n // re-update to make sure account2 never equal to account1\n\n let account1 = update_nibble(&account1, 2, 1);\n\n\n\n let account2 = update_nibble(&account1, 2, 2);\n\n for (k, v) in vec![(account1, vec![0, 0, 0]), (account2, vec![1, 1, 1])] {\n\n state.put(k, v);\n\n }\n\n let (value, _) = state.get_with_proof(&account1)?;\n\n assert!(value.is_none());\n\n let new_root_hash = state.commit()?;\n\n let (value, proof) = state.get_with_proof(&account1)?;\n\n assert!(value.is_some());\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 5, "score": 352208.4928017484 }, { "content": "pub fn get_sequence_number(addr: AccountAddress, chain_state: &dyn ChainState) -> u64 {\n\n chain_state\n\n .get_account_resource(addr)\n\n .expect(\"read account state should ok\")\n\n .map(|res| res.sequence_number())\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 6, "score": 349142.2813541572 }, { "content": "#[test]\n\npub fn test_put_blob() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::<HashValueKey>::new(Arc::new(s), None);\n\n assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH);\n\n\n\n let hash_value = 
HashValue::random().into();\n\n\n\n let account1 = update_nibble(&hash_value, 0, 1);\n\n let account1 = update_nibble(&account1, 2, 2);\n\n state.put(account1, vec![0, 0, 0]);\n\n\n\n assert_eq!(state.get(&account1)?, Some(vec![0, 0, 0]));\n\n assert_eq!(state.get(&update_nibble(&hash_value, 0, 8))?, None);\n\n\n\n let new_root_hash = state.commit()?;\n\n assert_eq!(state.root_hash(), new_root_hash);\n\n assert_eq!(state.get(&account1)?, Some(vec![0, 0, 0]));\n\n assert_eq!(state.get(&update_nibble(&hash_value, 0, 8))?, None);\n\n\n\n let (root, updates) = state.change_sets();\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 7, "score": 347816.01634253626 }, { "content": "/// Get the AccessPath to a resource stored under `address` with type name `tag`\n\nfn create_access_path(address: AccountAddress, tag: StructTag) -> AccessPath {\n\n AccessPath::resource_access_path(address, tag)\n\n}\n", "file_path": "vm/vm-runtime/src/lib.rs", "rank": 8, "score": 344026.3415165982 }, { "content": "pub fn deserialize_from_string<'de, D, R>(d: D) -> std::result::Result<R, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n R: FromStr + Deserialize<'de>,\n\n R::Err: Sized + std::error::Error,\n\n{\n\n if d.is_human_readable() {\n\n let s = <String>::deserialize(d)?;\n\n R::from_str(&s).map_err(D::Error::custom)\n\n } else {\n\n R::deserialize(d)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "commons/serde-helpers/src/lib.rs", "rank": 10, "score": 319539.502068173 }, { "content": "pub fn association_address() -> AccountAddress {\n\n AccountAddress::from_hex_literal(\"0xA550C18\")\n\n .expect(\"Parsing valid hex literal should always succeed\")\n\n}\n", "file_path": "vm/types/src/account_config/constants/addresses.rs", "rank": 11, "score": 318304.4707669556 }, { "content": "#[stest::test]\n\nfn test_eth_state_proof_verify() -> Result<()> {\n\n let (chain_state, net) = prepare_genesis();\n\n // deploy the module\n\n {\n\n let source = 
include_str!(\"../../modules/EthStateVerifier.move\");\n\n let modules = compile_modules_with_address(association_address(), source);\n\n\n\n let package = Package::new(modules, None)?;\n\n association_execute(&net, &chain_state, TransactionPayload::Package(package))?;\n\n }\n\n\n\n // load the example proof\n\n let account_proof: EthAccount = {\n\n let proofs = include_str!(\"proof.json\");\n\n let value: serde_json::Value = serde_json::from_str(proofs)?;\n\n serde_json::from_value(value)?\n\n };\n\n\n\n // verify account proof\n\n {\n", "file_path": "contrib-contracts/src/eth_state_verifier_test/mod.rs", "rank": 12, "score": 317759.4803911141 }, { "content": "#[ignore]\n\n#[test]\n\npub fn test_wallet_account() -> Result<()> {\n\n use bcs_ext::BCSCodec;\n\n use core::convert::{From, TryFrom};\n\n use starcoin_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey, Ed25519Signature};\n\n use starcoin_crypto::{hash::CryptoHash, HashValue};\n\n use starcoin_types::transaction::authenticator::AuthenticationKey;\n\n\n\n let bytes = hex::decode(\"2c78c6fd8829de80451cda02310250b27307360ddc972d614fa0c8462ae41b3e\")?;\n\n let private_key = Ed25519PrivateKey::try_from(&bytes[..])?;\n\n let public_key = Ed25519PublicKey::from(&private_key);\n\n\n\n let message = [1, 2, 3, 4];\n\n // need add fuzzing features on libra-crypto for this.\n\n let result = <Ed25519PrivateKey as SigningKey>::sign_arbitrary_message(&private_key, &message);\n\n\n\n let address = starcoin_types::account_address::from_public_key(&public_key);\n\n let hash_value = HashValue::sha3_256_of(&public_key.to_bytes());\n\n let key = AuthenticationKey::new(*HashValue::sha3_256_of(&public_key.to_bytes()).as_ref());\n\n\n\n let sign_bytes = vec![\n", "file_path": "account/src/account_test.rs", "rank": 14, "score": 315953.36221747356 }, { "content": "#[test]\n\npub fn test_import_account() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), 
RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage)?;\n\n\n\n // should success\n\n let wallet = manager.create_account(\"hello\")?;\n\n let private_key = super::account_manager::gen_private_key();\n\n let result = manager.import_account(*wallet.address(), private_key.to_bytes().to_vec(), \"abc\");\n\n assert!(result.is_err());\n\n\n\n assert!(\n\n matches!(result.err().unwrap(), AccountError::AccountAlreadyExist(addr) if addr == *wallet.address())\n\n );\n\n\n\n let normal_address = AccountAddress::random();\n\n let _account =\n\n manager.import_account(normal_address, private_key.to_bytes().to_vec(), \"abc\")?;\n\n assert_eq!(manager.list_account_infos()?.len(), 2);\n\n Ok(())\n\n}\n\n\n", "file_path": "account/src/account_test.rs", "rank": 15, "score": 315953.36221747356 }, { "content": "pub trait IntoSuper<Super: ?Sized> {\n\n fn as_super(&self) -> &Super;\n\n fn as_super_mut(&mut self) -> &mut Super;\n\n fn into_super(self: Box<Self>) -> Box<Super>;\n\n fn into_super_arc(self: Arc<Self>) -> Arc<Super>;\n\n}\n\n\n\nimpl<'a, T: 'a + StateNodeStore> IntoSuper<dyn StateNodeStore + 'a> for T {\n\n fn as_super(&self) -> &(dyn StateNodeStore + 'a) {\n\n self\n\n }\n\n fn as_super_mut(&mut self) -> &mut (dyn StateNodeStore + 'a) {\n\n self\n\n }\n\n fn into_super(self: Box<Self>) -> Box<dyn StateNodeStore + 'a> {\n\n self\n\n }\n\n fn into_super_arc(self: Arc<Self>) -> Arc<dyn StateNodeStore + 'a> {\n\n self\n\n }\n", "file_path": "storage/src/lib.rs", "rank": 16, "score": 315648.7462130314 }, { "content": "pub fn core_code_address() -> AccountAddress {\n\n CORE_CODE_ADDRESS\n\n}\n\n\n", "file_path": "vm/types/src/account_config/constants/addresses.rs", "rank": 17, "score": 314392.43545395 }, { "content": "/// Get the target of next pow work\n\npub fn get_next_work_required(chain: &dyn ChainReader) -> Result<U256> {\n\n let epoch = chain.epoch();\n\n let current_header = chain.current_header();\n\n if current_header.number() <= 1 {\n\n return 
Ok(difficult_to_target(current_header.difficulty()));\n\n }\n\n let start_window_num = if current_header.number() < epoch.block_difficulty_window() {\n\n 0\n\n } else {\n\n current_header\n\n .number()\n\n .saturating_sub(epoch.block_difficulty_window())\n\n .checked_add(1)\n\n .ok_or_else(|| format_err!(\"block number overflow\"))?\n\n };\n\n let blocks = (start_window_num\n\n ..current_header\n\n .number()\n\n .checked_add(1)\n\n .ok_or_else(|| format_err!(\"block number overflow\"))?)\n", "file_path": "consensus/src/difficulty.rs", "rank": 18, "score": 314186.43476823147 }, { "content": "#[test]\n\npub fn test_wallet() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage.clone())?;\n\n\n\n // should success\n\n let wallet = manager.create_account(\"hello\")?;\n\n\n\n let wallet_address = wallet.address();\n\n\n\n // test reload\n\n let loaded_wallet = Account::load(*wallet_address, \"hello\", storage)?;\n\n assert!(loaded_wallet.is_some());\n\n let reloaded_wallet = loaded_wallet.unwrap();\n\n assert_eq!(\n\n reloaded_wallet.private_key().to_bytes(),\n\n wallet.private_key().to_bytes()\n\n );\n\n\n\n // test default wallet\n", "file_path": "account/src/account_test.rs", "rank": 19, "score": 314175.16201696126 }, { "content": "fn read_two_phase_upgrade_v2_resource(state_view: &dyn StateView) -> Result<bool> {\n\n let two_phase_upgrade_v2_path = access_path_for_two_phase_upgrade_v2(genesis_address());\n\n match state_view.get(&two_phase_upgrade_v2_path)? 
{\n\n Some(data) => Ok(bcs_ext::from_bytes::<TwoPhaseUpgradeV2Resource>(&data)?.enforced()),\n\n _ => Err(format_err!(\"read two phase upgrade resource fail.\")),\n\n }\n\n}\n\n\n", "file_path": "executor/src/stdlib_test/module_upgrade_test.rs", "rank": 20, "score": 311635.0169082418 }, { "content": "#[test]\n\npub fn test_wallet_unlock() -> Result<()> {\n\n let tempdir = tempfile::tempdir()?;\n\n let storage = AccountStorage::create_from_path(tempdir.path(), RocksdbConfig::default())?;\n\n let manager = AccountManager::new(storage)?;\n\n\n\n let wallet = manager.create_account(\"hello\")?;\n\n\n\n let unlock_result = manager.unlock_account(*wallet.address(), \"hell0\", Duration::from_secs(1));\n\n assert!(unlock_result.is_err());\n\n manager.unlock_account(*wallet.address(), \"hello\", Duration::from_secs(1))?;\n\n let fake_txn = RawUserTransaction::new_with_default_gas_token(\n\n *wallet.address(),\n\n 1,\n\n TransactionPayload::Script(Script::new(vec![], vec![], vec![])),\n\n 1000,\n\n 1,\n\n 100000,\n\n ChainId::new(1),\n\n );\n\n let _signed = manager.sign_txn(*wallet.address(), fake_txn)?;\n", "file_path": "account/src/account_test.rs", "rank": 21, "score": 309129.78826249356 }, { "content": "pub fn deserialize_from_string_opt<'de, D, R>(d: D) -> std::result::Result<Option<R>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n R: FromStr + Deserialize<'de>,\n\n R::Err: Sized + std::error::Error,\n\n{\n\n if d.is_human_readable() {\n\n let s = <Option<String>>::deserialize(d)?;\n\n s.map(|s| R::from_str(&s).map_err(D::Error::custom))\n\n .transpose()\n\n } else {\n\n Option::<R>::deserialize(d)\n\n }\n\n}\n\n\n", "file_path": "commons/serde-helpers/src/lib.rs", "rank": 22, "score": 307813.28584576526 }, { "content": "#[test]\n\npub fn test_state_commit() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH);\n\n\n\n let hash_value = 
HashValue::random().into();\n\n\n\n let account1 = update_nibble(&hash_value, 0, 1);\n\n let account1 = update_nibble(&account1, 2, 2);\n\n state.put(account1, vec![0, 0, 0]);\n\n let _new_root_hash = state.commit()?;\n\n\n\n let account3 = update_nibble(&account1, 2, 3);\n\n for (k, v) in vec![(account1, vec![1, 1, 0]), (account3, vec![0, 0, 0])] {\n\n state.put(k, v);\n\n }\n\n let new_root_hash = state.commit()?;\n\n\n\n state.flush()?;\n\n assert_eq!(state.root_hash(), new_root_hash);\n\n assert_eq!(state.get(&account1)?, Some(vec![1, 1, 0]));\n\n assert_eq!(state.get(&account3)?, Some(vec![0, 0, 0]));\n\n assert_eq!(state.get(&update_nibble(&account1, 2, 10))?, None);\n\n Ok(())\n\n}\n\n\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 23, "score": 307283.2099750214 }, { "content": "#[test]\n\npub fn test_state_dump() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n let hash_value = HashValueKey(HashValue::random());\n\n let value = vec![1u8, 2u8];\n\n state.put(hash_value, value);\n\n state.commit()?;\n\n let state_set = state.dump()?;\n\n assert_eq!(1, state_set.len());\n\n Ok(())\n\n}\n\n\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 24, "score": 307283.2099750214 }, { "content": "pub fn encode(idx: u64, address: AccountAddress, amount: u128) -> anyhow::Result<Vec<u8>> {\n\n let mut index = bcs_ext::to_bytes(&idx)?;\n\n let mut address = bcs_ext::to_bytes(&address)?;\n\n let mut amount = bcs_ext::to_bytes(&amount)?;\n\n index.append(&mut address);\n\n index.append(&mut amount);\n\n Ok(index)\n\n}\n\n\n\npub struct Sha3Algorithm(Sha3);\n\n\n\nimpl Default for Sha3Algorithm {\n\n fn default() -> Self {\n\n Self(Sha3::sha3_256())\n\n }\n\n}\n\n\n\nimpl Hasher for Sha3Algorithm {\n\n #[inline]\n\n fn finish(&self) -> u64 {\n", "file_path": "cmd/merkle-generator/src/lib.rs", "rank": 25, "score": 305234.96420436946 }, { "content": "pub fn 
generate_server_module(rpc_trait: &mut ItemTrait) -> anyhow::Result<TokenStream> {\n\n let delegate_methods: Vec<TokenStream> = rpc_trait\n\n .items\n\n .iter()\n\n .filter_map(|trait_item| {\n\n if let syn::TraitItem::Method(method) = trait_item {\n\n Some(generate_to_delegate(method))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n let mut rpc_server_trait = rpc_trait.clone();\n\n let io_delegate_type = quote!(network_rpc_core::delegates::IoDelegate);\n\n let to_delegate_body = quote! {\n\n let mut del = #io_delegate_type::new(self.into());\n\n #(#delegate_methods)*\n\n del\n\n };\n\n let to_delegate_method: syn::TraitItemMethod = parse_quote! {\n", "file_path": "network-rpc/derive/src/to_server.rs", "rank": 26, "score": 304671.18195318675 }, { "content": "pub fn print_table(value: Value) -> Result<()> {\n\n if value.is_null() {\n\n return Ok(());\n\n }\n\n match value {\n\n Value::Array(values) => print_vec_table(values),\n\n value => print_value_table(value),\n\n }\n\n}\n\n\n", "file_path": "commons/scmd/src/result.rs", "rank": 27, "score": 303267.5785138615 }, { "content": "pub fn print_json(value: Value) -> Result<()> {\n\n let json = serde_json::to_string_pretty(&value)?;\n\n println!(\"{}\", json);\n\n Ok(())\n\n}\n\n\n", "file_path": "commons/scmd/src/result.rs", "rank": 28, "score": 303267.5785138615 }, { "content": "#[test]\n\npub fn test_repeat_commit() -> Result<()> {\n\n let s = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(s), None);\n\n let hash_value = HashValueKey(HashValue::random());\n\n let value = vec![1u8, 2u8];\n\n state.put(hash_value, value.clone());\n\n state.commit()?;\n\n\n\n let root_hash1 = state.root_hash();\n\n state.put(hash_value, value);\n\n state.commit()?;\n\n let root_hash2 = state.root_hash();\n\n assert_eq!(root_hash1, root_hash2);\n\n Ok(())\n\n}\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 29, "score": 302875.8623966829 }, { "content": "/// Helper function to deserialize 
versions from above encoding.\n\nfn deserialize_u64_varint<T>(reader: &mut T) -> Result<u64>\n\nwhere\n\n T: Read,\n\n{\n\n let mut num = 0u64;\n\n for i in 0..8 {\n\n let byte = reader.read_u8()?;\n\n let more = (byte & 0x80) != 0;\n\n num |= u64::from(byte & 0x7f) << (i * 7);\n\n if !more {\n\n return Ok(num);\n\n }\n\n }\n\n // Last byte is encoded as is.\n\n let byte = reader.read_u8()?;\n\n num |= u64::from(byte) << 56;\n\n Ok(num)\n\n}\n\n\n", "file_path": "commons/forkable-jellyfish-merkle/src/node_type/mod.rs", "rank": 30, "score": 301523.3785266625 }, { "content": "pub trait HashAccountAddress {\n\n fn hash(&self) -> HashValue;\n\n}\n\n\n\nimpl HashAccountAddress for AccountAddress {\n\n fn hash(&self) -> HashValue {\n\n let mut state = hasher::AccountAddressHasher::default();\n\n state.update(self.as_ref());\n\n state.finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use hex::FromHex;\n\n\n\n #[test]\n\n fn address_hash() {\n\n let address: AccountAddress = \"ca843279e3427144cead5e4d5999a3d0\".parse().unwrap();\n", "file_path": "vm/types/src/account_address.rs", "rank": 31, "score": 299171.8851595842 }, { "content": "pub fn voting_period(state_view: &dyn StateView, token: TypeTag) -> u64 {\n\n let mut ret = execute_readonly_function(\n\n state_view,\n\n &ModuleId::new(genesis_address(), Identifier::new(\"Dao\").unwrap()),\n\n &Identifier::new(\"voting_period\").unwrap(),\n\n vec![token],\n\n vec![],\n\n )\n\n .unwrap();\n\n assert_eq!(ret.len(), 1);\n\n ret.pop().unwrap().1.cast().unwrap()\n\n}\n\n\n", "file_path": "test-helper/src/dao.rs", "rank": 32, "score": 297570.83185840165 }, { "content": "pub fn quorum_vote(state_view: &dyn StateView, token: TypeTag) -> u128 {\n\n let mut ret = execute_readonly_function(\n\n state_view,\n\n &ModuleId::new(genesis_address(), Identifier::new(\"Dao\").unwrap()),\n\n &Identifier::new(\"quorum_votes\").unwrap(),\n\n vec![token],\n\n vec![],\n\n )\n\n .unwrap();\n\n assert_eq!(ret.len(), 
1);\n\n ret.pop().unwrap().1.cast().unwrap()\n\n}\n\n\n", "file_path": "test-helper/src/dao.rs", "rank": 33, "score": 297570.83185840165 }, { "content": "pub fn voting_delay(state_view: &dyn StateView, token: TypeTag) -> u64 {\n\n let mut ret = execute_readonly_function(\n\n state_view,\n\n &ModuleId::new(genesis_address(), Identifier::new(\"Dao\").unwrap()),\n\n &Identifier::new(\"voting_delay\").unwrap(),\n\n vec![token],\n\n vec![],\n\n )\n\n .unwrap();\n\n assert_eq!(ret.len(), 1);\n\n ret.pop().unwrap().1.cast().unwrap()\n\n}\n", "file_path": "test-helper/src/dao.rs", "rank": 34, "score": 297570.83185840165 }, { "content": "/// Output transaction builders in Rust for the given ABIs.\n\n/// If `local_types` is true, we generate a file suitable for the Diem codebase itself\n\n/// rather than using serde-generated, standalone definitions.\n\npub fn output(out: &mut dyn Write, abis: &[ScriptABI], local_types: bool) -> Result<()> {\n\n let mut emitter = RustEmitter {\n\n out: IndentedWriter::new(out, IndentConfig::Space(4)),\n\n local_types,\n\n };\n\n\n\n emitter.output_preamble()?;\n\n emitter.output_script_call_enum_with_imports(abis)?;\n\n\n\n emitter.output_transaction_script_impl(&common::transaction_script_abis(abis))?;\n\n emitter.output_script_function_impl(&common::script_function_abis(abis))?;\n\n\n\n for abi in abis {\n\n emitter.output_script_encoder_function(abi)?;\n\n }\n\n\n\n for abi in abis {\n\n emitter.output_script_decoder_function(abi)?;\n\n }\n\n\n\n emitter.output_transaction_script_decoder_map(&common::transaction_script_abis(abis))?;\n\n emitter.output_script_function_decoder_map(&common::script_function_abis(abis))?;\n\n emitter.output_decoding_helpers(abis)?;\n\n\n\n for abi in &common::transaction_script_abis(abis) {\n\n emitter.output_code_constant(abi)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "vm/transaction-builder-generator/src/rust.rs", "rank": 35, "score": 295215.4833596454 }, { "content": "pub fn 
current_block_number(state_view: &dyn StateView) -> u64 {\n\n let mut ret = execute_readonly_function(\n\n state_view,\n\n &ModuleId::new(genesis_address(), Identifier::new(\"Block\").unwrap()),\n\n &Identifier::new(\"get_current_block_number\").unwrap(),\n\n vec![],\n\n vec![],\n\n )\n\n .unwrap();\n\n assert_eq!(ret.len(), 1);\n\n ret.pop().unwrap().1.cast().unwrap()\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 36, "score": 294093.5221516206 }, { "content": "pub fn min_action_delay(state_view: &dyn StateView, token: TypeTag) -> u64 {\n\n let mut ret = execute_readonly_function(\n\n state_view,\n\n &ModuleId::new(genesis_address(), Identifier::new(\"Dao\").unwrap()),\n\n &Identifier::new(\"min_action_delay\").unwrap(),\n\n vec![token],\n\n vec![],\n\n )\n\n .unwrap();\n\n assert_eq!(ret.len(), 1);\n\n ret.pop().unwrap().1.cast().unwrap()\n\n}\n\n\n", "file_path": "test-helper/src/dao.rs", "rank": 37, "score": 294049.81674028025 }, { "content": "pub fn make_genesis_accounts() -> BTreeMap<String, Account> {\n\n let mut m = BTreeMap::new();\n\n m.insert(ASSOCIATION_NAME.to_string(), Account::new_association());\n\n m.insert(\n\n GENESIS_NAME.to_string(),\n\n Account::new_genesis_account(genesis_address()),\n\n );\n\n m\n\n}\n", "file_path": "vm/functional-tests/src/genesis_accounts.rs", "rank": 38, "score": 290968.00809411256 }, { "content": "/// Output a header-only library providing C++ transaction builders for the given ABIs.\n\npub fn output(out: &mut dyn Write, abis: &[ScriptABI], namespace: Option<&str>) -> Result<()> {\n\n let mut emitter = CppEmitter {\n\n out: IndentedWriter::new(out, IndentConfig::Space(4)),\n\n namespace,\n\n inlined_definitions: true,\n\n };\n\n emitter.output_preamble()?;\n\n emitter.output_open_namespace()?;\n\n emitter.output_using_namespaces()?;\n\n for abi in abis {\n\n match abi {\n\n ScriptABI::TransactionScript(abi) => {\n\n emitter.output_transaction_script_builder_definition(abi)?\n\n }\n\n 
ScriptABI::ScriptFunction(abi) => emitter.output_script_fun_builder_definition(abi)?,\n\n };\n\n }\n\n emitter.output_close_namespace()\n\n}\n\n\n", "file_path": "vm/transaction-builder-generator/src/cpp.rs", "rank": 39, "score": 290170.12903166376 }, { "content": "pub fn to_bytes<T>(value: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: ?Sized + Serialize,\n\n{\n\n bcs::to_bytes(value).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "commons/bcs_ext/src/lib.rs", "rank": 40, "score": 288268.9632115961 }, { "content": "pub fn from_public_key(public_key: &Ed25519PublicKey) -> AccountAddress {\n\n AuthenticationKey::ed25519(public_key).derived_address()\n\n}\n", "file_path": "types/src/account_address.rs", "rank": 41, "score": 287019.105758151 }, { "content": "#[test]\n\nfn test_state_proof() -> Result<()> {\n\n let storage = MockStateNodeStore::new();\n\n let chain_state_db = ChainStateDB::new(Arc::new(storage), None);\n\n let access_path = AccessPath::random_resource();\n\n let state0 = random_bytes();\n\n chain_state_db.apply_write_set(to_write_set(access_path.clone(), state0.clone()))?;\n\n\n\n let state_root = chain_state_db.commit()?;\n\n let state1 = chain_state_db.get(&access_path)?;\n\n assert!(state1.is_some());\n\n assert_eq!(state0, state1.unwrap());\n\n println!(\"{}\", access_path.address.key_hash());\n\n println!(\"{}\", access_path.key_hash());\n\n let state_with_proof = chain_state_db.get_with_proof(&access_path)?;\n\n println!(\"{:?}\", state_with_proof);\n\n state_with_proof\n\n .proof\n\n .verify(state_root, access_path, state_with_proof.state.as_deref())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "state/statedb/src/tests.rs", "rank": 42, "score": 286981.308996017 }, { "content": "fn to_write_set(access_path: AccessPath, value: Vec<u8>) -> WriteSet {\n\n WriteSetMut::new(vec![(access_path, WriteOp::Value(value))])\n\n .freeze()\n\n .expect(\"freeze write_set must success.\")\n\n}\n\n\n", "file_path": "state/statedb/src/tests.rs", "rank": 43, "score": 
284414.4658332745 }, { "content": "pub fn compile_modules_with_address(address: AccountAddress, code: &str) -> Vec<Module> {\n\n let temp_dir = temp_path();\n\n let stdlib_files =\n\n restore_stdlib_in_dir(temp_dir.path()).expect(\"get stdlib modules should be ok\");\n\n let compiled_result =\n\n starcoin_move_compiler::compile_source_string_no_report(code, &stdlib_files, address)\n\n .expect(\"compile fail\")\n\n .1\n\n .expect(\"compile fail\");\n\n compiled_result\n\n .into_iter()\n\n .map(|m| Module::new(m.serialize()))\n\n .collect()\n\n}\n", "file_path": "test-helper/src/executor.rs", "rank": 44, "score": 284320.46653935604 }, { "content": "fn bench_get_with_proof(c: &mut Criterion) {\n\n ::logger::init();\n\n let tmp_dir = starcoin_config::temp_path();\n\n let db_store = new_empty_store(tmp_dir.as_ref()) as Arc<dyn StateNodeStore>;\n\n\n\n let mem_store = Arc::new(MockStateNodeStore::new()) as Arc<dyn StateNodeStore>;\n\n\n\n let mut group = c.benchmark_group(\"get_with_proof\");\n\n for (id, s) in [(\"mem_store\", mem_store), (\"db_store\", db_store)].iter() {\n\n let tree = StateTree::new(s.clone(), None);\n\n let (kvs, _root) = prepare_tree(&tree, &[1, 2, 3, 4], 100_000);\n\n let ks = kvs.keys().copied().collect::<Vec<_>>();\n\n group\n\n .bench_with_input(*id, &(tree, kvs, ks), |b, input| {\n\n let (tree, kvs, ks) = input;\n\n let k_len = ks.len();\n\n let mut i = 0usize;\n\n b.iter_with_setup(\n\n || {\n\n let k = &ks[i % k_len];\n", "file_path": "benchmarks/benches/bench_state_tree.rs", "rank": 45, "score": 283724.5231293807 }, { "content": "pub fn from_public_key(public_key: &Ed25519PublicKey) -> AccountAddress {\n\n AuthenticationKey::ed25519(public_key).derived_address()\n\n}\n\n\n\n// Define the Hasher used for hashing AccountAddress types. In order to properly use the\n\n// CryptoHasher derive macro we need to have this in its own module so that it doesn't conflict\n\n// with the imported `AccountAddress` from move-core-types. 
It needs to have the same name since\n\n// the hash salt is calculated using the name of the type.\n\nmod hasher {\n\n use starcoin_crypto::hash::CryptoHasher;\n\n #[derive(serde::Deserialize, CryptoHasher)]\n\n struct AccountAddress;\n\n}\n\n\n", "file_path": "vm/types/src/account_address.rs", "rank": 46, "score": 283271.0410197339 }, { "content": "fn get_epoch_from_statedb(statedb: &ChainStateDB) -> Result<Epoch> {\n\n let account_reader = AccountStateReader::new(statedb);\n\n account_reader\n\n .get_resource::<Epoch>(genesis_address())?\n\n .ok_or_else(|| format_err!(\"Epoch is none.\"))\n\n}\n\n\n", "file_path": "chain/src/chain.rs", "rank": 47, "score": 281234.07403724926 }, { "content": "pub trait TokenUnit: Clone + Copy {\n\n fn symbol(&self) -> &'static str;\n\n\n\n fn scale(&self) -> u32;\n\n\n\n fn scaling_factor(&self) -> u128 {\n\n 10u32.pow(self.scale()) as u128\n\n }\n\n\n\n fn scaling(&self, value: u128) -> u128 {\n\n self.scaling_factor() * value\n\n }\n\n\n\n fn split(&self, value: u128) -> (u128, u128) {\n\n let scaling_factor = self.scaling_factor();\n\n if value >= scaling_factor {\n\n let h = value / scaling_factor;\n\n let l = value - (h * scaling_factor);\n\n (h, l)\n\n } else {\n", "file_path": "vm/types/src/token/token_value.rs", "rank": 48, "score": 281046.95716688957 }, { "content": "/// `StateView` is a trait that defines a read-only snapshot of the global state. 
It is passed to\n\n/// the VM for transaction execution, during which the VM is guaranteed to read anything at the\n\n/// given state.\n\npub trait StateView {\n\n /// Gets the state for a single access path.\n\n fn get(&self, access_path: &AccessPath) -> Result<Option<Vec<u8>>>;\n\n\n\n /// Gets states for a list of access paths.\n\n fn multi_get(&self, access_paths: &[AccessPath]) -> Result<Vec<Option<Vec<u8>>>>;\n\n\n\n /// VM needs this method to know whether the current state view is for genesis state creation.\n\n /// Currently TransactionPayload::WriteSet is only valid for genesis state creation.\n\n fn is_genesis(&self) -> bool;\n\n}\n", "file_path": "vm/types/src/state_view.rs", "rank": 49, "score": 279881.584506251 }, { "content": "pub fn account_struct_tag() -> StructTag {\n\n StructTag {\n\n address: CORE_CODE_ADDRESS,\n\n module: ACCOUNT_MODULE_IDENTIFIER.clone(),\n\n name: ACCOUNT_STRUCT_NAME.to_owned(),\n\n type_params: vec![],\n\n }\n\n}\n", "file_path": "vm/types/src/account_config/constants/account.rs", "rank": 50, "score": 278290.6472724031 }, { "content": "pub fn get_free_mem_size() -> Result<u64> {\n\n let sys = System::new();\n\n let free = match sys.memory() {\n\n Ok(mem) => mem.free.as_u64(),\n\n Err(_x) => 0u64,\n\n };\n\n Ok(free)\n\n}\n", "file_path": "commons/system/src/lib.rs", "rank": 51, "score": 276808.8209477427 }, { "content": "fn check_data<T: Sample + Serialize + DeserializeOwned + PartialEq>() -> Result<T> {\n\n let type_name = type_name::<T>();\n\n ensure!(\n\n T::sample() == T::sample(),\n\n \"Type {}'s sample return result is not stable.\"\n\n );\n\n if let Some(t) = read_and_check_data::<T>()? 
{\n\n info!(\"Check {} ok\", type_name);\n\n Ok(t)\n\n } else {\n\n let t = T::sample();\n\n write_data(&t)?;\n\n Ok(t)\n\n }\n\n}\n", "file_path": "test-helper/tests/types_check.rs", "rank": 52, "score": 274925.47671293153 }, { "content": "/// This produces the genesis block\n\npub fn genesis_strategy(storage: Arc<Storage>) -> impl Strategy<Value = Block> {\n\n let net = &ChainNetwork::new_test();\n\n let genesis = Genesis::load(net).unwrap();\n\n genesis.execute_genesis_block(net, storage).unwrap();\n\n Just(genesis.block().clone())\n\n}\n\n\n", "file_path": "chain/tests/block_test_utils.rs", "rank": 53, "score": 274548.401495822 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n let config = xctx.config();\n\n\n\n let mut packages = args.package_args.to_selected_packages(&xctx)?;\n\n if args.unit {\n\n packages.add_excludes(config.system_tests().iter().map(|(p, _)| p.as_str()));\n\n }\n\n\n\n args.args.extend(args.testname.clone());\n\n\n\n let generate_coverage = args.html_cov_dir.is_some() || args.html_lcov_dir.is_some();\n\n\n\n let env_vars: &[(&str, Option<&str>)] = if generate_coverage {\n\n if !xctx.installer().install_if_needed(\"grcov\") {\n\n return Err(anyhow!(\"Could not install grcov\"));\n\n }\n\n info!(\"Running \\\"cargo clean\\\" before collecting coverage\");\n\n let mut clean_cmd = Command::new(\"cargo\");\n\n clean_cmd.arg(\"clean\");\n\n clean_cmd.output()?;\n", "file_path": "devtools/x/src/test.rs", "rank": 54, "score": 273378.95643422706 }, { "content": "fn get_epoch_data_from_statedb(statedb: &ChainStateDB) -> Result<EpochData> {\n\n let account_reader = AccountStateReader::new(statedb);\n\n account_reader\n\n .get_resource::<EpochData>(genesis_address())?\n\n .ok_or_else(|| format_err!(\"Epoch is none.\"))\n\n}\n", "file_path": "chain/src/chain.rs", "rank": 55, "score": 273049.01271969447 }, { "content": "fn read_and_check_data<T: Serialize + DeserializeOwned + PartialEq>() -> Result<Option<T>> {\n\n let 
data_path = data_file::<T>();\n\n let json_path = json_file::<T>();\n\n if data_path.exists() && json_path.exists() {\n\n debug!(\"Read data from {:?}\", data_path);\n\n let data = hex::decode(std::fs::read_to_string(data_path)?.as_str())?;\n\n let data_t = bcs_ext::from_bytes::<T>(data.as_slice())?;\n\n let json_t = serde_json::from_str::<T>(std::fs::read_to_string(json_path)?.as_str())?;\n\n ensure!(\n\n data_t == json_t,\n\n \"{}'s bcs and json serialize data is not equals.\",\n\n type_name::<T>()\n\n );\n\n\n\n let new_data = bcs_ext::to_bytes(&data_t)?;\n\n ensure!(\n\n data == new_data,\n\n \"Check type {}'s serialize/deserialize fail, expect:{}, got: {}\",\n\n type_name::<T>(),\n\n hex::encode(data),\n\n hex::encode(new_data)\n\n );\n\n Ok(Some(data_t))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "test-helper/tests/types_check.rs", "rank": 56, "score": 270452.87769768655 }, { "content": "pub trait ValueCodec: Clone + Sized + Debug + std::marker::Send + std::marker::Sync {\n\n /// Converts `self` to bytes to be stored in DB.\n\n fn encode_value(&self) -> Result<Vec<u8>>;\n\n /// Converts bytes fetched from DB to `Self`.\n\n fn decode_value(data: &[u8]) -> Result<Self>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum WriteOp<V> {\n\n Value(V),\n\n Deletion,\n\n}\n\n\n\nimpl<V> WriteOp<V>\n\nwhere\n\n V: ValueCodec,\n\n{\n\n pub fn into_raw_op(self) -> Result<WriteOp<Vec<u8>>> {\n\n Ok(match self {\n\n WriteOp::Value(v) => WriteOp::Value(v.encode_value()?),\n", "file_path": "storage/src/storage.rs", "rank": 57, "score": 270004.96153614065 }, { "content": "pub fn blockmeta_execute(state: &ChainStateDB, meta: BlockMetadata) -> Result<TransactionOutput> {\n\n let txn = Transaction::BlockMetadata(meta);\n\n let output = execute_and_apply(state, txn);\n\n if let TransactionStatus::Discard(s) = output.status() {\n\n bail!(\"txn discard, status: {:?}\", s);\n\n }\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 58, 
"score": 268874.21291581413 }, { "content": "pub trait TaskState: Sized + Clone + std::marker::Unpin + std::marker::Send {\n\n type Item: Debug + std::marker::Send;\n\n\n\n fn task_name() -> &'static str {\n\n type_name::<Self>()\n\n }\n\n fn new_sub_task(self) -> BoxFuture<'static, Result<Vec<Self::Item>>>;\n\n fn next(&self) -> Option<Self>;\n\n fn total_items(&self) -> Option<u64> {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "commons/stream-task/src/lib.rs", "rank": 59, "score": 268626.92295679863 }, { "content": "pub trait RawKey: Clone + Ord {\n\n /// Raw key's hash, will used as tree's nibble path\n\n /// Directly use origin byte's sha3_256 hash, do not use CryptoHash to add salt.\n\n fn key_hash(&self) -> HashValue {\n\n HashValue::sha3_256_of(\n\n self.encode_key()\n\n .expect(\"Serialize key failed when hash.\")\n\n .as_slice(),\n\n )\n\n }\n\n\n\n /// Encode the raw key, the raw key's bytes will store to leaf node.\n\n fn encode_key(&self) -> Result<Vec<u8>>;\n\n\n\n fn decode_key(bytes: &[u8]) -> Result<Self>;\n\n}\n\n\n\nimpl<T> RawKey for T\n\nwhere\n\n T: Clone + Ord + Serialize + DeserializeOwned,\n", "file_path": "commons/forkable-jellyfish-merkle/src/lib.rs", "rank": 60, "score": 267985.91973988723 }, { "content": "pub fn create_new_block(\n\n chain: &BlockChain,\n\n account: &Account,\n\n txns: Vec<SignedUserTransaction>,\n\n) -> Result<Block> {\n\n let (template, _) = chain.create_block_template(\n\n *account.address(),\n\n Some(account.auth_key()),\n\n None,\n\n txns,\n\n vec![],\n\n None,\n\n )?;\n\n chain\n\n .consensus()\n\n .create_block(template, chain.time_service().as_ref())\n\n}\n\n\n", "file_path": "chain/tests/test_epoch_switch.rs", "rank": 61, "score": 266593.0390970136 }, { "content": "fn build_rows(values: &[Value]) -> Result<(Vec<Row>, Box<dyn RowBuilder>)> {\n\n let bold = CellFormat::builder().bold(true).build();\n\n let mut rows = vec![];\n\n let mut field_names = Vec::new();\n\n let is_simple = 
|value: &Value| value.is_number() || value.is_boolean() || value.is_string();\n\n let mut exist_not_simple = false;\n\n for value in values {\n\n if is_simple(value) {\n\n rows.push(Row::new(vec![Cell::new(\"Result\", bold)]));\n\n } else {\n\n exist_not_simple = true;\n\n let mut flat = json!({});\n\n flatten(value, &mut flat, None, true, None)\n\n .map_err(|e| anyhow::Error::msg(e.description().to_string()))?;\n\n let obj = flat.as_object().expect(\"must be a object\");\n\n let mut cells = vec![];\n\n obj.keys().for_each(|key| {\n\n cells.push(Cell::new(key, bold));\n\n if !field_names.contains(key) {\n\n field_names.push(key.to_string());\n", "file_path": "commons/scmd/src/result.rs", "rank": 62, "score": 263674.07663652225 }, { "content": "#[stest::test]\n\nfn test_find_ancestor_genesis() -> Result<()> {\n\n let mut mock_chain = MockChain::new(ChainNetwork::new_test())?;\n\n mock_chain.produce_and_apply_times(3)?;\n\n\n\n let mut mock_chain2 = MockChain::new(ChainNetwork::new_test())?;\n\n mock_chain2.produce_and_apply_times(4)?;\n\n let ancestor = mock_chain.head().find_ancestor(mock_chain2.head())?;\n\n assert!(ancestor.is_some());\n\n assert_eq!(ancestor.unwrap().number, 0);\n\n Ok(())\n\n}\n\n\n", "file_path": "chain/tests/test_block_chain.rs", "rank": 63, "score": 262717.17328976706 }, { "content": "/// State sequence number client\n\npub trait AccountSeqNumberClient: fmt::Debug + Clone + Any {\n\n /// Fetch only account nonce for given sender.\n\n fn account_seq_number(&self, address: &Address) -> SeqNumber;\n\n}\n\n\n", "file_path": "txpool/src/pool/client.rs", "rank": 64, "score": 262308.76254803536 }, { "content": "pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<Vec<u8>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n Ok(<Vec<serde_bytes::ByteBuf>>::deserialize(deserializer)?\n\n .into_iter()\n\n .map(serde_bytes::ByteBuf::into_vec)\n\n .collect())\n\n}\n", "file_path": "vm/types/src/serde_helper/vec_bytes.rs", "rank": 65, "score": 
261977.32754900763 }, { "content": "pub fn transaction_execution(c: &mut Criterion) {\n\n ::logger::init();\n\n let mut group = c.benchmark_group(\"vm\");\n\n group.sample_size(10);\n\n let bench_id = \"transaction_execution\";\n\n for i in vec![1u64, 5, 10, 20, 50].into_iter() {\n\n group.bench_function(BenchmarkId::new(bench_id, i), |b| {\n\n b.iter(|| run_benchmark(20, 1_000_000, i as usize, 1))\n\n });\n\n }\n\n}\n\n\n\ncriterion_group!(starcoin_vm_benches, transaction_execution);\n\ncriterion_main!(starcoin_vm_benches);\n", "file_path": "benchmarks/benches/bench_vm.rs", "rank": 66, "score": 261810.95831577142 }, { "content": "pub fn convert_changeset_and_events_cached<C: AccessPathCache>(\n\n ap_cache: &mut C,\n\n changeset: MoveChangeSet,\n\n events: Vec<MoveEvent>,\n\n) -> Result<(WriteSet, Vec<ContractEvent>), VMStatus> {\n\n // TODO: Cache access path computations if necessary.\n\n let mut ops = vec![];\n\n\n\n for (addr, account_changeset) in changeset.accounts {\n\n for (struct_tag, blob_opt) in account_changeset.resources {\n\n let ap = ap_cache.get_resource_path(addr, struct_tag);\n\n let op = match blob_opt {\n\n None => WriteOp::Deletion,\n\n Some(blob) => WriteOp::Value(blob),\n\n };\n\n ops.push((ap, op))\n\n }\n\n\n\n for (name, blob_opt) in account_changeset.modules {\n\n let ap = ap_cache.get_module_path(ModuleId::new(addr, name));\n", "file_path": "vm/vm-runtime/src/starcoin_vm.rs", "rank": 67, "score": 260676.84409480722 }, { "content": "pub fn from_bytes<'a, T>(bytes: &'a [u8]) -> Result<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n bcs::from_bytes(bytes).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "commons/bcs_ext/src/lib.rs", "rank": 68, "score": 259419.1776502243 }, { "content": "pub fn serialize<S>(data: &[Vec<u8>], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let mut seq = serializer.serialize_seq(Some(data.len()))?;\n\n for e in data {\n\n 
seq.serialize_element(serde_bytes::Bytes::new(e.as_slice()))?;\n\n }\n\n seq.end()\n\n}\n\n\n", "file_path": "vm/types/src/serde_helper/vec_bytes.rs", "rank": 69, "score": 255378.3686716952 }, { "content": "#[async_trait::async_trait]\n\npub trait ChainStateAsyncService: Clone + std::marker::Unpin + Send + Sync {\n\n async fn get(self, access_path: AccessPath) -> Result<Option<Vec<u8>>>;\n\n\n\n async fn get_with_proof(self, access_path: AccessPath) -> Result<StateWithProof>;\n\n\n\n async fn get_resource<R>(self, address: AccountAddress) -> Result<Option<R>>\n\n where\n\n R: MoveResource + DeserializeOwned,\n\n {\n\n let access_path = AccessPath::new(address, R::resource_path());\n\n let r = self.get(access_path).await.and_then(|state| match state {\n\n Some(state) => Ok(Some(bcs_ext::from_bytes::<R>(state.as_slice())?)),\n\n None => Ok(None),\n\n })?;\n\n Ok(r)\n\n }\n\n\n\n async fn get_account_state(self, address: AccountAddress) -> Result<Option<AccountState>>;\n\n\n\n /// get account stateset on state_root(if empty, use current state root).\n", "file_path": "state/api/src/lib.rs", "rank": 70, "score": 254677.3133500325 }, { "content": "pub trait Solver: Send + DynClone {\n\n fn solve(\n\n &mut self,\n\n strategy: ConsensusStrategy,\n\n minting_blob: &[u8],\n\n diff: U256,\n\n nonce_tx: UnboundedSender<(Vec<u8>, u32)>,\n\n stop_rx: UnboundedReceiver<bool>,\n\n );\n\n}\n\n\n\n#[derive(Clone, Debug, Message)]\n\n#[rtype(result = \"Result<()>\")]\n\npub struct SealEvent {\n\n minting_blob: Vec<u8>,\n\n nonce: u32,\n\n extra: BlockHeaderExtra,\n\n}\n", "file_path": "cmd/miner_client/src/lib.rs", "rank": 71, "score": 252888.7650050032 }, { "content": "pub fn rpc_impl(input: syn::Item, options: &options::DeriveOptions) -> Result<TokenStream> {\n\n let mut rpc_trait = match input {\n\n syn::Item::Trait(item_trait) => item_trait,\n\n item => {\n\n return Err(Error::from(syn::Error::new_spanned(\n\n item,\n\n \"The #[net_rpc] custom attribute only works with trait 
declarations\",\n\n )));\n\n }\n\n };\n\n let mut exports = Vec::new();\n\n if options.enable_client {\n\n let client_module = generate_client_module(&rpc_trait)?;\n\n exports.push(client_module);\n\n }\n\n if options.enable_server {\n\n let server_module = generate_server_module(&mut rpc_trait)?;\n\n exports.push(server_module);\n\n }\n\n Ok(quote! {\n\n #(#exports)*\n\n })\n\n}\n", "file_path": "network-rpc/derive/src/rpc_trait.rs", "rank": 72, "score": 252867.857219573 }, { "content": "pub fn _sign_txn_with_default_account_by_rpc_client(\n\n cli_state: &CliState,\n\n max_gas_amount: u64,\n\n gas_price: u64,\n\n expiration_time: u64,\n\n payload: TransactionPayload,\n\n) -> Result<SignedUserTransaction> {\n\n sign_txn_by_rpc_client(\n\n cli_state,\n\n max_gas_amount,\n\n gas_price,\n\n expiration_time,\n\n payload,\n\n None,\n\n )\n\n}\n\n\n", "file_path": "cmd/starcoin/src/dev/tests.rs", "rank": 73, "score": 252141.9387074266 }, { "content": "pub fn get_dao_config(cli_state: &CliState) -> Result<DaoConfig> {\n\n let client = cli_state.client();\n\n let chain_state_reader = RemoteStateReader::new(client)?;\n\n let account_state_reader = AccountStateReader::new(&chain_state_reader);\n\n account_state_reader\n\n .get_on_chain_config::<DaoConfig>()?\n\n .ok_or_else(|| format_err!(\"DaoConfig not exist on chain.\"))\n\n}\n", "file_path": "cmd/starcoin/src/dev/sign_txn_helper.rs", "rank": 74, "score": 250889.27988412417 }, { "content": "pub fn account_balance_struct_name() -> &'static IdentStr {\n\n &*ACCOUNT_BALANCE_STRUCT_NAME\n\n}\n\n\n", "file_path": "vm/types/src/account_config/constants/account.rs", "rank": 75, "score": 250199.22912803866 }, { "content": "pub fn generate_client_module(rpc_trait: &ItemTrait) -> anyhow::Result<TokenStream> {\n\n let mut rpc_info = Vec::new();\n\n let client_methods: Vec<TokenStream> = rpc_trait\n\n .items\n\n .iter()\n\n .filter_map(|trait_item| {\n\n if let syn::TraitItem::Method(method) = trait_item {\n\n let name = 
&method.sig.ident;\n\n let args = compute_args(&method);\n\n let arg_names = compute_arg_identifiers(&args).unwrap();\n\n let returns = match compute_returns(method) {\n\n Ok(r) => r,\n\n Err(e) => panic!(\"{}\", e)\n\n };\n\n if arg_names.len() < 2 {\n\n panic!(\"network Rpc method must has at least two argument\");\n\n }\n\n let peer_id_indent = arg_names[0];\n\n // TODO: Only support one user custom argument currently\n\n let user_arg_indent = arg_names[1];\n", "file_path": "network-rpc/derive/src/to_client.rs", "rank": 76, "score": 248973.1529861758 }, { "content": "/// Splits a Multiaddress into a Multiaddress and PeerId.\n\npub fn parse_addr(mut addr: Multiaddr) -> Result<(PeerId, Multiaddr), ParseErr> {\n\n let who = match addr.pop() {\n\n Some(multiaddr::Protocol::P2p(key)) => {\n\n PeerId::from_multihash(key).map_err(|_| ParseErr::InvalidPeerId)?\n\n }\n\n _ => return Err(ParseErr::PeerIdMissing),\n\n };\n\n\n\n Ok((who, addr))\n\n}\n\n\n", "file_path": "network-p2p/types/src/lib.rs", "rank": 77, "score": 248170.7020348324 }, { "content": "pub fn event_handle_generator_struct_tag() -> StructTag {\n\n StructTag {\n\n address: CORE_CODE_ADDRESS,\n\n module: event_module_name().to_owned(),\n\n name: event_handle_generator_struct_name().to_owned(),\n\n type_params: vec![],\n\n }\n\n}\n", "file_path": "vm/types/src/account_config/constants/event.rs", "rank": 78, "score": 248119.15312097495 }, { "content": "//TODO this trait should move to consensus?\n\npub trait BlockVerifier {\n\n fn verify_header<R>(current_chain: &R, new_block_header: &BlockHeader) -> Result<()>\n\n where\n\n R: ChainReader;\n\n fn verify_block<R>(current_chain: &R, new_block: Block) -> Result<VerifiedBlock>\n\n where\n\n R: ChainReader,\n\n {\n\n watch(CHAIN_WATCH_NAME, \"n11\");\n\n //verify header\n\n let new_block_header = new_block.header();\n\n Self::verify_header(current_chain, new_block_header)?;\n\n watch(CHAIN_WATCH_NAME, \"n12\");\n\n //verify body\n\n let body_hash = 
new_block.body.hash();\n\n verify_block!(\n\n VerifyBlockField::Body,\n\n body_hash == new_block_header.body_hash(),\n\n \"verify block body hash mismatch, expect: {}, got: {}\",\n\n new_block_header.body_hash(),\n", "file_path": "chain/src/verifier/mod.rs", "rank": 80, "score": 244318.60017262568 }, { "content": "#[stest::test]\n\npub fn test_open_block() -> Result<()> {\n\n let config = Arc::new(NodeConfig::random_for_test());\n\n let chain = test_helper::gen_blockchain_for_test(config.net())?;\n\n let header = chain.current_header();\n\n let block_gas_limit = 10000000;\n\n\n\n let mut opened_block = {\n\n let miner_account = AccountInfo::random();\n\n OpenedBlock::new(\n\n chain.get_storage(),\n\n header,\n\n block_gas_limit,\n\n miner_account.address,\n\n Some(miner_account.public_key.authentication_key()),\n\n config.net().time_service().now_millis(),\n\n vec![],\n\n U256::from(0),\n\n chain.consensus(),\n\n )?\n\n };\n", "file_path": "chain/tests/test_opened_block.rs", "rank": 82, "score": 242889.5239975736 }, { "content": "pub fn new_epoch_event_key() -> EventKey {\n\n EventKey::new_from_address(&genesis_address(), 0)\n\n}\n\n\n", "file_path": "vm/types/src/on_chain_config/mod.rs", "rank": 83, "score": 241269.85328593952 }, { "content": "/// Serializes the script then deserializes it.\n\nfn serialize_and_deserialize_script(script: &CompiledScript) -> Result<()> {\n\n let mut script_blob = vec![];\n\n script.serialize(&mut script_blob)?;\n\n let deserialized_script = CompiledScript::deserialize(&script_blob)\n\n .map_err(|e| e.finish(Location::Undefined).into_vm_status())?;\n\n\n\n if *script != deserialized_script {\n\n return Err(ErrorKind::Other(\n\n \"deserialized script different from original one\".to_string(),\n\n )\n\n .into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "vm/functional-tests/src/evaluator.rs", "rank": 84, "score": 241005.34122100944 }, { "content": "/// Serializes the module then deserializes it.\n\nfn 
serialize_and_deserialize_module(module: &CompiledModule) -> Result<()> {\n\n let mut module_blob = vec![];\n\n module.serialize(&mut module_blob)?;\n\n let deserialized_module = CompiledModule::deserialize(&module_blob)\n\n .map_err(|e| e.finish(Location::Undefined).into_vm_status())?;\n\n\n\n if *module != deserialized_module {\n\n return Err(ErrorKind::Other(\n\n \"deserialized module different from original one\".to_string(),\n\n )\n\n .into());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "vm/functional-tests/src/evaluator.rs", "rank": 85, "score": 241005.34122100944 }, { "content": "pub trait MoveResource {\n\n const MODULE_NAME: &'static str;\n\n const STRUCT_NAME: &'static str;\n\n\n\n fn module_identifier() -> Identifier {\n\n IdentStr::new(Self::MODULE_NAME)\n\n .expect(\"failed to get IdentStr for Move module\")\n\n .to_owned()\n\n }\n\n\n\n fn struct_identifier() -> Identifier {\n\n IdentStr::new(Self::STRUCT_NAME)\n\n .expect(\"failed to get IdentStr for Move struct\")\n\n .to_owned()\n\n }\n\n\n\n fn type_params() -> Vec<TypeTag> {\n\n vec![]\n\n }\n\n\n", "file_path": "vm/types/src/move_resource.rs", "rank": 86, "score": 240375.8599545182 }, { "content": "#[rpc]\n\npub trait StateApi {\n\n #[rpc(name = \"state.get\")]\n\n fn get(&self, access_path: AccessPath) -> FutureResult<Option<Vec<u8>>>;\n\n\n\n #[rpc(name = \"state.get_with_proof\")]\n\n fn get_with_proof(&self, access_path: AccessPath) -> FutureResult<StateWithProofView>;\n\n\n\n #[rpc(name = \"state.get_account_state\")]\n\n fn get_account_state(&self, address: AccountAddress) -> FutureResult<Option<AccountState>>;\n\n\n\n #[rpc(name = \"state.get_account_state_set\")]\n\n fn get_account_state_set(\n\n &self,\n\n address: AccountAddress,\n\n ) -> FutureResult<Option<AccountStateSetView>>;\n\n\n\n #[rpc(name = \"state.get_state_root\")]\n\n fn get_state_root(&self) -> FutureResult<HashValue>;\n\n\n\n #[rpc(name = \"state.get_with_proof_by_root\")]\n\n fn get_with_proof_by_root(\n\n 
&self,\n\n access_path: AccessPath,\n\n state_root: HashValue,\n\n ) -> FutureResult<StateWithProofView>;\n\n}\n", "file_path": "rpc/api/src/state/mod.rs", "rank": 87, "score": 240232.95097851352 }, { "content": "#[async_trait::async_trait]\n\npub trait AccountAsyncService:\n\n Clone + std::marker::Unpin + std::marker::Sync + std::marker::Send\n\n{\n\n async fn create_account(&self, password: String) -> Result<AccountInfo>;\n\n\n\n async fn get_default_account(&self) -> Result<Option<AccountInfo>>;\n\n async fn set_default_account(&self, address: AccountAddress) -> Result<Option<AccountInfo>>;\n\n async fn get_accounts(&self) -> Result<Vec<AccountInfo>>;\n\n\n\n async fn get_account(&self, address: AccountAddress) -> Result<Option<AccountInfo>>;\n\n\n\n /// Signs the hash of data with given address.\n\n async fn sign_message(\n\n &self,\n\n address: AccountAddress,\n\n message: SigningMessage,\n\n ) -> Result<Vec<u8>>;\n\n\n\n async fn sign_txn(\n\n &self,\n", "file_path": "account/api/src/service.rs", "rank": 88, "score": 240092.592511278 }, { "content": "#[rpc]\n\npub trait AccountApi {\n\n /// Get default account\n\n #[rpc(name = \"account.default\")]\n\n fn default(&self) -> FutureResult<Option<AccountInfo>>;\n\n\n\n #[rpc(name = \"account.set_default_account\")]\n\n fn set_default_account(&self, addr: AccountAddress) -> FutureResult<Option<AccountInfo>>;\n\n\n\n #[rpc(name = \"account.create\")]\n\n fn create(&self, password: String) -> FutureResult<AccountInfo>;\n\n #[rpc(name = \"account.list\")]\n\n fn list(&self) -> FutureResult<Vec<AccountInfo>>;\n\n #[rpc(name = \"account.get\")]\n\n fn get(&self, address: AccountAddress) -> FutureResult<Option<AccountInfo>>;\n\n\n\n #[rpc(name = \"account.sign\")]\n\n fn sign(&self, address: AccountAddress, data: SigningMessage)\n\n -> FutureResult<StrView<Vec<u8>>>;\n\n\n\n /// sign a txn request, return hex encoded bcs_ext bytes of signed user txn.\n", "file_path": "rpc/api/src/account/mod.rs", "rank": 89, "score": 
240085.1203027959 }, { "content": "pub fn prepare_genesis() -> (ChainStateDB, ChainNetwork) {\n\n let net = ChainNetwork::new_test();\n\n let chain_state = ChainStateDB::mock();\n\n let genesis_txn = Genesis::build_genesis_transaction(&net).unwrap();\n\n Genesis::execute_genesis_txn(&chain_state, genesis_txn).unwrap();\n\n (chain_state, net)\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 90, "score": 239788.42702401849 }, { "content": "fn print_value_table(value: Value) -> Result<()> {\n\n let simple_value =\n\n value.is_number() || value.is_boolean() || value.is_boolean() || value.is_string();\n\n if simple_value {\n\n println!(\"{}\", value_to_string(&value));\n\n } else {\n\n // value must be a object at here.\n\n let bold = CellFormat::builder().bold(true).build();\n\n let mut flat = json!({});\n\n flatten(&value, &mut flat, None, true, None)\n\n .map_err(|e| anyhow::Error::msg(e.description().to_string()))?;\n\n let obj = flat.as_object().expect(\"must be a object\");\n\n let mut rows = vec![];\n\n for (k, v) in obj {\n\n let row = Row::new(vec![\n\n Cell::new(k, bold),\n\n Cell::new(value_to_string(&v).as_str(), Default::default()),\n\n ]);\n\n rows.push(row);\n\n }\n\n let table = Table::new(rows, Default::default())?;\n\n table.print_stdout()?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "commons/scmd/src/result.rs", "rank": 91, "score": 239018.12485621087 }, { "content": "pub trait ServiceHandler<S, R>\n\nwhere\n\n S: ActorService,\n\n R: ServiceRequest,\n\n{\n\n fn handle(&mut self, msg: R, ctx: &mut ServiceContext<S>) -> <R as ServiceRequest>::Response;\n\n}\n\n\n", "file_path": "commons/service-registry/src/service.rs", "rank": 92, "score": 238627.59698996882 }, { "content": "fn parse_struct_tag(input: &str) -> anyhow::Result<StructTag> {\n\n match parse_type_tag(input)? 
{\n\n TypeTag::Struct(s) => Ok(s),\n\n _ => {\n\n anyhow::bail!(\"invalid struct tag\")\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, StructOpt)]\n\n#[structopt(name = \"resource-exporter\", about = \"onchain resource exporter\")]\n\npub struct ExporterOptions {\n\n #[structopt(long, short = \"o\", parse(from_os_str))]\n\n /// output file, like accounts.csv\n\n pub output: PathBuf,\n\n #[structopt(long, short = \"i\", parse(from_os_str))]\n\n /// starcoin node db path. like ~/.starcoin/barnard/starcoindb/db\n\n pub db_path: PathBuf,\n\n\n\n #[structopt(long)]\n", "file_path": "cmd/resource-exporter/src/main.rs", "rank": 93, "score": 238559.48332747573 }, { "content": "fn write_indent(f: &mut Formatter, indent: u64) -> std::fmt::Result {\n\n for _i in 0..indent {\n\n write!(f, \" \")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "vm/resource-viewer/src/lib.rs", "rank": 94, "score": 237365.50490755745 }, { "content": "#[test]\n\nfn test_state_db() -> Result<()> {\n\n let storage = MockStateNodeStore::new();\n\n let chain_state_db = ChainStateDB::new(Arc::new(storage), None);\n\n let access_path = AccessPath::random_resource();\n\n\n\n let state0 = random_bytes();\n\n chain_state_db.apply_write_set(to_write_set(access_path.clone(), state0))?;\n\n let state_root = chain_state_db.commit()?;\n\n\n\n let state1 = random_bytes();\n\n chain_state_db.apply_write_set(to_write_set(access_path, state1))?;\n\n let new_state_root = chain_state_db.commit()?;\n\n assert_ne!(state_root, new_state_root);\n\n Ok(())\n\n}\n\n\n", "file_path": "state/statedb/src/tests.rs", "rank": 95, "score": 237142.61132815055 }, { "content": "#[test]\n\nfn test_state_version() -> Result<()> {\n\n let storage = Arc::new(MockStateNodeStore::new());\n\n let chain_state_db = ChainStateDB::new(storage.clone(), None);\n\n let account_address = AccountAddress::random();\n\n let access_path = AccessPath::new(account_address, AccountResource::resource_path());\n\n let old_state = random_bytes();\n\n 
chain_state_db.apply_write_set(to_write_set(access_path.clone(), old_state.clone()))?;\n\n chain_state_db.commit()?;\n\n chain_state_db.flush()?;\n\n let old_root = chain_state_db.state_root();\n\n\n\n let new_state = random_bytes();\n\n chain_state_db.apply_write_set(to_write_set(access_path.clone(), new_state))?;\n\n\n\n let chain_state_db_ori = ChainStateDB::new(storage, Some(old_root));\n\n let old_state2 = chain_state_db_ori.get(&access_path)?.unwrap();\n\n assert_eq!(old_state, old_state2);\n\n\n\n Ok(())\n\n}\n", "file_path": "state/statedb/src/tests.rs", "rank": 96, "score": 237142.61132815055 }, { "content": "fn bench_put_and_commit(c: &mut Criterion) {\n\n ::logger::init_for_test();\n\n let mut group = c.benchmark_group(\"put_and_commit\");\n\n group.sample_size(80);\n\n for i in vec![1u64, 5, 10, 50, 100].into_iter() {\n\n let tmp_dir = starcoin_config::temp_path();\n\n let db_store = new_empty_store(tmp_dir.as_ref()) as Arc<dyn StateNodeStore>;\n\n let mem_store = Arc::new(MockStateNodeStore::new()) as Arc<dyn StateNodeStore>;\n\n let mut rng: StdRng = {\n\n let seed = [1u8, 2, 3, 4];\n\n let mut actual_seed = [0u8; 32];\n\n actual_seed[..seed.len()].copy_from_slice(&seed);\n\n StdRng::from_seed(actual_seed)\n\n };\n\n for (id, store) in vec![(\"mem_store\", mem_store), (\"db_store\", db_store)].into_iter() {\n\n let tree = StateTree::new(store, None);\n\n // init tree with 10w keys.\n\n let _ = prepare_tree(&tree, &[2u8, 3, 4, 5], 100_000);\n\n group.bench_with_input(BenchmarkId::new(id, i), &(tree, i), |b, input| {\n\n let (tree, n) = input;\n", "file_path": "benchmarks/benches/bench_state_tree.rs", "rank": 97, "score": 236469.91063297176 }, { "content": "pub trait WalletStorageTrait {\n\n fn save_default_settings(&self, setting: Setting) -> Result<()>;\n\n\n\n fn save_accepted_token(&self, token: TokenCode) -> Result<()>;\n\n fn contain_wallet(&self, address: AccountAddress) -> Result<bool>;\n\n}\n", "file_path": "account/api/src/rich_wallet.rs", 
"rank": 98, "score": 236401.81016469415 }, { "content": "pub fn print_action_result(\n\n format: OutputFormat,\n\n result: Result<Value>,\n\n console_mode: bool,\n\n) -> Result<()> {\n\n match format {\n\n OutputFormat::JSON => {\n\n // if in console, and is err, print error directly.\n\n if console_mode && result.is_err() {\n\n println!(\"{}\", result.unwrap_err().to_string());\n\n return Ok(());\n\n }\n\n\n\n let value = match result {\n\n Ok(value) => json!({ \"ok\": value }),\n\n Err(err) => json!({\"err\": err.to_string()}),\n\n };\n\n print_json(value)\n\n }\n\n OutputFormat::TABLE => {\n\n match result {\n\n Ok(value) => print_table(value)?,\n\n // err may contains help message, so directly print err.\n\n Err(err) => println!(\"{}\", err.to_string()),\n\n };\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "commons/scmd/src/result.rs", "rank": 99, "score": 236012.88861608843 } ]
Rust
multi-skill/src/systems/true_skill/normal.rs
kiwec/Elo-MMR
bf64ea75e8c0dbb946d379b9bee1753e604b388a
use super::float::{erfc, Float, MyFloat, PI, TWO, ZERO}; use overload::overload; use std::ops; #[derive(Clone, Debug)] pub struct Gaussian { pub mu: MyFloat, pub sigma: MyFloat, } pub const G_ZERO: Gaussian = Gaussian { mu: ZERO, sigma: ZERO, }; pub const G_ONE: Gaussian = Gaussian { mu: ZERO, sigma: MyFloat::INFINITY, }; overload!((a: ?Gaussian) + (b: ?Gaussian) -> Gaussian { Gaussian { mu: a.mu + b.mu, sigma: a.sigma.hypot(b.sigma), } }); overload!((a: &mut Gaussian) += (b: ?Gaussian) { a.mu += b.mu; a.sigma = a.sigma.hypot(b.sigma); }); overload!((a: ?Gaussian) - (b: ?Gaussian) -> Gaussian { Gaussian { mu: a.mu - b.mu, sigma: a.sigma.hypot(b.sigma), } }); overload!((a: &mut Gaussian) -= (b: ?Gaussian) { a.mu -= b.mu; a.sigma = a.sigma.hypot(b.sigma); }); overload!(-(a: &mut Gaussian) -> Gaussian { Gaussian { mu: -a.mu, sigma: a.sigma, } }); overload!((a: ?Gaussian) * (b: ?MyFloat) -> Gaussian { Gaussian { mu: a.mu * b, sigma: a.sigma * b.abs(), } }); overload!((a: &mut Gaussian) *= (b: ?MyFloat) { a.mu *= b; a.sigma *= b.abs(); }); overload!((a: ?Gaussian) / (b: ?MyFloat) -> Gaussian { Gaussian { mu: a.mu / b, sigma: a.sigma / b.abs(), } }); overload!((a: &mut Gaussian) /= (b: ?MyFloat) { a.mu /= b; a.sigma /= b.abs(); }); overload!((a: ?Gaussian) * (b: ?Gaussian) -> Gaussian { if a.sigma.is_infinite() { return b.clone(); } if b.sigma.is_infinite() { return a.clone(); } let ssigma1 = a.sigma.powi(2); let ssigma2 = b.sigma.powi(2); Gaussian { mu: (a.mu * ssigma2 + b.mu * ssigma1) / (ssigma1 + ssigma2), sigma: a.sigma * b.sigma / (ssigma1 + ssigma2).sqrt(), } }); overload!((a: &mut Gaussian) *= (b: ?Gaussian) { *a = a.clone() * b; }); overload!((a: ?Gaussian) / (b: ?Gaussian) -> Gaussian { if b.sigma.is_infinite() { return a.clone(); } if a.sigma.is_infinite() { return Gaussian { mu: -b.mu, sigma: b.sigma, } } let ssigma1 = a.sigma.powi(2); let ssigma2 = b.sigma.powi(2); Gaussian { mu: (a.mu * ssigma2 - b.mu * ssigma1) / (ssigma2 - ssigma1), sigma: a.sigma * 
b.sigma / (ssigma2 - ssigma1).abs().sqrt(), } }); overload!((a: &mut Gaussian) /= (b: ?Gaussian) { *a = a.clone() / b; }); fn gauss_exponent(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat { (-((t - mu) / sigma).powi(2)).exp() } fn moment0(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat { sigma * PI.sqrt() / TWO * erfc((t - mu) / sigma) } fn moment1(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat { mu * moment0(ZERO, sigma, t - mu) + sigma.powi(2) / TWO * gauss_exponent(mu, sigma, t) } fn moment2(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat { mu.powi(2) * moment0(ZERO, sigma, t - mu) + TWO * mu * moment1(ZERO, sigma, t - mu) + (sigma / TWO).powi(2) * (TWO * gauss_exponent(mu, sigma, t) * (t - mu) + sigma * PI.sqrt() * erfc((t - mu) / sigma)) } impl Gaussian { pub fn leq_eps(&self, eps: MyFloat) -> Gaussian { assert!(eps >= ZERO); assert!(!self.sigma.is_infinite()); let alpha = moment0(self.mu, self.sigma, -eps) - moment0(self.mu, self.sigma, eps); const FLOAT_CMP_EPS: f64 = 1e-8; let (mu, sigma) = if alpha < FLOAT_CMP_EPS.into() { (eps, eps) } else { let mu = (moment1(self.mu, self.sigma, -eps) - moment1(self.mu, self.sigma, eps)) / alpha; let sigma2 = (moment2(self.mu, self.sigma, -eps) - moment2(self.mu, self.sigma, eps)) / alpha - mu.powi(2); (mu, sigma2.max(ZERO).sqrt()) }; assert!( !mu.is_nan() && !sigma.is_nan(), "{:?}\teps {} {} {}", self, eps, mu, sigma ); Gaussian { mu, sigma } / self } pub fn greater_eps(&self, eps: MyFloat) -> Gaussian { assert!(eps >= ZERO); assert!(!self.sigma.is_infinite()); let alpha = moment0(self.mu, self.sigma, eps); const FLOAT_CMP_EPS: f64 = 1e-8; let (mu, sigma) = if alpha < FLOAT_CMP_EPS.into() { (eps, eps) } else { let mu = moment1(self.mu, self.sigma, eps) / alpha; let sigma2 = moment2(self.mu, self.sigma, eps) / alpha - mu.powi(2); (mu, sigma2.max(ZERO).sqrt()) }; assert!(!mu.is_nan() && !sigma.is_nan(), "{:?}\teps {}", self, eps); Gaussian { mu, sigma } / self } }
use super::float::{erfc, Float, MyFloat, PI, TWO, ZERO}; use overload::overload; use std::ops; #[derive(Clone, Debug)] pub struct Gaussian { pub mu: MyFloat, pub sigma: MyFloat, } pub const G_ZERO: Gaussian = Gaussian { mu: ZERO, sigma: ZERO, }; pub const G_ONE: Gaussian = Gaussian { mu: ZERO, sigma: MyFloat::INFINITY, }; overload!((a: ?Gaussian) + (b: ?Gaussian) -> Gaussian { Gaussian { mu: a.mu + b.mu, sigma: a.sigma.hypot(b.sigma), } }); overload!((a: &mut Gaussian) += (b: ?Gaussian) { a.mu += b.mu; a.sigma = a.sigma.hypot(b.sigma); }); overload!((a: ?Gaussian) - (b: ?Gaussian) -> Gaussian { Gaussian { mu: a.mu - b.mu, sigma: a.sigma.hypot(b.sigma), } }); overload!((a: &mut Gaussian) -= (b: ?Gaussian) { a.mu -= b.mu; a.sigma = a.sigma.hypot(b.sigma); }); overload!(-(a: &mut Gaussian) -> Gaussian { Gaussian { mu: -a.mu, sigma: a.sigma, } }); overload!((a: ?Gaussian) * (b: ?MyFloat) -> Gaussian { Gaussian { mu: a.mu * b, sigma: a.sigma * b.abs(), } }); overload!((a: &mut Gaussian) *= (b: ?MyFloat) { a.mu *= b; a.sigma *= b.abs(); }); overload!((a: ?Gaussian) / (b: ?MyFloat) -> Gaussian { Gaussian { mu: a.mu / b, sigma: a.sigma / b.abs(), } }); overload!((a: &mut Gaussian) /= (b: ?MyFloat) { a.mu /= b; a.sigma /= b.abs(); }); overload!((a: ?Gaussian) * (b: ?Gaussian) -> Gaussian { if a.sigma.is_infinite() { return b.clone(); } if b.sigma.is_infinite() { return a.clone(); } let ssigma1 = a.sigma.powi(2); let ssigma2 = b.sigma.powi(2); Gaussian { mu: (a.mu * ssigma2 + b.mu * ssigma1) / (ssigma1 + ssigma2), sigma: a.sigma * b.sigma / (ssigma1 + ssigma2).sqrt(), } }); overload!((a: &mut Gaussian) *= (b: ?Gaussian) { *a = a.clone() * b; }); overload!((a: ?Gaussian) / (b: ?Gaussian) -> Gaussian { if b.sigma.is_infinite() { return a.clone(); } if a.sigma.is_infinite() { return Gaussian { mu: -b.mu, sigma: b.sigma, } } let ssigma1 = a.sigma.powi(2); let ssigma2 = b.sigma.powi(2); Gaussian { mu: (a.mu * ssigma2 - b.mu * ssigma1) / (ssigma2 - ssigma1), sigma: a.sigma * 
b.sigma / (ssigma2 - ssigma1).abs().sqrt(), } }); overload!((a: &mut Gaussian) /= (b: ?Gaussian) { *a = a.clone() / b; }); fn gauss_exponent(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat { (-((t - mu) / sigma).powi(2)).exp() } fn moment0(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat { sigma * PI.sqrt() / TWO * erfc((t - mu) / sigma) } fn moment1(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat { mu * moment0(ZERO, sigma, t - mu) + sigma.powi(2) / TWO * gauss_exponent(mu, sigma, t) } fn moment2(mu: MyFloat, sigma: MyFloat, t: MyFloat) -> MyFloat {
impl Gaussian { pub fn leq_eps(&self, eps: MyFloat) -> Gaussian { assert!(eps >= ZERO); assert!(!self.sigma.is_infinite()); let alpha = moment0(self.mu, self.sigma, -eps) - moment0(self.mu, self.sigma, eps); const FLOAT_CMP_EPS: f64 = 1e-8; let (mu, sigma) = if alpha < FLOAT_CMP_EPS.into() { (eps, eps) } else { let mu = (moment1(self.mu, self.sigma, -eps) - moment1(self.mu, self.sigma, eps)) / alpha; let sigma2 = (moment2(self.mu, self.sigma, -eps) - moment2(self.mu, self.sigma, eps)) / alpha - mu.powi(2); (mu, sigma2.max(ZERO).sqrt()) }; assert!( !mu.is_nan() && !sigma.is_nan(), "{:?}\teps {} {} {}", self, eps, mu, sigma ); Gaussian { mu, sigma } / self } pub fn greater_eps(&self, eps: MyFloat) -> Gaussian { assert!(eps >= ZERO); assert!(!self.sigma.is_infinite()); let alpha = moment0(self.mu, self.sigma, eps); const FLOAT_CMP_EPS: f64 = 1e-8; let (mu, sigma) = if alpha < FLOAT_CMP_EPS.into() { (eps, eps) } else { let mu = moment1(self.mu, self.sigma, eps) / alpha; let sigma2 = moment2(self.mu, self.sigma, eps) / alpha - mu.powi(2); (mu, sigma2.max(ZERO).sqrt()) }; assert!(!mu.is_nan() && !sigma.is_nan(), "{:?}\teps {}", self, eps); Gaussian { mu, sigma } / self } }
mu.powi(2) * moment0(ZERO, sigma, t - mu) + TWO * mu * moment1(ZERO, sigma, t - mu) + (sigma / TWO).powi(2) * (TWO * gauss_exponent(mu, sigma, t) * (t - mu) + sigma * PI.sqrt() * erfc((t - mu) / sigma)) }
function_block-function_prefix_line
[]
Rust
runtime/src/account_info.rs
luma-team/solana
b02c412d5b5c1902bd5b3616f427ffc0c9925cef
use crate::{ accounts_db::{AppendVecId, CACHE_VIRTUAL_OFFSET}, accounts_index::{IsCached, ZeroLamport}, append_vec::ALIGN_BOUNDARY_OFFSET, }; pub type Offset = usize; pub type StoredSize = u32; #[derive(Debug)] pub enum StorageLocation { AppendVec(AppendVecId, Offset), Cached, } impl StorageLocation { pub fn is_offset_equal(&self, other: &StorageLocation) -> bool { match self { StorageLocation::Cached => { matches!(other, StorageLocation::Cached) } StorageLocation::AppendVec(_, offset) => { match other { StorageLocation::Cached => { false } StorageLocation::AppendVec(_, other_offset) => other_offset == offset, } } } } pub fn is_store_id_equal(&self, other: &StorageLocation) -> bool { match self { StorageLocation::Cached => { matches!(other, StorageLocation::Cached) } StorageLocation::AppendVec(store_id, _) => { match other { StorageLocation::Cached => { false } StorageLocation::AppendVec(other_store_id, _) => other_store_id == store_id, } } } } } pub type OffsetReduced = u32; #[derive(Default, Debug, PartialEq, Eq, Clone, Copy)] pub struct AccountInfo { store_id: AppendVecId, reduced_offset: OffsetReduced, stored_size_mask: StoredSize, } const IS_ZERO_LAMPORT_FLAG: StoredSize = 1 << (StoredSize::BITS - 1); const IS_CACHED_STORE_ID_FLAG: StoredSize = 1 << (StoredSize::BITS - 2); const ALL_FLAGS: StoredSize = IS_ZERO_LAMPORT_FLAG | IS_CACHED_STORE_ID_FLAG; impl ZeroLamport for AccountInfo { fn is_zero_lamport(&self) -> bool { self.stored_size_mask & IS_ZERO_LAMPORT_FLAG == IS_ZERO_LAMPORT_FLAG } } impl IsCached for AccountInfo { fn is_cached(&self) -> bool { self.stored_size_mask & IS_CACHED_STORE_ID_FLAG == IS_CACHED_STORE_ID_FLAG } } impl IsCached for StorageLocation { fn is_cached(&self) -> bool { matches!(self, StorageLocation::Cached) } } const CACHE_VIRTUAL_STORAGE_ID: AppendVecId = AppendVecId::MAX; impl AccountInfo { pub fn new(storage_location: StorageLocation, stored_size: StoredSize, lamports: u64) -> Self { assert_eq!(stored_size & ALL_FLAGS, 0); let mut 
stored_size_mask = stored_size; let (store_id, raw_offset) = match storage_location { StorageLocation::AppendVec(store_id, offset) => (store_id, offset), StorageLocation::Cached => { stored_size_mask |= IS_CACHED_STORE_ID_FLAG; (CACHE_VIRTUAL_STORAGE_ID, CACHE_VIRTUAL_OFFSET) } }; if lamports == 0 { stored_size_mask |= IS_ZERO_LAMPORT_FLAG; } let reduced_offset: OffsetReduced = (raw_offset / ALIGN_BOUNDARY_OFFSET) as OffsetReduced; let result = Self { store_id, reduced_offset, stored_size_mask, }; assert_eq!(result.offset(), raw_offset, "illegal offset"); result } pub fn store_id(&self) -> AppendVecId { assert!(!self.is_cached()); self.store_id } pub fn offset(&self) -> Offset { (self.reduced_offset as Offset) * ALIGN_BOUNDARY_OFFSET } pub fn stored_size(&self) -> StoredSize { self.stored_size_mask & !ALL_FLAGS } pub fn matches_storage_location(&self, store_id: AppendVecId, offset: Offset) -> bool { self.store_id == store_id && self.offset() == offset && !self.is_cached() } pub fn storage_location(&self) -> StorageLocation { if self.is_cached() { StorageLocation::Cached } else { StorageLocation::AppendVec(self.store_id, self.offset()) } } } #[cfg(test)] mod test { use {super::*, crate::append_vec::MAXIMUM_APPEND_VEC_FILE_SIZE}; #[test] fn test_limits() { for offset in [ MAXIMUM_APPEND_VEC_FILE_SIZE as Offset, 0, ALIGN_BOUNDARY_OFFSET, 4 * ALIGN_BOUNDARY_OFFSET, ] { let info = AccountInfo::new(StorageLocation::AppendVec(0, offset), 0, 0); assert!(info.offset() == offset); } } #[test] #[should_panic(expected = "illegal offset")] fn test_alignment() { let offset = 1; AccountInfo::new(StorageLocation::AppendVec(0, offset), 0, 0); } #[test] fn test_matches_storage_location() { let offset = 0; let id = 0; let info = AccountInfo::new(StorageLocation::AppendVec(id, offset), 0, 0); assert!(info.matches_storage_location(id, offset)); let offset = ALIGN_BOUNDARY_OFFSET; assert!(!info.matches_storage_location(id, offset)); let offset = 0; let id = 1; 
assert!(!info.matches_storage_location(id, offset)); let id = CACHE_VIRTUAL_STORAGE_ID; let info = AccountInfo::new(StorageLocation::Cached, 0, 0); assert!(!info.matches_storage_location(id, offset)); } }
use crate::{ accounts_db::{AppendVecId, CACHE_VIRTUAL_OFFSET}, accounts_index::{IsCached, ZeroLamport}, append_vec::ALIGN_BOUNDARY_OFFSET, }; pub type Offset = usize; pub type StoredSize = u32; #[derive(Debug)] pub enum StorageLocation { AppendVec(AppendVecId, Offset), Cached, } impl StorageLocation { pub fn is_offset_equal(&self, other: &StorageLocation) -> bool { match self { StorageLocation::Cached => { matches!(other, StorageLocation::Cached) } StorageLocation::AppendVec(_, offset) => { match other { StorageLocation::Cached => { false } StorageLocation::AppendVec(_, other_offset) => other_offset == offset, } } } } pub fn is_store_id_equal(&self, other: &StorageLocation) -> bool { match self { StorageLocation::Cached => { matches!(other, StorageLocation::Cached) } StorageLocation::AppendVec(store_id, _) => { match other { StorageLocation::Cached => { false } StorageLocation::AppendVec(other_store_id, _) => other_store_id == store_id, } } } } } pub type OffsetReduced = u32; #[derive(Default, Debug, PartialEq, Eq, Clone, Copy)] pub struct AccountInfo { store_id: AppendVecId, reduced_offset: OffsetReduced, stored_size_mask: StoredSize, } const IS_ZERO_LAMPORT_FLAG: StoredSize = 1 << (StoredSize::BITS - 1); const IS_CACHED_STORE_ID_FLAG: StoredSize = 1 << (StoredSize::BITS - 2); const ALL_FLAGS: StoredSize = IS_ZERO_LAMPORT_FLAG | IS_CACHED_STORE_ID_FLAG; impl ZeroLamport for AccountInfo { fn is_zero_lamport(&self) -> bool { self.stored_size_mask & IS_ZERO_LAMPORT_FLAG == IS_ZERO_LAMPORT_FLAG } } impl IsCached for AccountInfo { fn is_cached(&self) -> bool { self.stored_size_mask & IS_CACHED_STORE_ID_FLAG == IS_CACHED_STORE_ID_FLAG } } impl IsCached for StorageLocation { fn is_cached(&self) -> bool { matches!(self, StorageLocation::Cached) } } const CACHE_VIRTUAL_STORAGE_ID: AppendVecId = AppendVecId::MAX; impl AccountInfo { pub fn new(storage_location: StorageLocation, stored_size: StoredSize, lamports: u64) -> Self { assert_eq!(stored_size & ALL_FLAGS, 0); let mut 
stored_size_mask = stored_size; let (store_id, raw_offset) = match storage_location { StorageLocation::AppendVec(store_id, offset) => (store_id, offset), StorageLocation::Cached => { stored_size_mask |= IS_CACHED_STORE_ID_FLAG; (CACHE_VIRTUAL_STORAGE_ID, CACHE_VIRTUAL_OFFSET) } }; if lamports == 0 { stored_size_mask |= IS_ZERO_LAMPORT_FLAG; } let reduced_offset: OffsetReduced = (raw_offset / ALIGN_BOUNDARY_OFFSET) as OffsetReduced; let result = Self { store_id, reduced_offset, stored_size_mask, }; assert_eq!(result.offset(), raw_offset, "illegal offset"); result } pub fn store_id(&self) -> AppendVecId { assert!(!self.is_cached()); self.store_id } pub fn offset(&self) -> Offset { (self.reduced_offset as Offset) * ALIGN_BOUNDARY_OFFSET } pub fn stored_size(&self) -> StoredSize { self.stored_size_mask & !ALL_FLAGS } pub fn matches_storage_location(&self, store_id: AppendVecId, offset: Offset) -> bool { self.store_id == store_id && self.offset() == offset && !self.is_cached() } pub fn storage_location(&self) -> StorageLocation { if self.is_cached() { StorageLocation::Cached } else { StorageLocation::AppendVec(self.store_id, self.offset()) } } } #[cfg(test)] mod test { use {super::*, crate::append_vec::MAXIMUM_APPEND_VEC_FILE_SIZE}; #[test] fn test_limits() { for offset in [ MAXIMUM_APPEND_VEC_FILE_SIZE as Offset, 0, ALIGN_BOUNDARY_OFFSET, 4 * ALIGN_BOUNDARY_OFFSET, ] { let info = AccountInfo::new(StorageLocation::AppendVec(0, offset), 0, 0); assert!(info.offset() == offset); } } #[test] #[should_panic(expected = "illegal offset")] fn test_alignment() { let offset = 1; AccountInfo::new(StorageLocation::AppendVec(0, offset), 0, 0); } #[test] fn test_matches_storage_location() { let offset = 0; let id = 0; let info = AccountInfo::new(StorageLocation::AppendVec(id, offset), 0, 0); assert!(info.matches_storage_location(id, offset)); let offset = ALIGN_BOUNDARY_OFFSET; assert!(!info.matches_storage_location(id, offset)); let offset = 0; let id = 1; 
assert!(!info.matches_storage_location(id, offset)); let id = CACHE_VI
}
RTUAL_STORAGE_ID; let info = AccountInfo::new(StorageLocation::Cached, 0, 0); assert!(!info.matches_storage_location(id, offset)); }
function_block-function_prefixed
[ { "content": "pub fn is_sysvar_id(id: &Pubkey) -> bool {\n\n ALL_IDS.iter().any(|key| key == id)\n\n}\n\n\n\n/// Declares an ID that implements [`SysvarId`].\n\n#[macro_export]\n\nmacro_rules! declare_sysvar_id(\n\n ($name:expr, $type:ty) => (\n\n $crate::declare_id!($name);\n\n\n\n impl $crate::sysvar::SysvarId for $type {\n\n fn id() -> $crate::pubkey::Pubkey {\n\n id()\n\n }\n\n\n\n fn check_id(pubkey: &$crate::pubkey::Pubkey) -> bool {\n\n check_id(pubkey)\n\n }\n\n }\n\n\n", "file_path": "sdk/program/src/sysvar/mod.rs", "rank": 0, "score": 392744.15703830356 }, { "content": "pub fn build_balance_message(lamports: u64, use_lamports_unit: bool, show_unit: bool) -> String {\n\n build_balance_message_with_config(\n\n lamports,\n\n &BuildBalanceMessageConfig {\n\n use_lamports_unit,\n\n show_unit,\n\n ..BuildBalanceMessageConfig::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "cli-output/src/display.rs", "rank": 1, "score": 392328.75173838995 }, { "content": "pub fn info(config_file: &str, local_info_only: bool, eval: bool) -> Result<(), String> {\n\n let config = Config::load(config_file)?;\n\n\n\n if eval {\n\n println!(\n\n \"SOLANA_INSTALL_ACTIVE_RELEASE={}\",\n\n &config.active_release_dir().to_str().unwrap_or(\"\")\n\n );\n\n config\n\n .explicit_release\n\n .map(|er| match er {\n\n ExplicitRelease::Semver(semver) => semver,\n\n ExplicitRelease::Channel(channel) => channel,\n\n })\n\n .and_then(|channel| {\n\n println!(\"SOLANA_INSTALL_ACTIVE_CHANNEL={}\", channel,);\n\n Option::<String>::None\n\n });\n\n return Ok(());\n\n }\n", "file_path": "install/src/command.rs", "rank": 2, "score": 386530.95343980176 }, { "content": "pub fn recv_from(batch: &mut PacketBatch, socket: &UdpSocket, max_wait_ms: u64) -> Result<usize> {\n\n let mut i = 0;\n\n //DOCUMENTED SIDE-EFFECT\n\n //Performance out of the IO without poll\n\n // * block on the socket until it's readable\n\n // * set the socket to non blocking\n\n // * read until it fails\n\n // * set it back to 
blocking before returning\n\n socket.set_nonblocking(false)?;\n\n trace!(\"receiving on {}\", socket.local_addr().unwrap());\n\n let start = Instant::now();\n\n loop {\n\n batch.resize(\n\n std::cmp::min(i + NUM_RCVMMSGS, PACKETS_PER_BATCH),\n\n Packet::default(),\n\n );\n\n match recv_mmsg(socket, &mut batch[i..]) {\n\n Err(_) if i > 0 => {\n\n if start.elapsed().as_millis() as u64 > max_wait_ms {\n\n break;\n", "file_path": "streamer/src/packet.rs", "rank": 3, "score": 356909.91296266543 }, { "content": "/// Format data as hex.\n\n///\n\n/// If `data`'s length is greater than 0, add a field called \"data\" to `f`. The\n\n/// first 64 bytes of `data` is displayed; bytes after that are ignored.\n\npub fn debug_account_data(data: &[u8], f: &mut fmt::DebugStruct<'_, '_>) {\n\n let data_len = cmp::min(MAX_DEBUG_ACCOUNT_DATA, data.len());\n\n if data_len > 0 {\n\n f.field(\"data\", &Hex(&data[..data_len]));\n\n }\n\n}\n\n\n\npub(crate) struct Hex<'a>(pub(crate) &'a [u8]);\n\nimpl fmt::Debug for Hex<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for &byte in self.0 {\n\n write!(f, \"{:02x}\", byte)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "sdk/program/src/debug_account_data.rs", "rank": 4, "score": 348275.408960791 }, { "content": "pub fn add_genesis_accounts(genesis_config: &mut GenesisConfig, mut issued_lamports: u64) {\n\n // add_stakes() and add_validators() award tokens for rent exemption and\n\n // to cover an initial transfer-free period of the network\n\n\n\n issued_lamports += add_stakes(\n\n genesis_config,\n\n CREATOR_STAKER_INFOS,\n\n &UNLOCKS_HALF_AT_9_MONTHS,\n\n ) + add_stakes(\n\n genesis_config,\n\n SERVICE_STAKER_INFOS,\n\n &UNLOCKS_ALL_AT_9_MONTHS,\n\n ) + add_stakes(\n\n genesis_config,\n\n FOUNDATION_STAKER_INFOS,\n\n &UNLOCKS_ALL_DAY_ZERO,\n\n ) + add_stakes(genesis_config, GRANTS_STAKER_INFOS, &UNLOCKS_ALL_DAY_ZERO)\n\n + add_stakes(\n\n genesis_config,\n\n COMMUNITY_STAKER_INFOS,\n", "file_path": 
"genesis/src/genesis_accounts.rs", "rank": 5, "score": 346203.6286580678 }, { "content": "pub fn parse_get_epoch_info(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetEpochInfo,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 6, "score": 344467.0118484084 }, { "content": "pub fn parse_supply(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let print_accounts = matches.is_present(\"print_accounts\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::Supply { print_accounts },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 7, "score": 337390.9570174261 }, { "content": "pub fn parse_decode_transaction(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let blob = value_t_or_exit!(matches, \"transaction\", String);\n\n let binary_encoding = match matches.value_of(\"encoding\").unwrap() {\n\n \"base58\" => TransactionBinaryEncoding::Base58,\n\n \"base64\" => TransactionBinaryEncoding::Base64,\n\n _ => unreachable!(),\n\n };\n\n\n\n let encoded_transaction = EncodedTransaction::Binary(blob, binary_encoding);\n\n if let Some(transaction) = encoded_transaction.decode() {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::DecodeTransaction(transaction),\n\n signers: vec![],\n\n })\n\n } else {\n\n Err(CliError::BadParameter(\n\n \"Unable to decode transaction\".to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "cli/src/wallet.rs", "rank": 8, "score": 337390.9570174261 }, { "content": "pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> {\n\n value_of(matches, name).map(sol_to_lamports)\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 9, "score": 336678.5429464319 }, { "content": "#[doc(hidden)]\n\npub fn checked_add(a: u64, b: u64) -> Result<u64, InstructionError> {\n\n a.checked_add(b).ok_or(InstructionError::InsufficientFunds)\n\n}\n\n\n\n/// 
Describes a single account read or written by a program during instruction\n\n/// execution.\n\n///\n\n/// When constructing an [`Instruction`], a list of all accounts that may be\n\n/// read or written during the execution of that instruction must be supplied.\n\n/// Any account that may be mutated by the program during execution, either its\n\n/// data or metadata such as held lamports, must be writable.\n\n///\n\n/// Note that because the Solana runtime schedules parallel transaction\n\n/// execution around which accounts are writable, care should be taken that only\n\n/// accounts which actually may be mutated are specified as writable. As the\n\n/// default [`AccountMeta::new`] constructor creates writable accounts, this is\n\n/// a minor hazard: use [`AccountMeta::new_readonly`] to specify that an account\n\n/// is not writable.\n\n#[repr(C)]\n\n#[derive(Debug, Default, PartialEq, Eq, Clone, Serialize, Deserialize)]\n", "file_path": "sdk/program/src/instruction.rs", "rank": 10, "score": 333428.7267325333 }, { "content": "pub fn parse_show_validators(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let use_lamports_unit = matches.is_present(\"lamports\");\n\n let number_validators = matches.is_present(\"number\");\n\n let reverse_sort = matches.is_present(\"reverse\");\n\n let keep_unstaked_delinquents = matches.is_present(\"keep_unstaked_delinquents\");\n\n let delinquent_slot_distance = value_of(matches, \"delinquent_slot_distance\");\n\n\n\n let sort_order = match value_t_or_exit!(matches, \"sort\", String).as_str() {\n\n \"delinquent\" => CliValidatorsSortOrder::Delinquent,\n\n \"commission\" => CliValidatorsSortOrder::Commission,\n\n \"credits\" => CliValidatorsSortOrder::EpochCredits,\n\n \"identity\" => CliValidatorsSortOrder::Identity,\n\n \"last-vote\" => CliValidatorsSortOrder::LastVote,\n\n \"root\" => CliValidatorsSortOrder::Root,\n\n \"skip-rate\" => CliValidatorsSortOrder::SkipRate,\n\n \"stake\" => 
CliValidatorsSortOrder::Stake,\n\n \"vote-account\" => CliValidatorsSortOrder::VoteAccount,\n\n \"version\" => CliValidatorsSortOrder::Version,\n\n _ => unreachable!(),\n\n };\n", "file_path": "cli/src/cluster_query.rs", "rank": 11, "score": 332228.4574413766 }, { "content": "pub fn parse_get_epoch(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetEpoch,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 12, "score": 332228.4574413766 }, { "content": "pub fn parse_get_block(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let slot = value_of(matches, \"slot\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetBlock { slot },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 13, "score": 332228.4574413766 }, { "content": "pub fn parse_largest_accounts(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let filter = if matches.is_present(\"circulating\") {\n\n Some(RpcLargestAccountsFilter::Circulating)\n\n } else if matches.is_present(\"non_circulating\") {\n\n Some(RpcLargestAccountsFilter::NonCirculating)\n\n } else {\n\n None\n\n };\n\n Ok(CliCommandInfo {\n\n command: CliCommand::LargestAccounts { filter },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 14, "score": 332228.4574413766 }, { "content": "pub fn parse_leader_schedule(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let epoch = value_of(matches, \"epoch\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::LeaderSchedule { epoch },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 15, "score": 332228.4574413766 }, { "content": "pub fn parse_show_stake_history(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let use_lamports_unit = matches.is_present(\"lamports\");\n\n let limit_results = 
value_of(matches, \"limit\").unwrap();\n\n Ok(CliCommandInfo {\n\n command: CliCommand::ShowStakeHistory {\n\n use_lamports_unit,\n\n limit_results,\n\n },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/stake.rs", "rank": 16, "score": 332228.4574413766 }, { "content": "pub fn parse_get_slot(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetSlot,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 17, "score": 332228.4574413766 }, { "content": "pub fn parse_total_supply(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::TotalSupply,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 18, "score": 332228.4574413766 }, { "content": "pub fn load_genesis_accounts(file: &str, genesis_config: &mut GenesisConfig) -> io::Result<u64> {\n\n let mut lamports = 0;\n\n let accounts_file = File::open(file)?;\n\n\n\n let genesis_accounts: HashMap<String, Base64Account> =\n\n serde_yaml::from_reader(accounts_file)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", err)))?;\n\n\n\n for (key, account_details) in genesis_accounts {\n\n let pubkey = pubkey_from_str(key.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid pubkey/keypair {}: {:?}\", key, err),\n\n )\n\n })?;\n\n\n\n let owner_program_id = Pubkey::from_str(account_details.owner.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid owner: {}: {:?}\", account_details.owner, err),\n", "file_path": "genesis/src/main.rs", "rank": 19, "score": 327988.0046824709 }, { "content": "pub fn parse_get_transaction_count(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetTransactionCount,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": 
"cli/src/cluster_query.rs", "rank": 20, "score": 327298.9371430176 }, { "content": "pub fn parse_get_block_height(_matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetBlockHeight,\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 21, "score": 327298.9371430176 }, { "content": "pub fn parse_get_block_time(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let slot = value_of(matches, \"slot\");\n\n Ok(CliCommandInfo {\n\n command: CliCommand::GetBlockTime { slot },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 22, "score": 327298.9371430176 }, { "content": "pub fn parse_show_block_production(matches: &ArgMatches<'_>) -> Result<CliCommandInfo, CliError> {\n\n let epoch = value_t!(matches, \"epoch\", Epoch).ok();\n\n let slot_limit = value_t!(matches, \"slot_limit\", u64).ok();\n\n\n\n Ok(CliCommandInfo {\n\n command: CliCommand::ShowBlockProduction { epoch, slot_limit },\n\n signers: vec![],\n\n })\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 23, "score": 327298.93714301765 }, { "content": "// same as genesis_config::create_genesis_config, but with bootstrap_validator staking logic\n\n// for the core crate tests\n\npub fn create_genesis_config(mint_lamports: u64) -> GenesisConfigInfo {\n\n create_genesis_config_with_leader(\n\n mint_lamports,\n\n &solana_sdk::pubkey::new_rand(),\n\n bootstrap_validator_stake_lamports(),\n\n )\n\n}\n", "file_path": "ledger/src/genesis_utils.rs", "rank": 24, "score": 321818.65266600804 }, { "content": "pub fn create_genesis_config(mint_lamports: u64) -> GenesisConfigInfo {\n\n // Note that zero lamports for validator stake will result in stake account\n\n // not being stored in accounts-db but still cached in bank stakes. 
This\n\n // causes discrepancy between cached stakes accounts in bank and\n\n // accounts-db which in particular will break snapshots test.\n\n create_genesis_config_with_leader(\n\n mint_lamports,\n\n &solana_sdk::pubkey::new_rand(), // validator_pubkey\n\n 0, // validator_stake_lamports\n\n )\n\n}\n\n\n", "file_path": "runtime/src/genesis_utils.rs", "rank": 25, "score": 321805.20022950484 }, { "content": "pub fn check_for_tracer_packet(packet: &mut Packet) -> bool {\n\n let first_pubkey_start: usize = TRACER_KEY_OFFSET_IN_TRANSACTION;\n\n let first_pubkey_end = match first_pubkey_start.checked_add(size_of::<Pubkey>()) {\n\n Some(offset) => offset,\n\n None => return false,\n\n };\n\n // Check for tracer pubkey\n\n match packet.data(first_pubkey_start..first_pubkey_end) {\n\n Some(pubkey) if pubkey == TRACER_KEY.as_ref() => {\n\n packet.meta.set_tracer(true);\n\n true\n\n }\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "perf/src/sigverify.rs", "rank": 26, "score": 317754.78846294957 }, { "content": "/// Helper function for programs to call [`GetMinimumDelegation`] and then fetch the return data\n\n///\n\n/// This fn handles performing the CPI to call the [`GetMinimumDelegation`] function, and then\n\n/// calls [`get_return_data()`] to fetch the return data.\n\n///\n\n/// [`GetMinimumDelegation`]: super::instruction::StakeInstruction::GetMinimumDelegation\n\n/// [`get_return_data()`]: crate::program::get_return_data\n\npub fn get_minimum_delegation() -> Result<u64, ProgramError> {\n\n let instruction = super::instruction::get_minimum_delegation();\n\n crate::program::invoke_unchecked(&instruction, &[])?;\n\n get_minimum_delegation_return_data()\n\n}\n\n\n", "file_path": "sdk/program/src/stake/tools.rs", "rank": 27, "score": 316760.68725986104 }, { "content": "// fun fact: rustc is very close to make this const fn.\n\npub fn bootstrap_validator_stake_lamports() -> u64 {\n\n Rent::default().minimum_balance(StakeState::size_of())\n\n}\n\n\n\n// Number of lamports 
automatically used for genesis accounts\n\npub const fn genesis_sysvar_and_builtin_program_lamports() -> u64 {\n\n const NUM_BUILTIN_PROGRAMS: u64 = 4;\n\n const FEES_SYSVAR_MIN_BALANCE: u64 = 946_560;\n\n const STAKE_HISTORY_MIN_BALANCE: u64 = 114_979_200;\n\n const CLOCK_SYSVAR_MIN_BALANCE: u64 = 1_169_280;\n\n const RENT_SYSVAR_MIN_BALANCE: u64 = 1_009_200;\n\n const EPOCH_SCHEDULE_SYSVAR_MIN_BALANCE: u64 = 1_120_560;\n\n const RECENT_BLOCKHASHES_SYSVAR_MIN_BALANCE: u64 = 42_706_560;\n\n\n\n FEES_SYSVAR_MIN_BALANCE\n\n + STAKE_HISTORY_MIN_BALANCE\n\n + CLOCK_SYSVAR_MIN_BALANCE\n\n + RENT_SYSVAR_MIN_BALANCE\n\n + EPOCH_SCHEDULE_SYSVAR_MIN_BALANCE\n\n + RECENT_BLOCKHASHES_SYSVAR_MIN_BALANCE\n", "file_path": "runtime/src/genesis_utils.rs", "rank": 28, "score": 316424.61475510825 }, { "content": "pub fn update(config_file: &str, check_only: bool) -> Result<bool, String> {\n\n init_or_update(config_file, false, check_only)\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 29, "score": 316134.8873880227 }, { "content": "pub fn create_account(lamports: u64, separate_domains: bool) -> RefCell<AccountSharedData> {\n\n RefCell::new(\n\n AccountSharedData::new_data_with_space(\n\n lamports,\n\n &Versions::new(State::Uninitialized, separate_domains),\n\n State::size(),\n\n &crate::system_program::id(),\n\n )\n\n .expect(\"nonce_account\"),\n\n )\n\n}\n\n\n", "file_path": "sdk/src/nonce_account.rs", "rank": 30, "score": 315246.39339322114 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn run_simulation(stakes: &[u64], fanout: usize) {\n\n let num_threads = num_threads();\n\n // set timeout to 5 minutes\n\n let timeout = 60 * 5;\n\n\n\n // describe the leader\n\n let leader_info = ContactInfo::new_localhost(&solana_sdk::pubkey::new_rand(), 0);\n\n let cluster_info = ClusterInfo::new(\n\n leader_info.clone(),\n\n Arc::new(Keypair::new()),\n\n SocketAddrSpace::Unspecified,\n\n );\n\n\n\n // setup staked nodes\n\n let mut staked_nodes = HashMap::new();\n\n\n\n 
// setup accounts for all nodes (leader has 0 bal)\n\n let (s, r) = unbounded();\n\n let senders: Arc<Mutex<HashMap<Pubkey, Sender<(i32, bool)>>>> =\n\n Arc::new(Mutex::new(HashMap::new()));\n", "file_path": "gossip/tests/cluster_info.rs", "rank": 31, "score": 314431.6721822928 }, { "content": "/// Approximately convert fractional native tokens (lamports) into native tokens (SOL)\n\npub fn lamports_to_sol(lamports: u64) -> f64 {\n\n lamports as f64 / LAMPORTS_PER_SOL as f64\n\n}\n\n\n", "file_path": "sdk/program/src/native_token.rs", "rank": 32, "score": 309632.6382775324 }, { "content": "pub fn lamports_of_sol(matches: &ArgMatches, name: &str) -> Option<u64> {\n\n value_of(matches, name).map(sol_to_lamports)\n\n}\n\n\n", "file_path": "clap-v3-utils/src/input_parsers.rs", "rank": 33, "score": 309361.78459706303 }, { "content": "pub fn init_or_update(config_file: &str, is_init: bool, check_only: bool) -> Result<bool, String> {\n\n let mut config = Config::load(config_file)?;\n\n\n\n let semver_update_type = if is_init {\n\n SemverUpdateType::Fixed\n\n } else {\n\n SemverUpdateType::Patch\n\n };\n\n\n\n let (updated_version, download_url_and_sha256, release_dir) = if let Some(explicit_release) =\n\n &config.explicit_release\n\n {\n\n match explicit_release {\n\n ExplicitRelease::Semver(current_release_semver) => {\n\n let progress_bar = new_spinner_progress_bar();\n\n progress_bar.set_message(format!(\"{}Checking for updates...\", LOOKING_GLASS));\n\n\n\n let github_release = check_for_newer_github_release(\n\n semver::VersionReq::parse(&format!(\n\n \"{}{}\",\n", "file_path": "install/src/command.rs", "rank": 34, "score": 308979.70543065824 }, { "content": "/// Computes the numbr of milliseconds `num_blocks` blocks will take given\n\n/// each slot contains `ticks_per_slot`\n\npub fn ms_for_n_slots(num_blocks: u64, ticks_per_slot: u64) -> u64 {\n\n ((ticks_per_slot * DEFAULT_MS_PER_SLOT * num_blocks) + DEFAULT_TICKS_PER_SLOT - 1)\n\n / 
DEFAULT_TICKS_PER_SLOT\n\n}\n\n\n", "file_path": "local-cluster/tests/common.rs", "rank": 35, "score": 308028.8563601868 }, { "content": "fn stake_weighted_shuffle(stakes_and_index: &[(u64, usize)], seed: [u8; 32]) -> Vec<(u64, usize)> {\n\n let mut rng = ChaChaRng::from_seed(seed);\n\n let stake_weights: Vec<_> = stakes_and_index.iter().map(|(w, _)| *w).collect();\n\n let shuffle = WeightedShuffle::new(\"stake_weighted_shuffle\", &stake_weights);\n\n shuffle\n\n .shuffle(&mut rng)\n\n .map(|i| stakes_and_index[i])\n\n .collect()\n\n}\n\n\n", "file_path": "gossip/tests/cluster_info.rs", "rank": 36, "score": 306589.7603793087 }, { "content": "/// Search for the a node with the given balance\n\nfn find_insert_shred(id: &Pubkey, shred: i32, batches: &mut [Nodes]) {\n\n batches.par_iter_mut().for_each(|batch| {\n\n if batch.contains_key(id) {\n\n let _ = batch.get_mut(id).unwrap().1.insert(shred);\n\n }\n\n });\n\n}\n\n\n", "file_path": "gossip/tests/cluster_info.rs", "rank": 37, "score": 304011.5955035154 }, { "content": "pub fn ed25519_verify_cpu(batches: &mut [PacketBatch], reject_non_vote: bool, packet_count: usize) {\n\n debug!(\"CPU ECDSA for {}\", packet_count);\n\n let desired_thread_count = packet_count\n\n .saturating_add(VERIFY_MIN_PACKETS_PER_THREAD)\n\n .saturating_div(VERIFY_MIN_PACKETS_PER_THREAD);\n\n if desired_thread_count <= 1 {\n\n // When using single thread, skip rayon overhead.\n\n batches.iter_mut().for_each(|batch| {\n\n batch.iter_mut().for_each(|packet| {\n\n if !packet.meta.discard() && !verify_packet(packet, reject_non_vote) {\n\n packet.meta.set_discard(true);\n\n }\n\n })\n\n });\n\n } else if desired_thread_count < get_thread_count() {\n\n // Dynamically compute minimum packet length to spread the load while minimizing threads.\n\n let packets_per_thread = packet_count.saturating_div(desired_thread_count);\n\n PAR_THREAD_POOL.install(|| {\n\n batches\n\n .into_par_iter()\n", "file_path": "perf/src/sigverify.rs", "rank": 38, "score": 
303790.8855595963 }, { "content": "pub fn read_u8(current: &mut usize, data: &[u8]) -> Result<u8, SanitizeError> {\n\n if data.len() < *current + 1 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = data[*current];\n\n *current += 1;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 39, "score": 302848.27634783497 }, { "content": "pub fn read_u16(current: &mut usize, data: &[u8]) -> Result<u16, SanitizeError> {\n\n if data.len() < *current + 2 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let mut fixed_data = [0u8; 2];\n\n fixed_data.copy_from_slice(&data[*current..*current + 2]);\n\n let e = u16::from_le_bytes(fixed_data);\n\n *current += 2;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 40, "score": 302848.27634783497 }, { "content": "pub fn read_pubkey(current: &mut usize, data: &[u8]) -> Result<Pubkey, SanitizeError> {\n\n let len = std::mem::size_of::<Pubkey>();\n\n if data.len() < *current + len {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = Pubkey::new(&data[*current..*current + len]);\n\n *current += len;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 41, "score": 302848.27634783497 }, { "content": "pub fn add_genesis_accounts(genesis_config: &mut GenesisConfig) -> u64 {\n\n config::add_genesis_account(genesis_config)\n\n}\n\n\n\n/// The minimum stake amount that can be delegated, in lamports.\n\n/// NOTE: This is also used to calculate the minimum balance of a stake account, which is the\n\n/// rent exempt reserve _plus_ the minimum stake delegation.\n", "file_path": "programs/stake/src/lib.rs", "rank": 42, "score": 302433.560792087 }, { "content": "pub fn add_genesis_account(genesis_config: &mut GenesisConfig) -> u64 {\n\n let mut account = create_config_account(vec![], &Config::default(), 0);\n\n let lamports = genesis_config.rent.minimum_balance(account.data().len());\n\n\n\n 
account.set_lamports(lamports.max(1));\n\n\n\n genesis_config.add_account(config::id(), account);\n\n\n\n lamports\n\n}\n", "file_path": "programs/stake/src/config.rs", "rank": 43, "score": 302433.560792087 }, { "content": "#[test]\n\npub fn cluster_info_retransmit() {\n\n solana_logger::setup();\n\n let exit = Arc::new(AtomicBool::new(false));\n\n trace!(\"c1:\");\n\n let (c1, dr1, tn1) = test_node(&exit);\n\n trace!(\"c2:\");\n\n let (c2, dr2, tn2) = test_node(&exit);\n\n trace!(\"c3:\");\n\n let (c3, dr3, tn3) = test_node(&exit);\n\n let c1_contact_info = c1.my_contact_info();\n\n\n\n c2.insert_info(c1_contact_info.clone());\n\n c3.insert_info(c1_contact_info);\n\n\n\n let num = 3;\n\n\n\n //wait to converge\n\n trace!(\"waiting to converge:\");\n\n let mut done = false;\n\n for _ in 0..30 {\n", "file_path": "gossip/tests/gossip.rs", "rank": 44, "score": 301892.0214559047 }, { "content": "#[test]\n\n#[ignore]\n\npub fn cluster_info_scale() {\n\n use {\n\n solana_measure::measure::Measure,\n\n solana_perf::test_tx::test_tx,\n\n solana_runtime::{\n\n bank::Bank,\n\n genesis_utils::{create_genesis_config_with_vote_accounts, ValidatorVoteKeypairs},\n\n },\n\n };\n\n solana_logger::setup();\n\n let exit = Arc::new(AtomicBool::new(false));\n\n let num_nodes: usize = std::env::var(\"NUM_NODES\")\n\n .unwrap_or_else(|_| \"10\".to_string())\n\n .parse()\n\n .expect(\"could not parse NUM_NODES as a number\");\n\n\n\n let vote_keypairs: Vec<_> = (0..num_nodes)\n\n .map(|_| ValidatorVoteKeypairs::new_rand())\n\n .collect();\n\n let genesis_config_info = create_genesis_config_with_vote_accounts(\n", "file_path": "gossip/tests/gossip.rs", "rank": 45, "score": 301891.9504263966 }, { "content": "#[allow(clippy::integer_arithmetic)]\n\nfn check_overlapping_do_not_use(src_addr: u64, dst_addr: u64, n: u64) -> bool {\n\n (src_addr <= dst_addr && src_addr + n > dst_addr)\n\n || (dst_addr <= src_addr && dst_addr + n > src_addr)\n\n}\n\n\n", "file_path": 
"programs/bpf_loader/src/syscalls.rs", "rank": 46, "score": 299843.1479505471 }, { "content": "// Check if the provided program id as a known SPL Token program id\n\npub fn is_known_spl_token_id(program_id: &Pubkey) -> bool {\n\n *program_id == spl_token_id() || *program_id == spl_token_2022_id()\n\n}\n\n\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 47, "score": 294585.30090657074 }, { "content": "/// Return true if the first keyed_account is executable, used to determine if\n\n/// the loader should call a program's 'main'\n\npub fn is_executable(keyed_accounts: &[KeyedAccount]) -> Result<bool, InstructionError> {\n\n Ok(!keyed_accounts.is_empty() && keyed_accounts[0].executable()?)\n\n}\n\n\n\nimpl<'a, T> State<T> for crate::keyed_account::KeyedAccount<'a>\n\nwhere\n\n T: serde::Serialize + serde::de::DeserializeOwned,\n\n{\n\n fn state(&self) -> Result<T, InstructionError> {\n\n self.try_account_ref()?.state()\n\n }\n\n fn set_state(&self, state: &T) -> Result<(), InstructionError> {\n\n self.try_account_ref_mut()?.set_state(state)\n\n }\n\n}\n", "file_path": "sdk/src/keyed_account.rs", "rank": 48, "score": 294136.9710972106 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn recv_mmsg(socket: &UdpSocket, packets: &mut [Packet]) -> io::Result</*num packets:*/ usize> {\n\n debug_assert!(packets.iter().all(|pkt| pkt.meta == Meta::default()));\n\n let mut i = 0;\n\n let count = cmp::min(NUM_RCVMMSGS, packets.len());\n\n for p in packets.iter_mut().take(count) {\n\n p.meta.size = 0;\n\n match socket.recv_from(p.buffer_mut()) {\n\n Err(_) if i > 0 => {\n\n break;\n\n }\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n Ok((nrecv, from)) => {\n\n p.meta.size = nrecv;\n\n p.meta.set_socket_addr(&from);\n\n if i == 0 {\n\n socket.set_nonblocking(true)?;\n\n }\n\n }\n\n }\n\n i += 1;\n\n }\n\n Ok(i)\n\n}\n\n\n", "file_path": "streamer/src/recvmmsg.rs", "rank": 49, "score": 293169.12655833317 }, { "content": "#[cfg(target_os = 
\"linux\")]\n\n#[allow(clippy::uninit_assumed_init)]\n\npub fn recv_mmsg(sock: &UdpSocket, packets: &mut [Packet]) -> io::Result</*num packets:*/ usize> {\n\n // Assert that there are no leftovers in packets.\n\n debug_assert!(packets.iter().all(|pkt| pkt.meta == Meta::default()));\n\n const SOCKADDR_STORAGE_SIZE: usize = mem::size_of::<sockaddr_storage>();\n\n\n\n let mut hdrs: [mmsghdr; NUM_RCVMMSGS] = unsafe { mem::zeroed() };\n\n let mut iovs: [iovec; NUM_RCVMMSGS] = unsafe { mem::MaybeUninit::uninit().assume_init() };\n\n let mut addrs: [sockaddr_storage; NUM_RCVMMSGS] = unsafe { mem::zeroed() };\n\n\n\n let sock_fd = sock.as_raw_fd();\n\n let count = cmp::min(iovs.len(), packets.len());\n\n\n\n for (packet, hdr, iov, addr) in\n\n izip!(packets.iter_mut(), &mut hdrs, &mut iovs, &mut addrs).take(count)\n\n {\n\n let buffer = packet.buffer_mut();\n\n *iov = iovec {\n\n iov_base: buffer.as_mut_ptr() as *mut libc::c_void,\n\n iov_len: buffer.len(),\n\n };\n", "file_path": "streamer/src/recvmmsg.rs", "rank": 50, "score": 293169.12655833317 }, { "content": "fn parse_settings(matches: &ArgMatches<'_>) -> Result<bool, Box<dyn error::Error>> {\n\n let parse_args = match matches.subcommand() {\n\n (\"config\", Some(matches)) => {\n\n let config_file = match matches.value_of(\"config_file\") {\n\n None => {\n\n println!(\n\n \"{} Either provide the `--config` arg or ensure home directory exists to use the default config location\",\n\n style(\"No config file found.\").bold()\n\n );\n\n return Ok(false);\n\n }\n\n Some(config_file) => config_file,\n\n };\n\n let mut config = Config::load(config_file).unwrap_or_default();\n\n\n\n match matches.subcommand() {\n\n (\"get\", Some(subcommand_matches)) => {\n\n let (url_setting_type, json_rpc_url) =\n\n ConfigInput::compute_json_rpc_url_setting(\"\", &config.json_rpc_url);\n\n let (ws_setting_type, websocket_url) =\n", "file_path": "cli/src/main.rs", "rank": 51, "score": 293063.59119374276 }, { "content": "/// Approximately 
convert native tokens (SOL) into fractional native tokens (lamports)\n\npub fn sol_to_lamports(sol: f64) -> u64 {\n\n (sol * LAMPORTS_PER_SOL as f64) as u64\n\n}\n\n\n\nuse std::fmt::{Debug, Display, Formatter, Result};\n\npub struct Sol(pub u64);\n\n\n\nimpl Sol {\n\n fn write_in_sol(&self, f: &mut Formatter) -> Result {\n\n write!(\n\n f,\n\n \"◎{}.{:09}\",\n\n self.0 / LAMPORTS_PER_SOL,\n\n self.0 % LAMPORTS_PER_SOL\n\n )\n\n }\n\n}\n\n\n\nimpl Display for Sol {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n self.write_in_sol(f)\n\n }\n\n}\n\n\n\nimpl Debug for Sol {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n self.write_in_sol(f)\n\n }\n\n}\n", "file_path": "sdk/program/src/native_token.rs", "rank": 52, "score": 292714.1969021525 }, { "content": "#[cfg(not(windows))]\n\npub fn stop_process(process: &mut Child) -> Result<(), io::Error> {\n\n use {\n\n nix::{\n\n errno::Errno::{EINVAL, EPERM, ESRCH},\n\n sys::signal::{kill, Signal},\n\n unistd::Pid,\n\n },\n\n std::{\n\n io::ErrorKind,\n\n thread,\n\n time::{Duration, Instant},\n\n },\n\n };\n\n\n\n let nice_wait = Duration::from_secs(5);\n\n let pid = Pid::from_raw(process.id() as i32);\n\n match kill(pid, Signal::SIGINT) {\n\n Ok(()) => {\n\n let expire = Instant::now() + nice_wait;\n\n while let Ok(None) = process.try_wait() {\n", "file_path": "install/src/stop_process.rs", "rank": 53, "score": 291561.81816975446 }, { "content": "#[cfg(not(target_os = \"solana\"))]\n\npub fn split_u64(\n\n amount: u64,\n\n lo_bit_length: usize,\n\n hi_bit_length: usize,\n\n) -> Result<(u64, u64), ProofError> {\n\n assert!(lo_bit_length <= 64);\n\n assert!(hi_bit_length <= 64);\n\n\n\n if !bool::from((amount >> (lo_bit_length + hi_bit_length)).ct_eq(&0u64)) {\n\n return Err(ProofError::TransferAmount);\n\n }\n\n\n\n let lo = amount << (64 - lo_bit_length) >> (64 - lo_bit_length);\n\n let hi = amount >> lo_bit_length;\n\n\n\n Ok((lo, hi))\n\n}\n\n\n", "file_path": "zk-token-sdk/src/instruction/mod.rs", "rank": 54, 
"score": 289015.5451990266 }, { "content": "pub fn copy_return_values(sig_lens: &[Vec<u32>], out: &PinnedVec<u8>, rvs: &mut [Vec<u8>]) {\n\n let mut num = 0;\n\n for (vs, sig_vs) in rvs.iter_mut().zip(sig_lens.iter()) {\n\n for (v, sig_v) in vs.iter_mut().zip(sig_vs.iter()) {\n\n if *sig_v == 0 {\n\n *v = 0;\n\n } else {\n\n let mut vout = 1;\n\n for _ in 0..*sig_v {\n\n if 0 == out[num] {\n\n vout = 0;\n\n }\n\n num = num.saturating_add(1);\n\n }\n\n *v = vout;\n\n }\n\n if *v != 0 {\n\n trace!(\"VERIFIED PACKET!!!!!\");\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "perf/src/sigverify.rs", "rank": 55, "score": 288484.96715814364 }, { "content": "/// return timestamp as ms\n\npub fn timestamp() -> u64 {\n\n let now = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"create timestamp in timing\");\n\n duration_as_ms(&now)\n\n}\n\n\n\npub const SECONDS_PER_YEAR: f64 = 365.242_199 * 24.0 * 60.0 * 60.0;\n\n\n", "file_path": "sdk/src/timing.rs", "rank": 56, "score": 286581.67556795524 }, { "content": "// Return an error if a validator field is longer than the max length.\n\npub fn is_short_field(string: String) -> Result<(), String> {\n\n if string.len() > MAX_SHORT_FIELD_LENGTH {\n\n Err(format!(\n\n \"validator field longer than {:?}-byte limit\",\n\n MAX_SHORT_FIELD_LENGTH\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "cli/src/validator_info.rs", "rank": 57, "score": 285301.0404440205 }, { "content": "// Return an error if url field is too long or cannot be parsed.\n\npub fn check_url(string: String) -> Result<(), String> {\n\n is_url(string.clone())?;\n\n if string.len() > MAX_SHORT_FIELD_LENGTH {\n\n Err(format!(\n\n \"url longer than {:?}-byte limit\",\n\n MAX_SHORT_FIELD_LENGTH\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "cli/src/validator_info.rs", "rank": 58, "score": 285301.0404440205 }, { "content": "#[allow(clippy::same_item_push)]\n\npub fn create_ticks(num_ticks: u64, hashes_per_tick: u64, mut hash: Hash) -> 
Vec<Entry> {\n\n let mut ticks = Vec::with_capacity(num_ticks as usize);\n\n for _ in 0..num_ticks {\n\n let new_tick = next_entry_mut(&mut hash, hashes_per_tick, vec![]);\n\n ticks.push(new_tick);\n\n }\n\n\n\n ticks\n\n}\n\n\n", "file_path": "entry/src/entry.rs", "rank": 59, "score": 283163.3284964023 }, { "content": "pub fn create_test_account(sample: usize) -> (StoredMeta, AccountSharedData) {\n\n let data_len = sample % 256;\n\n let mut account = AccountSharedData::new(sample as u64, 0, &Pubkey::default());\n\n account.set_data((0..data_len).map(|_| data_len as u8).collect());\n\n let stored_meta = StoredMeta {\n\n write_version: 0,\n\n pubkey: Pubkey::default(),\n\n data_len: data_len as u64,\n\n };\n\n (stored_meta, account)\n\n}\n", "file_path": "runtime/src/append_vec/test_utils.rs", "rank": 60, "score": 283076.69283467135 }, { "content": "fn run_tpu_send_transaction(tpu_use_quic: bool) {\n\n let mint_keypair = Keypair::new();\n\n let mint_pubkey = mint_keypair.pubkey();\n\n let test_validator =\n\n TestValidator::with_no_fees(mint_pubkey, None, SocketAddrSpace::Unspecified);\n\n let rpc_client = Arc::new(RpcClient::new_with_commitment(\n\n test_validator.rpc_url(),\n\n CommitmentConfig::processed(),\n\n ));\n\n let connection_cache = Arc::new(ConnectionCache::new(\n\n tpu_use_quic,\n\n DEFAULT_TPU_CONNECTION_POOL_SIZE,\n\n ));\n\n let tpu_client = TpuClient::new_with_connection_cache(\n\n rpc_client.clone(),\n\n &test_validator.rpc_pubsub_url(),\n\n TpuClientConfig::default(),\n\n connection_cache,\n\n )\n\n .unwrap();\n", "file_path": "rpc-test/tests/rpc.rs", "rank": 61, "score": 282586.35242217686 }, { "content": "/// Check if the detected device is a valid `Ledger device` by checking both the product ID and the vendor ID\n\npub fn is_valid_ledger(vendor_id: u16, product_id: u16) -> bool {\n\n vendor_id == LEDGER_VID\n\n && (LEDGER_NANO_S_PIDS.contains(&product_id) || LEDGER_NANO_X_PIDS.contains(&product_id))\n\n}\n\n\n", "file_path": 
"remote-wallet/src/ledger.rs", "rank": 62, "score": 281349.5106993562 }, { "content": "/// Invoke a cross-program instruction.\n\n///\n\n/// Invoking one program from another program requires an [`Instruction`]\n\n/// containing the program ID of the other program, instruction data that\n\n/// will be understood by the other program, and a list of [`AccountInfo`]s\n\n/// corresponding to all of the accounts accessed by the other program. Because\n\n/// the only way for a program to acquire `AccountInfo` values is by receiving\n\n/// them from the runtime at the [program entrypoint][entrypoint!], any account\n\n/// required by the callee program must transitively be required by the caller\n\n/// program, and provided by _its_ caller. The same is true of the program ID of\n\n/// the called program.\n\n///\n\n/// The `Instruction` is usually built from within the calling program, but may\n\n/// be deserialized from an external source.\n\n///\n\n/// This function will not return if the called program returns anything other\n\n/// than success. If the callee returns an error or aborts then the entire\n\n/// transaction will immediately fail. To return data as the result of a\n\n/// cross-program invocation use the [`set_return_data`] / [`get_return_data`]\n\n/// functions, or have the callee write to a dedicated account for that purpose.\n\n///\n\n/// A program may directly call itself recursively, but may not be indirectly\n\n/// called recursively (reentered) by another program. Indirect reentrancy will\n\n/// cause the transaction to immediately fail.\n\n///\n\n/// # Validation of shared data between programs\n\n///\n\n/// The `AccountInfo` structures passed to this function contain data that is\n\n/// directly accessed by the runtime and is copied to and from the memory space\n\n/// of the called program. 
Some of that data, the [`AccountInfo::lamports`] and\n\n/// [`AccountInfo::data`] fields, may be mutated as a side-effect of the called\n\n/// program, if that program has writable access to the given account.\n\n///\n\n/// These two fields are stored in [`RefCell`]s to enforce the aliasing\n\n/// discipline for mutated values required by the Rust language. Prior to\n\n/// invoking the runtime, this routine will test that each `RefCell` is\n\n/// borrowable as required by the callee and return an error if not.\n\n///\n\n/// The CPU cost of these runtime checks can be avoided with the unsafe\n\n/// [`invoke_unchecked`] function.\n\n///\n\n/// [`RefCell`]: std::cell::RefCell\n\n///\n\n/// # Errors\n\n///\n\n/// If the called program completes successfully and violates no runtime\n\n/// invariants, then this function will return successfully. If the callee\n\n/// completes and returns a [`ProgramError`], then the transaction will\n\n/// immediately fail. Control will not return to the caller.\n\n///\n\n/// Various runtime invariants are checked before the callee is invoked and\n\n/// before returning control to the caller. If any of these invariants are\n\n/// violated then the transaction will immediately fail. 
A non-exhaustive list\n\n/// of these invariants includes:\n\n///\n\n/// - The sum of lamports owned by all referenced accounts has not changed.\n\n/// - A program has not debited lamports from an account it does not own.\n\n/// - A program has not otherwise written to an account that it does not own.\n\n/// - A program has not written to an account that is not writable.\n\n/// - The size of account data has not exceeded applicable limits.\n\n///\n\n/// If the invoked program does not exist or is not executable then\n\n/// the transaction will immediately fail.\n\n///\n\n/// If any of the `RefCell`s within the provided `AccountInfo`s cannot be\n\n/// borrowed in accordance with the call's requirements, an error of\n\n/// [`ProgramError::AccountBorrowFailed`] is returned.\n\n///\n\n/// [`ProgramError`]: crate::program_error::ProgramError\n\n/// [`ProgramError::AccountBorrowFailed`]: crate::program_error::ProgramError::AccountBorrowFailed\n\n///\n\n/// # Examples\n\n///\n\n/// A simple example of transferring lamports via CPI:\n\n///\n\n/// ```\n\n/// use solana_program::{\n\n/// account_info::{next_account_info, AccountInfo},\n\n/// entrypoint,\n\n/// entrypoint::ProgramResult,\n\n/// program::invoke,\n\n/// pubkey::Pubkey,\n\n/// system_instruction,\n\n/// system_program,\n\n/// };\n\n///\n\n/// entrypoint!(process_instruction);\n\n///\n\n/// fn process_instruction(\n\n/// program_id: &Pubkey,\n\n/// accounts: &[AccountInfo],\n\n/// instruction_data: &[u8],\n\n/// ) -> ProgramResult {\n\n/// let account_info_iter = &mut accounts.iter();\n\n///\n\n/// let payer = next_account_info(account_info_iter)?;\n\n/// let recipient = next_account_info(account_info_iter)?;\n\n/// // The system program is a required account to invoke a system\n\n/// // instruction, even though we don't use it directly.\n\n/// let system_program_account = next_account_info(account_info_iter)?;\n\n///\n\n/// assert!(payer.is_writable);\n\n/// assert!(payer.is_signer);\n\n/// 
assert!(recipient.is_writable);\n\n/// assert!(system_program::check_id(system_program_account.key));\n\n///\n\n/// let lamports = 1000000;\n\n///\n\n/// invoke(\n\n/// &system_instruction::transfer(payer.key, recipient.key, lamports),\n\n/// &[payer.clone(), recipient.clone(), system_program_account.clone()],\n\n/// )\n\n/// }\n\n/// ```\n\npub fn invoke(instruction: &Instruction, account_infos: &[AccountInfo]) -> ProgramResult {\n\n invoke_signed(instruction, account_infos, &[])\n\n}\n\n\n", "file_path": "sdk/program/src/program.rs", "rank": 63, "score": 281248.1852152104 }, { "content": "#[allow(clippy::result_unit_err)]\n\npub fn decode_shortu16_len(bytes: &[u8]) -> Result<(usize, usize), ()> {\n\n let mut val = 0;\n\n for (nth_byte, byte) in bytes.iter().take(MAX_ENCODING_LENGTH).enumerate() {\n\n match visit_byte(*byte, val, nth_byte).map_err(|_| ())? {\n\n VisitStatus::More(new_val) => val = new_val,\n\n VisitStatus::Done(new_val) => {\n\n return Ok((usize::from(new_val), nth_byte.saturating_add(1)));\n\n }\n\n }\n\n }\n\n Err(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n\n assert_matches::assert_matches,\n\n bincode::{deserialize, serialize},\n\n };\n", "file_path": "sdk/program/src/short_vec.rs", "rank": 64, "score": 281151.4766003186 }, { "content": "pub fn new_test_vote_tx<R>(rng: &mut R) -> Transaction\n\nwhere\n\n R: CryptoRng + RngCore,\n\n{\n\n let mut slots: Vec<Slot> = std::iter::repeat_with(|| rng.gen()).take(5).collect();\n\n slots.sort_unstable();\n\n slots.dedup();\n\n let switch_proof_hash = rng.gen_bool(0.5).then(|| solana_sdk::hash::new_rand(rng));\n\n vote_transaction::new_vote_transaction(\n\n slots,\n\n solana_sdk::hash::new_rand(rng), // bank_hash\n\n solana_sdk::hash::new_rand(rng), // blockhash\n\n &Keypair::generate(rng), // node_keypair\n\n &Keypair::generate(rng), // vote_keypair\n\n &Keypair::generate(rng), // authorized_voter_keypair\n\n switch_proof_hash,\n\n )\n\n}\n", "file_path": "perf/src/test_tx.rs", 
"rank": 65, "score": 280785.8355750196 }, { "content": "fn deposit_many(bank: &Bank, pubkeys: &mut Vec<Pubkey>, num: usize) -> Result<(), LamportsError> {\n\n for t in 0..num {\n\n let pubkey = solana_sdk::pubkey::new_rand();\n\n let account =\n\n AccountSharedData::new((t + 1) as u64, 0, AccountSharedData::default().owner());\n\n pubkeys.push(pubkey);\n\n assert!(bank.get_account(&pubkey).is_none());\n\n bank.deposit(&pubkey, (t + 1) as u64)?;\n\n assert_eq!(bank.get_account(&pubkey).unwrap(), account);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "runtime/benches/accounts.rs", "rank": 66, "score": 280116.43889215693 }, { "content": "// Return an error if a validator details are longer than the max length.\n\npub fn check_details_length(string: String) -> Result<(), String> {\n\n if string.len() > MAX_LONG_FIELD_LENGTH {\n\n Err(format!(\n\n \"validator details longer than {:?}-byte limit\",\n\n MAX_LONG_FIELD_LENGTH\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "cli/src/validator_info.rs", "rank": 67, "score": 279919.35310558707 }, { "content": "pub fn cluster_type_of(matches: &ArgMatches<'_>, name: &str) -> Option<ClusterType> {\n\n value_of(matches, name)\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 68, "score": 279531.7390459175 }, { "content": "pub fn max_ticks_per_n_shreds(num_shreds: u64, shred_data_size: Option<usize>) -> u64 {\n\n let ticks = create_ticks(1, 0, Hash::default());\n\n max_entries_per_n_shred(&ticks[0], num_shreds, shred_data_size)\n\n}\n\n\n", "file_path": "ledger/src/shred.rs", "rank": 69, "score": 276878.0587683183 }, { "content": "/// Invoke a cross-program instruction but don't enforce Rust's aliasing rules.\n\n///\n\n/// This function is like [`invoke`] except that it does not check that\n\n/// [`RefCell`]s within [`AccountInfo`]s are properly borrowable as described in\n\n/// the documentation for that function. 
Those checks consume CPU cycles that\n\n/// this function avoids.\n\n///\n\n/// [`RefCell`]: std::cell::RefCell\n\n///\n\n/// # Safety\n\n///\n\n/// __This function is incorrectly missing an `unsafe` declaration.__\n\n///\n\n/// If any of the writable accounts passed to the callee contain data that is\n\n/// borrowed within the calling program, and that data is written to by the\n\n/// callee, then Rust's aliasing rules will be violated and cause undefined\n\n/// behavior.\n\npub fn invoke_unchecked(instruction: &Instruction, account_infos: &[AccountInfo]) -> ProgramResult {\n\n invoke_signed_unchecked(instruction, account_infos, &[])\n\n}\n\n\n", "file_path": "sdk/program/src/program.rs", "rank": 70, "score": 276678.85992667463 }, { "content": "pub fn open_db(path: &str, dry_run: bool) -> Result<PickleDb, Error> {\n\n let policy = if dry_run {\n\n PickleDbDumpPolicy::NeverDump\n\n } else {\n\n PickleDbDumpPolicy::DumpUponRequest\n\n };\n\n let path = Path::new(path);\n\n let db = if path.exists() {\n\n PickleDb::load_yaml(path, policy)?\n\n } else {\n\n if let Some(parent) = path.parent() {\n\n fs::create_dir_all(parent).unwrap();\n\n }\n\n PickleDb::new_yaml(path, policy)\n\n };\n\n Ok(db)\n\n}\n\n\n", "file_path": "tokens/src/db.rs", "rank": 71, "score": 275907.6552429135 }, { "content": "pub fn get_fees_for_messages(messages: &[Message], client: &RpcClient) -> Result<u64, Error> {\n\n // This is an arbitrary value to get regular blockhash updates for balance checks without\n\n // hitting the RPC node with too many requests\n\n const BLOCKHASH_REFRESH_MILLIS: u64 = DEFAULT_MS_PER_SLOT * 32;\n\n\n\n let mut latest_blockhash = client.get_latest_blockhash()?;\n\n let mut now = Instant::now();\n\n let mut fees = 0;\n\n for mut message in messages.iter().cloned() {\n\n if now.elapsed() > Duration::from_millis(BLOCKHASH_REFRESH_MILLIS) {\n\n latest_blockhash = client.get_latest_blockhash()?;\n\n now = Instant::now();\n\n }\n\n message.recent_blockhash = 
latest_blockhash;\n\n fees += client.get_fee_for_message(&message)?;\n\n }\n\n Ok(fees)\n\n}\n\n\n", "file_path": "tokens/src/commands.rs", "rank": 72, "score": 275891.10070906265 }, { "content": "#[allow(clippy::same_item_push)]\n\npub fn create_random_ticks(num_ticks: u64, max_hashes_per_tick: u64, mut hash: Hash) -> Vec<Entry> {\n\n let mut ticks = Vec::with_capacity(num_ticks as usize);\n\n for _ in 0..num_ticks {\n\n let hashes_per_tick = thread_rng().gen_range(1, max_hashes_per_tick);\n\n let new_tick = next_entry_mut(&mut hash, hashes_per_tick, vec![]);\n\n ticks.push(new_tick);\n\n }\n\n\n\n ticks\n\n}\n\n\n", "file_path": "entry/src/entry.rs", "rank": 73, "score": 275775.73724449583 }, { "content": "// useful for basic tests\n\npub fn create_genesis_config(lamports: u64) -> (GenesisConfig, Keypair) {\n\n let faucet_keypair = Keypair::new();\n\n (\n\n GenesisConfig::new(\n\n &[(\n\n faucet_keypair.pubkey(),\n\n AccountSharedData::new(lamports, 0, &system_program::id()),\n\n )],\n\n &[],\n\n ),\n\n faucet_keypair,\n\n )\n\n}\n\n\n\nimpl Default for GenesisConfig {\n\n fn default() -> Self {\n\n Self {\n\n creation_time: SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n", "file_path": "sdk/src/genesis_config.rs", "rank": 74, "score": 275155.5347348731 }, { "content": "#[cfg_attr(not(target_os = \"linux\"), allow(dead_code))]\n\nfn parse_udp_stats(reader: &mut impl BufRead) -> Result<UdpStats, String> {\n\n let mut udp_lines = Vec::default();\n\n for line in reader.lines() {\n\n let line = line.map_err(|e| e.to_string())?;\n\n if line.starts_with(\"Udp:\") {\n\n udp_lines.push(line);\n\n if udp_lines.len() == 2 {\n\n break;\n\n }\n\n }\n\n }\n\n if udp_lines.len() != 2 {\n\n return Err(format!(\n\n \"parse error, expected 2 lines, num lines: {}\",\n\n udp_lines.len()\n\n ));\n\n }\n\n\n\n let pairs: Vec<_> = udp_lines[0]\n\n .split_ascii_whitespace()\n", "file_path": "core/src/system_monitor_service.rs", "rank": 75, "score": 274939.5013633986 
}, { "content": "pub fn next_entry_mut(start: &mut Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {\n\n let entry = Entry::new(start, num_hashes, transactions);\n\n *start = entry.hash;\n\n entry\n\n}\n\n\n", "file_path": "entry/src/entry.rs", "rank": 76, "score": 273876.75310222694 }, { "content": "fn check_loader_id(id: &Pubkey) -> bool {\n\n bpf_loader::check_id(id)\n\n || bpf_loader_deprecated::check_id(id)\n\n || bpf_loader_upgradeable::check_id(id)\n\n}\n\n\n", "file_path": "programs/bpf_loader/src/lib.rs", "rank": 77, "score": 271152.0147084979 }, { "content": "pub fn main() -> Result<(), String> {\n\n solana_logger::setup();\n\n\n\n let matches = App::new(crate_name!())\n\n .about(crate_description!())\n\n .version(solana_version::version!())\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .arg({\n\n let arg = Arg::with_name(\"config_file\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .value_name(\"PATH\")\n\n .takes_value(true)\n\n .global(true)\n\n .help(\"Configuration file to use\");\n\n match *defaults::CONFIG_FILE {\n\n Some(ref config_file) => arg.default_value(config_file),\n\n None => arg.required(true),\n\n }\n\n })\n", "file_path": "install/src/lib.rs", "rank": 78, "score": 271141.49512706866 }, { "content": "pub fn lamports_per_signature_of(account: &AccountSharedData) -> Option<u64> {\n\n match StateMut::<Versions>::state(account).ok()?.state() {\n\n State::Initialized(data) => Some(data.fee_calculator.lamports_per_signature),\n\n State::Uninitialized => None,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n\n crate::{\n\n fee_calculator::FeeCalculator,\n\n nonce::state::{Data, DurableNonce},\n\n pubkey::Pubkey,\n\n system_program,\n\n },\n\n };\n\n\n\n #[test]\n", "file_path": "sdk/src/nonce_account.rs", "rank": 79, "score": 270316.8390019919 }, { "content": "#[bench]\n\nfn test_statuscache_serialize(bencher: &mut Bencher) {\n\n let mut status_cache = BankStatusCache::default();\n\n 
status_cache.add_root(0);\n\n status_cache.clear();\n\n for hash_index in 0..100 {\n\n let blockhash = Hash::new(&vec![hash_index; std::mem::size_of::<Hash>()]);\n\n let mut id = blockhash;\n\n for _ in 0..100 {\n\n id = hash(id.as_ref());\n\n let mut sigbytes = Vec::from(id.as_ref());\n\n id = hash(id.as_ref());\n\n sigbytes.extend(id.as_ref());\n\n let sig = Signature::new(&sigbytes);\n\n status_cache.insert(&blockhash, &sig, 0, ());\n\n }\n\n }\n\n bencher.iter(|| {\n\n let _ = serialize(&status_cache.slot_deltas(&[0])).unwrap();\n\n });\n\n}\n", "file_path": "runtime/benches/status_cache.rs", "rank": 80, "score": 269896.6243642488 }, { "content": "pub fn overwrite_slot_hashes_with_slots(context: &mut ProgramTestContext, slots: &[Slot]) {\n\n let mut slot_hashes = SlotHashes::default();\n\n for slot in slots {\n\n slot_hashes.add(*slot, Hash::new_unique());\n\n }\n\n context.set_sysvar(&slot_hashes);\n\n}\n", "file_path": "programs/address-lookup-table-tests/tests/common.rs", "rank": 81, "score": 269683.35722813755 }, { "content": "pub fn create_account(feature: &Feature, lamports: u64) -> AccountSharedData {\n\n let data_len = Feature::size_of().max(bincode::serialized_size(feature).unwrap() as usize);\n\n let mut account = AccountSharedData::new(lamports, data_len, &id());\n\n to_account(feature, &mut account).unwrap();\n\n account\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn feature_deserialize_none() {\n\n let just_initialized = AccountSharedData::new(42, Feature::size_of(), &id());\n\n assert_eq!(\n\n from_account(&just_initialized),\n\n Some(Feature { activated_at: None })\n\n );\n\n }\n\n}\n", "file_path": "sdk/src/feature.rs", "rank": 82, "score": 268993.17022435454 }, { "content": "/// Returns Neighbor Nodes and Children Nodes `(neighbors, children)` for a given node based on its stake\n\npub fn compute_retransmit_peers<T: Copy>(\n\n fanout: usize,\n\n index: usize, // Local node's index withing the nodes slice.\n\n 
nodes: &[T],\n\n) -> (Vec<T> /*neighbors*/, Vec<T> /*children*/) {\n\n // 1st layer: fanout nodes starting at 0\n\n // 2nd layer: fanout**2 nodes starting at fanout\n\n // 3rd layer: fanout**3 nodes starting at fanout + fanout**2\n\n // ...\n\n // Each layer is divided into neighborhoods of fanout nodes each.\n\n let offset = index % fanout; // Node's index within its neighborhood.\n\n let anchor = index - offset; // First node in the neighborhood.\n\n let neighbors = (anchor..)\n\n .take(fanout)\n\n .map(|i| nodes.get(i).copied())\n\n .while_some()\n\n .collect();\n\n let children = ((anchor + 1) * fanout + offset..)\n\n .step_by(fanout)\n\n .take(fanout)\n", "file_path": "gossip/src/cluster_info.rs", "rank": 83, "score": 268959.0922242513 }, { "content": "/// Check if a program is a precompiled program\n\npub fn is_precompile<F>(program_id: &Pubkey, is_enabled: F) -> bool\n\nwhere\n\n F: Fn(&Pubkey) -> bool,\n\n{\n\n PRECOMPILES\n\n .iter()\n\n .any(|precompile| precompile.check_id(program_id, |feature_id| is_enabled(feature_id)))\n\n}\n\n\n", "file_path": "sdk/src/precompiles.rs", "rank": 84, "score": 268709.78486750013 }, { "content": "// Get the activated stake percentage (based on the provided bank) that is visible in gossip\n\nfn get_stake_percent_in_gossip(bank: &Bank, cluster_info: &ClusterInfo, log: bool) -> u64 {\n\n let mut online_stake = 0;\n\n let mut wrong_shred_stake = 0;\n\n let mut wrong_shred_nodes = vec![];\n\n let mut offline_stake = 0;\n\n let mut offline_nodes = vec![];\n\n\n\n let mut total_activated_stake = 0;\n\n let now = timestamp();\n\n // Nodes contact infos are saved to disk and restored on validator startup.\n\n // Staked nodes entries will not expire until an epoch after. 
So it\n\n // is necessary here to filter for recent entries to establish liveness.\n\n let peers: HashMap<_, _> = cluster_info\n\n .all_tvu_peers()\n\n .into_iter()\n\n .filter(|node| {\n\n let age = now.saturating_sub(node.wallclock);\n\n // Contact infos are refreshed twice during this period.\n\n age < CRDS_GOSSIP_PULL_CRDS_TIMEOUT_MS\n\n })\n", "file_path": "core/src/validator.rs", "rank": 85, "score": 268104.8108337479 }, { "content": "pub fn compute_hash_time_ns(hashes_sample_size: u64) -> u64 {\n\n info!(\"Running {} hashes...\", hashes_sample_size);\n\n let mut v = Hash::default();\n\n let start = Instant::now();\n\n for _ in 0..hashes_sample_size {\n\n v = hash(v.as_ref());\n\n }\n\n start.elapsed().as_nanos() as u64\n\n}\n\n\n", "file_path": "entry/src/poh.rs", "rank": 86, "score": 267879.4877889093 }, { "content": "pub fn main_init() -> Result<(), String> {\n\n solana_logger::setup();\n\n\n\n let matches = App::new(\"solana-install-init\")\n\n .about(\"Initializes a new installation\")\n\n .version(solana_version::version!())\n\n .arg({\n\n let arg = Arg::with_name(\"config_file\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .value_name(\"PATH\")\n\n .takes_value(true)\n\n .help(\"Configuration file to use\");\n\n match *defaults::CONFIG_FILE {\n\n Some(ref config_file) => arg.default_value(config_file),\n\n None => arg.required(true),\n\n }\n\n })\n\n .arg({\n\n let arg = Arg::with_name(\"data_dir\")\n", "file_path": "install/src/lib.rs", "rank": 87, "score": 266010.0368365113 }, { "content": "pub fn create_account(lamports: u64, config: &Config) -> AccountSharedData {\n\n create_config_account(vec![], config, lamports)\n\n}\n\n\n", "file_path": "programs/stake/src/config.rs", "rank": 88, "score": 264169.01049803814 }, { "content": "#[inline]\n\npub fn sol_memset(s: &mut [u8], c: u8, n: usize) {\n\n #[cfg(target_os = \"solana\")]\n\n unsafe {\n\n crate::syscalls::sol_memset_(s.as_mut_ptr(), c, n as u64);\n\n }\n\n\n\n #[cfg(not(target_os = 
\"solana\"))]\n\n crate::program_stubs::sol_memset(s.as_mut_ptr(), c, n);\n\n}\n", "file_path": "sdk/program/src/program_memory.rs", "rank": 89, "score": 263815.49865804287 }, { "content": "pub fn serialize_snapshot_data_file<F>(data_file_path: &Path, serializer: F) -> Result<u64>\n\nwhere\n\n F: FnOnce(&mut BufWriter<File>) -> Result<()>,\n\n{\n\n serialize_snapshot_data_file_capped::<F>(\n\n data_file_path,\n\n MAX_SNAPSHOT_DATA_FILE_SIZE,\n\n serializer,\n\n )\n\n}\n\n\n", "file_path": "runtime/src/snapshot_utils.rs", "rank": 90, "score": 263483.71577955876 }, { "content": "pub fn set_entries_for_tests_only(entries: usize) {\n\n NUM_ENTRIES.store(entries, Ordering::Relaxed);\n\n}\n\n\n\npub type SlotHash = (Slot, Hash);\n\n\n\n#[repr(C)]\n\n#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Default)]\n\npub struct SlotHashes(Vec<SlotHash>);\n\n\n\nimpl SlotHashes {\n\n pub fn add(&mut self, slot: Slot, hash: Hash) {\n\n match self.binary_search_by(|(probe, _)| slot.cmp(probe)) {\n\n Ok(index) => (self.0)[index] = (slot, hash),\n\n Err(index) => (self.0).insert(index, (slot, hash)),\n\n }\n\n (self.0).truncate(get_entries());\n\n }\n\n pub fn position(&self, slot: &Slot) -> Option<usize> {\n\n self.binary_search_by(|(probe, _)| slot.cmp(probe)).ok()\n", "file_path": "sdk/program/src/slot_hashes.rs", "rank": 91, "score": 263039.3769318061 }, { "content": "fn checked_total_count_increment(total_count: u64, limit_count: u64) -> Result<u64> {\n\n let total_count = total_count + 1;\n\n if total_count > limit_count {\n\n return Err(UnpackError::Archive(format!(\n\n \"too many files in snapshot: {:?}\",\n\n total_count\n\n )));\n\n }\n\n Ok(total_count)\n\n}\n\n\n", "file_path": "runtime/src/hardened_unpack.rs", "rank": 92, "score": 262881.61984795856 }, { "content": "fn debug_fmt<T: ReadableAccount>(item: &T, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut f = f.debug_struct(\"Account\");\n\n\n\n f.field(\"lamports\", &item.lamports())\n\n 
.field(\"data.len\", &item.data().len())\n\n .field(\"owner\", &item.owner())\n\n .field(\"executable\", &item.executable())\n\n .field(\"rent_epoch\", &item.rent_epoch());\n\n debug_account_data(item.data(), &mut f);\n\n\n\n f.finish()\n\n}\n\n\n\nimpl fmt::Debug for Account {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n debug_fmt(self, f)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for AccountSharedData {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n debug_fmt(self, f)\n\n }\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 93, "score": 261433.02519797103 }, { "content": "pub fn realloc(program_id: &Pubkey, address: &Pubkey, size: usize, bump: &mut u8) -> Instruction {\n\n let mut instruction_data = vec![REALLOC, *bump];\n\n instruction_data.extend_from_slice(&size.to_le_bytes());\n\n\n\n *bump = bump.saturating_add(1);\n\n\n\n Instruction::new_with_bytes(\n\n *program_id,\n\n &instruction_data,\n\n vec![AccountMeta::new(*address, false)],\n\n )\n\n}\n\n\n", "file_path": "programs/bpf/rust/realloc/src/instructions.rs", "rank": 94, "score": 261061.23460435704 }, { "content": "pub fn create_loadable_account(name: &str, lamports: u64) -> AccountSharedData {\n\n create_loadable_account_with_fields(name, (lamports, INITIAL_RENT_EPOCH))\n\n}\n\n\n", "file_path": "sdk/src/native_loader.rs", "rank": 95, "score": 259593.67191487906 }, { "content": "/// Store the current `Instruction`'s index in the Instructions Sysvar data\n\npub fn store_current_index(data: &mut [u8], instruction_index: u16) {\n\n let last_index = data.len() - 2;\n\n data[last_index..last_index + 2].copy_from_slice(&instruction_index.to_le_bytes());\n\n}\n\n\n", "file_path": "sdk/program/src/sysvar/instructions.rs", "rank": 96, "score": 259474.1300784176 }, { "content": "pub fn activate_feature(genesis_config: &mut GenesisConfig, feature_id: Pubkey) {\n\n genesis_config.accounts.insert(\n\n feature_id,\n\n Account::from(feature::create_account(\n\n &Feature {\n\n 
activated_at: Some(0),\n\n },\n\n std::cmp::max(genesis_config.rent.minimum_balance(Feature::size_of()), 1),\n\n )),\n\n );\n\n}\n\n\n", "file_path": "runtime/src/genesis_utils.rs", "rank": 97, "score": 259453.44688201032 }, { "content": "pub fn create_account<S: Sysvar>(sysvar: &S, lamports: u64) -> Account {\n\n create_account_with_fields(sysvar, (lamports, INITIAL_RENT_EPOCH))\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 98, "score": 258853.19561595586 } ]
Rust
egui/src/grid.rs
katyo/egui
02db9ee5835a522ddf04308f259025388abf0185
use crate::*; #[derive(Clone, Debug, Default, PartialEq)] #[cfg_attr(feature = "persistence", derive(serde::Deserialize, serde::Serialize))] pub(crate) struct State { col_widths: Vec<f32>, row_heights: Vec<f32>, } impl State { fn set_min_col_width(&mut self, col: usize, width: f32) { self.col_widths .resize(self.col_widths.len().max(col + 1), 0.0); self.col_widths[col] = self.col_widths[col].max(width); } fn set_min_row_height(&mut self, row: usize, height: f32) { self.row_heights .resize(self.row_heights.len().max(row + 1), 0.0); self.row_heights[row] = self.row_heights[row].max(height); } fn col_width(&self, col: usize) -> Option<f32> { self.col_widths.get(col).copied() } fn row_height(&self, row: usize) -> Option<f32> { self.row_heights.get(row).copied() } fn full_width(&self, x_spacing: f32) -> f32 { self.col_widths.iter().sum::<f32>() + (self.col_widths.len().at_least(1) - 1) as f32 * x_spacing } } pub(crate) struct GridLayout { ctx: CtxRef, style: std::sync::Arc<Style>, id: Id, prev_state: State, curr_state: State, spacing: Vec2, striped: bool, initial_x: f32, min_cell_size: Vec2, max_cell_size: Vec2, col: usize, row: usize, } impl GridLayout { pub(crate) fn new(ui: &Ui, id: Id) -> Self { let prev_state = ui.memory().id_data.get_or_default::<State>(id).clone(); let available = ui.placer().max_rect().intersect(ui.cursor()); let initial_x = available.min.x; assert!( initial_x.is_finite(), "Grid not yet available for right-to-left layouts" ); Self { ctx: ui.ctx().clone(), style: ui.style().clone(), id, prev_state, curr_state: State::default(), spacing: ui.spacing().item_spacing, striped: false, initial_x, min_cell_size: ui.spacing().interact_size, max_cell_size: Vec2::INFINITY, col: 0, row: 0, } } } impl GridLayout { fn prev_col_width(&self, col: usize) -> f32 { self.prev_state .col_width(col) .unwrap_or(self.min_cell_size.x) } fn prev_row_height(&self, row: usize) -> f32 { self.prev_state .row_height(row) .unwrap_or(self.min_cell_size.y) } pub(crate) fn 
wrap_text(&self) -> bool { self.max_cell_size.x.is_finite() } pub(crate) fn available_rect(&self, region: &Region) -> Rect { self.available_rect_finite(region) } pub(crate) fn available_rect_finite(&self, region: &Region) -> Rect { let width = if self.max_cell_size.x.is_finite() { self.max_cell_size.x } else { self.prev_state .col_width(self.col) .or_else(|| self.curr_state.col_width(self.col)) .unwrap_or(self.min_cell_size.x) }; let available = region.max_rect.intersect(region.cursor); let height = region.max_rect_finite().max.y - available.top(); let height = height .at_least(self.min_cell_size.y) .at_most(self.max_cell_size.y); Rect::from_min_size(available.min, vec2(width, height)) } pub(crate) fn next_cell(&self, cursor: Rect, child_size: Vec2) -> Rect { let width = self.prev_state.col_width(self.col).unwrap_or(0.0); let height = self.prev_row_height(self.row); let size = child_size.max(vec2(width, height)); Rect::from_min_size(cursor.min, size) } #[allow(clippy::unused_self)] pub(crate) fn align_size_within_rect(&self, size: Vec2, frame: Rect) -> Rect { Align2::LEFT_CENTER.align_size_within_rect(size, frame) } pub(crate) fn justify_and_align(&self, frame: Rect, size: Vec2) -> Rect { self.align_size_within_rect(size, frame) } pub(crate) fn advance(&mut self, cursor: &mut Rect, frame_rect: Rect, widget_rect: Rect) { let debug_expand_width = self.style.debug.show_expand_width; let debug_expand_height = self.style.debug.show_expand_height; if debug_expand_width || debug_expand_height { let rect = widget_rect; let too_wide = rect.width() > self.prev_col_width(self.col); let too_high = rect.height() > self.prev_row_height(self.row); if (debug_expand_width && too_wide) || (debug_expand_height && too_high) { let painter = self.ctx.debug_painter(); painter.rect_stroke(rect, 0.0, (1.0, Color32::LIGHT_BLUE)); let stroke = Stroke::new(2.5, Color32::from_rgb(200, 0, 0)); let paint_line_seg = |a, b| painter.line_segment([a, b], stroke); if debug_expand_width && too_wide { 
paint_line_seg(rect.left_top(), rect.left_bottom()); paint_line_seg(rect.left_center(), rect.right_center()); paint_line_seg(rect.right_top(), rect.right_bottom()); } } } self.curr_state .set_min_col_width(self.col, widget_rect.width().at_least(self.min_cell_size.x)); self.curr_state.set_min_row_height( self.row, widget_rect.height().at_least(self.min_cell_size.y), ); self.col += 1; cursor.min.x += frame_rect.width() + self.spacing.x; } pub(crate) fn end_row(&mut self, cursor: &mut Rect, painter: &Painter) { let row_height = self.prev_row_height(self.row); cursor.min.x = self.initial_x; cursor.min.y += row_height + self.spacing.y; self.col = 0; self.row += 1; if self.striped && self.row % 2 == 1 { if let Some(height) = self.prev_state.row_height(self.row) { let size = Vec2::new(self.prev_state.full_width(self.spacing.x), height); let rect = Rect::from_min_size(cursor.min, size); let rect = rect.expand2(0.5 * self.spacing.y * Vec2::Y); let rect = rect.expand2(2.0 * Vec2::X); let color = if self.style.visuals.dark_mode { Rgba::from_white_alpha(0.0075) } else { Rgba::from_black_alpha(0.075) }; painter.rect_filled(rect, 2.0, color); } } } pub(crate) fn save(&self) { if self.curr_state != self.prev_state { self.ctx .memory() .id_data .insert(self.id, self.curr_state.clone()); self.ctx.request_repaint(); } } } #[must_use = "You should call .show()"] pub struct Grid { id_source: Id, striped: bool, min_col_width: Option<f32>, min_row_height: Option<f32>, max_cell_size: Vec2, spacing: Option<Vec2>, start_row: usize, } impl Grid { pub fn new(id_source: impl std::hash::Hash) -> Self { Self { id_source: Id::new(id_source), striped: false, min_col_width: None, min_row_height: None, max_cell_size: Vec2::INFINITY, spacing: None, start_row: 0, } } pub fn striped(mut self, striped: bool) -> Self { self.striped = striped; self } pub fn min_col_width(mut self, min_col_width: f32) -> Self { self.min_col_width = Some(min_col_width); self } pub fn min_row_height(mut self, 
min_row_height: f32) -> Self { self.min_row_height = Some(min_row_height); self } pub fn max_col_width(mut self, max_col_width: f32) -> Self { self.max_cell_size.x = max_col_width; self } pub fn spacing(mut self, spacing: impl Into<Vec2>) -> Self { self.spacing = Some(spacing.into()); self } pub fn start_row(mut self, start_row: usize) -> Self { self.start_row = start_row; self } } impl Grid { pub fn show<R>(self, ui: &mut Ui, add_contents: impl FnOnce(&mut Ui) -> R) -> InnerResponse<R> { let Self { id_source, striped, min_col_width, min_row_height, max_cell_size, spacing, start_row, } = self; let min_col_width = min_col_width.unwrap_or_else(|| ui.spacing().interact_size.x); let min_row_height = min_row_height.unwrap_or_else(|| ui.spacing().interact_size.y); let spacing = spacing.unwrap_or_else(|| ui.spacing().item_spacing); ui.horizontal(|ui| { let id = ui.make_persistent_id(id_source); let grid = GridLayout { striped, spacing, min_cell_size: vec2(min_col_width, min_row_height), max_cell_size, row: start_row, ..GridLayout::new(ui, id) }; ui.set_grid(grid); let r = add_contents(ui); ui.save_grid(); r }) } }
use crate::*; #[derive(Clone, Debug, Default, PartialEq)] #[cfg_attr(feature = "persistence", derive(serde::Deserialize, serde::Serialize))] pub(crate) struct State { col_widths: Vec<f32>, row_heights: Vec<f32>, } impl State { fn set_min_col_width(&mut self, col: usize, width: f32) { self.col_widths .resize(self.col_widths.len().max(col + 1), 0.0); self.col_widths[col] = self.col_widths[col].max(width); } fn set_min_row_height(&mut self, row: usize, height: f32) { self.row_heights .resize(self.row_heights.len().max(row + 1), 0.0); self.row_heights[row] = self.row_heights[row].max(height); } fn col_width(&self, col: usize) -> Option<f32> { self.col_widths.get(col).copied() } fn row_height(&self, row: usize) -> Option<f32> { self.row_heights.get(row).copied() } fn full_width(&self, x_spacing: f32) -> f32 { self.col_widths.iter().sum::<f32>() + (self.col_widths.len().at_least(1) - 1) as f32 * x_spacing } } pub(crate) struct GridLayout { ctx: CtxRef, style: std::sync::Arc<Style>, id: Id, prev_state: State, curr_state: State, spacing: Vec2, striped: bool, initial_x: f32, min_cell_size: Vec2, max_cell_size: Vec2, col: usize, row: usize, } impl GridLayout { pub(crate) fn new(ui: &Ui, id: Id) -> Self { let prev_state = ui.memory().id_data.get_or_default::<State>(id).clone(); let available = ui.placer().max_rect().intersect(ui.cursor()); let initial_x = available.min.x; assert!( initial_x.is_finite(), "Grid not yet available for right-to-left layouts" ); Self { ctx: ui.ctx().clone(), style: ui.style().clone(), id, prev_state, curr_state: State::default(), spacing: ui.spacing().item_spacing, striped: false, initial_x, min_cell_size: ui.spacing().interact_size, max_cell_size: Vec2::INFINITY, col: 0, row: 0, } } } impl GridLayout { fn prev_col_width(&self, col: usize) -> f32 { self.prev_state .col_width(col) .unwrap_or(self.min_cell_size.x) } fn prev_row_height(&self, row: usize) -> f32 { self.prev_state .row_height(row) .unwrap_or(self.min_cell_size.y) } pub(crate) fn 
wrap_text(&self) -> bool { self.max_cell_size.x.is_finite() } pub(crate) fn available_rect(&self, region: &Region) -> Rect { self.available_rect_finite(region) } pub(crate) fn available_rect_finite(&self, region: &Region) -> Rect { let width = if self.max_cell_size.x.is_finite() { self.max_cell_size.x } else { self.prev_state .col_width(self.col) .or_else(|| self.curr_state.col_width(self.col)) .unwrap_or(self.min_cell_size.x) }; let available = region.max_rect.intersect(region.cursor); let height = region.max_rect_finite().max.y - available.top(); let height = height .at_least(self.min_cell_size.y) .at_most(self.max_cell_size.y); Rect::from_min_size(available.min, vec2(width, height)) } pub(crate) fn next_cell(&self, cursor: Rect, child_size: Vec2) -> Rect { let width = self.prev_state.col_width(self.col).unwrap_or(0.0); let height = self.prev_row_height(self.row); let size = child_size.max(vec2(width, height)); Rect::from_min_size(cursor.min, size) } #[allow(clippy::unused_self)] pub(crate) fn align_size_within_rect(&self, size: Vec2, frame: Rect) -> Rect { Align2::LEFT_CENTER.align_size_within_rect(size, frame) } pub(crate) fn justify_and_align(&self, frame: Rect, size: Vec2) -> Rect { self.align_size_within_rect(size, frame) } pub(crate) fn advance(&mut self, cursor: &mut Rect, frame_rect: Rect, widget_rect: Rect) { let debug_expand_width = self.style.debug.show_expand_width; let debug_expand_height = self.style.debug.show_expand_height; if debug_expand_width || debug_expand_height { let rect = widget_rect; let too_wide = rect.width() > self.prev_col_width(self.col); let too_high = rect.height() > self.prev_row_height(self.row); if (debug_expand_width && too_wide) || (debug_expand_height && too_high) { let painter = self.ctx.debug_painter(); painter.rect_stroke(rect, 0.0, (1.0, Color32::LIGHT_BLUE)); let stroke = Stroke::new(2.5, Color32::from_rgb(200, 0, 0)); let paint_line_seg = |a, b| painter.line_segment([a, b], stroke); if debug_expand_width && too_wide { 
paint_line_seg(rect.left_top(), rect.left_bottom()); paint_line_seg(rect.left_center(), rect.right_center()); paint_line_seg(rect.right_top(), rect.right_bottom()); } } } self.curr_state .set_min_col_width(self.col, widget_rect.width().at_least(self.min_cell_size.x)); self.curr_state.set_min_row_height( self.row, widget_rect.height().at_least(self.min_cell_size.y), ); self.col += 1; cursor.min.x += frame_rect.width() + self.spacing.x; } pub(crate) fn end_row(&mut self, cursor: &mut Rect, painter: &Painter) { let row_height = self.prev_row_height(self.row); cursor.min.x = self.initial_x; cursor.min.y += row_height + self.spacing.y; self.col = 0; self.row += 1; if self.striped && self.row % 2 == 1 { if let Some(height) = self.prev_state.row_height(self.row) { let size = Vec2::new(self.prev_state.full_width(self.spacing.x), height); let rect = Rect::from_min_size(cursor.min, size); let rect = rect.expand2(0.5 * self.spacing.y * Vec2::Y); let rect = rect.expand2(2.0 * Vec2::X); let color = if self.style.visuals.dark_mode {
pub(crate) fn save(&self) { if self.curr_state != self.prev_state { self.ctx .memory() .id_data .insert(self.id, self.curr_state.clone()); self.ctx.request_repaint(); } } } #[must_use = "You should call .show()"] pub struct Grid { id_source: Id, striped: bool, min_col_width: Option<f32>, min_row_height: Option<f32>, max_cell_size: Vec2, spacing: Option<Vec2>, start_row: usize, } impl Grid { pub fn new(id_source: impl std::hash::Hash) -> Self { Self { id_source: Id::new(id_source), striped: false, min_col_width: None, min_row_height: None, max_cell_size: Vec2::INFINITY, spacing: None, start_row: 0, } } pub fn striped(mut self, striped: bool) -> Self { self.striped = striped; self } pub fn min_col_width(mut self, min_col_width: f32) -> Self { self.min_col_width = Some(min_col_width); self } pub fn min_row_height(mut self, min_row_height: f32) -> Self { self.min_row_height = Some(min_row_height); self } pub fn max_col_width(mut self, max_col_width: f32) -> Self { self.max_cell_size.x = max_col_width; self } pub fn spacing(mut self, spacing: impl Into<Vec2>) -> Self { self.spacing = Some(spacing.into()); self } pub fn start_row(mut self, start_row: usize) -> Self { self.start_row = start_row; self } } impl Grid { pub fn show<R>(self, ui: &mut Ui, add_contents: impl FnOnce(&mut Ui) -> R) -> InnerResponse<R> { let Self { id_source, striped, min_col_width, min_row_height, max_cell_size, spacing, start_row, } = self; let min_col_width = min_col_width.unwrap_or_else(|| ui.spacing().interact_size.x); let min_row_height = min_row_height.unwrap_or_else(|| ui.spacing().interact_size.y); let spacing = spacing.unwrap_or_else(|| ui.spacing().item_spacing); ui.horizontal(|ui| { let id = ui.make_persistent_id(id_source); let grid = GridLayout { striped, spacing, min_cell_size: vec2(min_col_width, min_row_height), max_cell_size, row: start_row, ..GridLayout::new(ui, id) }; ui.set_grid(grid); let r = add_contents(ui); ui.save_grid(); r }) } }
Rgba::from_white_alpha(0.0075) } else { Rgba::from_black_alpha(0.075) }; painter.rect_filled(rect, 2.0, color); } } }
function_block-function_prefix_line
[ { "content": "pub fn show_tooltip_under(ctx: &CtxRef, id: Id, rect: &Rect, add_contents: impl FnOnce(&mut Ui)) {\n\n show_tooltip_at(\n\n ctx,\n\n id,\n\n Some(rect.left_bottom() + vec2(-2.0, 4.0)),\n\n add_contents,\n\n )\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 0, "score": 496410.7242350833 }, { "content": "/// Show a tooltip at the current pointer position (if any).\n\n///\n\n/// Most of the time it is easier to use [`Response::on_hover_ui`].\n\n///\n\n/// See also [`show_tooltip_text`].\n\n///\n\n/// ```\n\n/// # let mut ui = egui::Ui::__test();\n\n/// if ui.ui_contains_pointer() {\n\n/// egui::show_tooltip(ui.ctx(), egui::Id::new(\"my_tooltip\"), |ui| {\n\n/// ui.label(\"Helpful text\");\n\n/// });\n\n/// }\n\n/// ```\n\npub fn show_tooltip(ctx: &CtxRef, id: Id, add_contents: impl FnOnce(&mut Ui)) {\n\n show_tooltip_at_pointer(ctx, id, add_contents)\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 1, "score": 435091.79898475885 }, { "content": "pub fn show_color(ui: &mut Ui, color: impl Into<Hsva>, desired_size: Vec2) -> Response {\n\n show_hsva(ui, color.into(), desired_size)\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 2, "score": 435078.4534569054 }, { "content": "fn color_slider_1d(ui: &mut Ui, value: &mut f32, color_at: impl Fn(f32) -> Color32) -> Response {\n\n #![allow(clippy::identity_op)]\n\n\n\n let desired_size = vec2(\n\n ui.spacing().slider_width,\n\n ui.spacing().interact_size.y * 2.0,\n\n );\n\n let (rect, response) = ui.allocate_at_least(desired_size, Sense::click_and_drag());\n\n\n\n if let Some(mpos) = response.interact_pointer_pos() {\n\n *value = remap_clamp(mpos.x, rect.left()..=rect.right(), 0.0..=1.0);\n\n }\n\n\n\n let visuals = ui.style().interact(&response);\n\n\n\n background_checkers(ui.painter(), rect); // for alpha:\n\n\n\n {\n\n // fill color:\n\n let mut mesh = Mesh::default();\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 3, "score": 
434817.4546166628 }, { "content": "pub fn show_tooltip_at_pointer(ctx: &CtxRef, id: Id, add_contents: impl FnOnce(&mut Ui)) {\n\n let suggested_pos = ctx\n\n .input()\n\n .pointer\n\n .hover_pos()\n\n .map(|pointer_pos| pointer_pos + vec2(16.0, 16.0));\n\n show_tooltip_at(ctx, id, suggested_pos, add_contents)\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 4, "score": 431105.3235280281 }, { "content": "pub fn paint_resize_corner_with_style(ui: &mut Ui, rect: &Rect, stroke: Stroke, corner: Align2) {\n\n let painter = ui.painter();\n\n let cp = painter.round_pos_to_pixels(corner.pos_in_rect(rect));\n\n let mut w = 2.0;\n\n\n\n while w <= rect.width() && w <= rect.height() {\n\n painter.line_segment(\n\n [\n\n pos2(cp.x - w * corner.x().to_sign(), cp.y),\n\n pos2(cp.x, cp.y - w * corner.y().to_sign()),\n\n ],\n\n stroke,\n\n );\n\n w += 4.0;\n\n }\n\n}\n", "file_path": "egui/src/containers/resize.rs", "rank": 5, "score": 406946.1417069399 }, { "content": "fn show_hsva(ui: &mut Ui, color: Hsva, desired_size: Vec2) -> Response {\n\n let (rect, response) = ui.allocate_at_least(desired_size, Sense::hover());\n\n background_checkers(ui.painter(), rect);\n\n if true {\n\n let left = Rect::from_min_max(rect.left_top(), rect.center_bottom());\n\n let right = Rect::from_min_max(rect.center_top(), rect.right_bottom());\n\n ui.painter().rect_filled(left, 0.0, color);\n\n ui.painter().rect_filled(right, 0.0, color.to_opaque());\n\n } else {\n\n ui.painter().add(Shape::Rect {\n\n rect,\n\n corner_radius: 2.0,\n\n fill: color.into(),\n\n stroke: Stroke::new(3.0, color.to_opaque()),\n\n });\n\n }\n\n response\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 6, "score": 391408.09614316985 }, { "content": "pub fn center_size_in_rect(size: Vec2, frame: Rect) -> Rect {\n\n Align2::CENTER_CENTER.align_size_within_rect(size, frame)\n\n}\n", "file_path": "emath/src/align.rs", "rank": 7, "score": 388795.6804672339 }, { "content": "pub fn stroke_ui(ui: 
&mut crate::Ui, stroke: &mut epaint::Stroke, text: &str) {\n\n let epaint::Stroke { width, color } = stroke;\n\n ui.horizontal(|ui| {\n\n ui.add(DragValue::new(width).speed(0.1).clamp_range(0.0..=5.0))\n\n .on_hover_text(\"Width\");\n\n ui.color_edit_button_srgba(color);\n\n ui.label(text);\n\n\n\n // stroke preview:\n\n let (_id, stroke_rect) = ui.allocate_space(ui.spacing().interact_size);\n\n let left = stroke_rect.left_center();\n\n let right = stroke_rect.right_center();\n\n ui.painter().line_segment([left, right], (*width, *color));\n\n });\n\n}\n\n\n\npub(crate) fn shadow_ui(ui: &mut Ui, shadow: &mut epaint::Shadow, text: &str) {\n\n let epaint::Shadow { extrusion, color } = shadow;\n\n ui.horizontal(|ui| {\n\n ui.label(text);\n\n ui.add(\n\n DragValue::new(extrusion)\n\n .speed(1.0)\n\n .clamp_range(0.0..=100.0),\n\n )\n\n .on_hover_text(\"Extrusion\");\n\n ui.color_edit_button_srgba(color);\n\n });\n\n}\n", "file_path": "egui/src/widgets/mod.rs", "rank": 8, "score": 382180.5042271465 }, { "content": "pub fn drag_source(ui: &mut Ui, id: Id, body: impl FnOnce(&mut Ui)) {\n\n let is_being_dragged = ui.memory().is_being_dragged(id);\n\n\n\n if !is_being_dragged {\n\n let response = ui.scope(body).response;\n\n\n\n // Check for drags:\n\n let response = ui.interact(response.rect, id, Sense::drag());\n\n if response.hovered() {\n\n ui.output().cursor_icon = CursorIcon::Grab;\n\n }\n\n } else {\n\n ui.output().cursor_icon = CursorIcon::Grabbing;\n\n\n\n // Paint the body to a new layer:\n\n let layer_id = LayerId::new(Order::Tooltip, id);\n\n let response = ui.with_layer_id(layer_id, body).response;\n\n\n\n // Now we move the visuals of the body to where the mouse is.\n\n // Normally you need to decide a location for a widget first,\n", "file_path": "egui_demo_lib/src/apps/demo/drag_and_drop.rs", "rank": 9, "score": 381401.5292750436 }, { "content": "fn color_text_ui(ui: &mut Ui, color: impl Into<Color32>) {\n\n let color = color.into();\n\n ui.horizontal(|ui| 
{\n\n let [r, g, b, a] = color.to_array();\n\n ui.label(format!(\n\n \"RGBA (premultiplied): rgba({}, {}, {}, {})\",\n\n r, g, b, a\n\n ));\n\n\n\n if ui.button(\"📋\").on_hover_text(\"Click to copy\").clicked() {\n\n ui.output().copied_text = format!(\"{}, {}, {}, {}\", r, g, b, a);\n\n }\n\n });\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 10, "score": 378758.67014943296 }, { "content": "/// Helper function that adds a label when compiling with debug assertions enabled.\n\npub fn warn_if_debug_build(ui: &mut crate::Ui) {\n\n if cfg!(debug_assertions) {\n\n ui.label(\n\n crate::Label::new(\"‼ Debug build ‼\")\n\n .small()\n\n .text_color(crate::Color32::RED),\n\n )\n\n .on_hover_text(\"egui was compiled with debug assertions enabled.\");\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// Create a [`Hyperlink`](crate::Hyperlink) to the current [`file!()`] (and line) on Github\n\n///\n\n/// Example: `ui.add(github_link_file_line!(\"https://github.com/YOUR/PROJECT/blob/master/\", \"(source code)\"));`\n\n#[macro_export]\n\nmacro_rules! 
github_link_file_line {\n\n ($github_url:expr, $label:expr) => {{\n\n let url = format!(\"{}{}#L{}\", $github_url, file!(), line!());\n", "file_path": "egui/src/lib.rs", "rank": 11, "score": 349251.1172759094 }, { "content": "fn bullet_point(ui: &mut Ui, width: f32) -> Response {\n\n let row_height = ui.fonts()[TextStyle::Body].row_height();\n\n let (rect, response) = ui.allocate_exact_size(vec2(width, row_height), Sense::hover());\n\n ui.painter().circle_filled(\n\n rect.center(),\n\n rect.height() / 8.0,\n\n ui.visuals().strong_text_color(),\n\n );\n\n response\n\n}\n\n\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 12, "score": 348811.76782343996 }, { "content": "/// return true on change\n\nfn color_picker_hsva_2d(ui: &mut Ui, hsva: &mut Hsva, alpha: Alpha) -> bool {\n\n let mut hsvag = HsvaGamma::from(*hsva);\n\n color_picker_hsvag_2d(ui, &mut hsvag, alpha);\n\n let new_hasva = Hsva::from(hsvag);\n\n if *hsva == new_hasva {\n\n false\n\n } else {\n\n *hsva = new_hasva;\n\n true\n\n }\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 13, "score": 341958.0579281187 }, { "content": "fn numbered_point(ui: &mut Ui, width: f32, number: &str) -> Response {\n\n let row_height = ui.fonts()[TextStyle::Body].row_height();\n\n let (rect, response) = ui.allocate_exact_size(vec2(width, row_height), Sense::hover());\n\n let text = format!(\"{}.\", number);\n\n let text_color = ui.visuals().strong_text_color();\n\n ui.painter().text(\n\n rect.right_center(),\n\n Align2::RIGHT_CENTER,\n\n text,\n\n TextStyle::Body,\n\n text_color,\n\n );\n\n response\n\n}\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 14, "score": 330070.6152235584 }, { "content": "/// Construct a top level menu in a menu bar. This would be e.g. 
\"File\", \"Edit\" etc.\n\npub fn menu(ui: &mut Ui, title: impl ToString, add_contents: impl FnOnce(&mut Ui)) {\n\n menu_impl(ui, title, Box::new(add_contents))\n\n}\n\n\n", "file_path": "egui/src/menu.rs", "rank": 15, "score": 329402.50982299785 }, { "content": "fn ui_color(ui: &mut Ui, srgba: &mut Color32, text: &str) {\n\n ui.horizontal(|ui| {\n\n ui.color_edit_button_srgba(srgba);\n\n ui.label(text);\n\n });\n\n}\n", "file_path": "egui/src/style.rs", "rank": 16, "score": 327716.3970900571 }, { "content": "fn close_button(ui: &mut Ui, rect: Rect) -> Response {\n\n let close_id = ui.auto_id_with(\"window_close_button\");\n\n let response = ui.interact(rect, close_id, Sense::click());\n\n ui.expand_to_include_rect(response.rect);\n\n\n\n let visuals = ui.style().interact(&response);\n\n let rect = rect.shrink(2.0).expand(visuals.expansion);\n\n let stroke = visuals.fg_stroke;\n\n ui.painter()\n\n .line_segment([rect.left_top(), rect.right_bottom()], stroke);\n\n ui.painter()\n\n .line_segment([rect.right_top(), rect.left_bottom()], stroke);\n\n response\n\n}\n", "file_path": "egui/src/containers/window.rs", "rank": 17, "score": 320592.58225311164 }, { "content": "/// Show some text at the current pointer position (if any).\n\n///\n\n/// Most of the time it is easier to use [`Response::on_hover_text`].\n\n///\n\n/// See also [`show_tooltip`].\n\n///\n\n/// ```\n\n/// # let mut ui = egui::Ui::__test();\n\n/// if ui.ui_contains_pointer() {\n\n/// egui::show_tooltip_text(ui.ctx(), egui::Id::new(\"my_tooltip\"), \"Helpful text\");\n\n/// }\n\n/// ```\n\npub fn show_tooltip_text(ctx: &CtxRef, id: Id, text: impl ToString) {\n\n show_tooltip(ctx, id, |ui| {\n\n ui.add(crate::widgets::Label::new(text));\n\n })\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 18, "score": 316954.1682352518 }, { "content": "fn color_button(ui: &mut Ui, color: Color32) -> Response {\n\n let size = ui.spacing().interact_size;\n\n let (rect, response) = 
ui.allocate_exact_size(size, Sense::click());\n\n response.widget_info(|| WidgetInfo::new(WidgetType::ColorButton));\n\n let visuals = ui.style().interact(&response);\n\n let rect = rect.expand(visuals.expansion);\n\n\n\n background_checkers(ui.painter(), rect);\n\n\n\n let left_half = Rect::from_min_max(rect.left_top(), rect.center_bottom());\n\n let right_half = Rect::from_min_max(rect.center_top(), rect.right_bottom());\n\n ui.painter().rect_filled(left_half, 0.0, color);\n\n ui.painter().rect_filled(right_half, 0.0, color.to_opaque());\n\n\n\n let corner_radius = visuals.corner_radius.at_most(2.0);\n\n ui.painter()\n\n .rect_stroke(rect, corner_radius, (2.0, visuals.bg_fill)); // fill is intentional!\n\n\n\n response\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 19, "score": 313089.81223149464 }, { "content": "/// Show a button to reset a value to its default.\n\n/// The button is only enabled if the value does not already have its original value.\n\npub fn reset_button<T: Default + PartialEq>(ui: &mut Ui, value: &mut T) {\n\n let def = T::default();\n\n if ui\n\n .add(Button::new(\"Reset\").enabled(*value != def))\n\n .clicked()\n\n {\n\n *value = def;\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "egui/src/widgets/mod.rs", "rank": 20, "score": 312761.9634476489 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\nfn menu_impl<'c>(ui: &mut Ui, title: impl ToString, add_contents: Box<dyn FnOnce(&mut Ui) + 'c>) {\n\n let title = title.to_string();\n\n let bar_id = ui.id();\n\n let menu_id = bar_id.with(&title);\n\n\n\n let mut bar_state = BarState::load(ui.ctx(), &bar_id);\n\n\n\n let mut button = Button::new(title);\n\n\n\n if bar_state.open_menu == Some(menu_id) {\n\n button = button.fill(Some(ui.visuals().selection.bg_fill));\n\n }\n\n\n\n let button_response = ui.add(button);\n\n if button_response.clicked() {\n\n // Toggle\n\n if bar_state.open_menu == 
Some(menu_id) {\n\n bar_state.open_menu = None;\n\n } else {\n\n bar_state.open_menu = Some(menu_id);\n", "file_path": "egui/src/menu.rs", "rank": 21, "score": 310597.87449471315 }, { "content": "fn ui_url(ui: &mut egui::Ui, frame: &mut epi::Frame<'_>, url: &mut String) -> Option<String> {\n\n let mut trigger_fetch = false;\n\n\n\n ui.horizontal(|ui| {\n\n ui.label(\"URL:\");\n\n trigger_fetch |= ui.text_edit_singleline(url).lost_focus();\n\n trigger_fetch |= ui.button(\"GET\").clicked();\n\n });\n\n\n\n if frame.is_web() {\n\n ui.label(\"HINT: paste the url of this page into the field above!\");\n\n }\n\n\n\n ui.horizontal(|ui| {\n\n if ui.button(\"Source code for this example\").clicked() {\n\n *url = format!(\n\n \"https://raw.githubusercontent.com/emilk/egui/master/{}\",\n\n file!()\n\n );\n\n trigger_fetch = true;\n", "file_path": "egui_demo_lib/src/apps/http_app.rs", "rank": 22, "score": 305511.0877685263 }, { "content": "fn background_checkers(painter: &Painter, rect: Rect) {\n\n let rect = rect.shrink(0.5); // Small hack to avoid the checkers from peeking through the sides\n\n if !rect.is_positive() {\n\n return;\n\n }\n\n\n\n let mut top_color = Color32::from_gray(128);\n\n let mut bottom_color = Color32::from_gray(32);\n\n let checker_size = Vec2::splat(rect.height() / 2.0);\n\n let n = (rect.width() / checker_size.x).round() as u32;\n\n\n\n let mut mesh = Mesh::default();\n\n for i in 0..n {\n\n let x = lerp(rect.left()..=rect.right(), i as f32 / (n as f32));\n\n mesh.add_colored_rect(\n\n Rect::from_min_size(pos2(x, rect.top()), checker_size),\n\n top_color,\n\n );\n\n mesh.add_colored_rect(\n\n Rect::from_min_size(pos2(x, rect.center().y), checker_size),\n\n bottom_color,\n\n );\n\n std::mem::swap(&mut top_color, &mut bottom_color);\n\n }\n\n painter.add(Shape::mesh(mesh));\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 23, "score": 304308.5139438763 }, { "content": "fn paint_cursor_end(ui: &mut Ui, pos: Pos2, galley: &Galley, 
cursor: &Cursor) {\n\n let stroke = ui.visuals().selection.stroke;\n\n\n\n let cursor_pos = galley.pos_from_cursor(cursor).translate(pos.to_vec2());\n\n let cursor_pos = cursor_pos.expand(1.5); // slightly above/below row\n\n\n\n let top = cursor_pos.center_top();\n\n let bottom = cursor_pos.center_bottom();\n\n\n\n ui.painter().line_segment(\n\n [top, bottom],\n\n (ui.visuals().text_cursor_width, stroke.color),\n\n );\n\n\n\n if false {\n\n // Roof/floor:\n\n let extrusion = 3.0;\n\n let width = 1.0;\n\n ui.painter().line_segment(\n\n [top - vec2(extrusion, 0.0), top + vec2(extrusion, 0.0)],\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 24, "score": 298845.0229283991 }, { "content": "/// The menu bar goes well in a [`TopBottomPanel::top`],\n\n/// but can also be placed in a `Window`.\n\n/// In the latter case you may want to wrap it in `Frame`.\n\npub fn bar<R>(ui: &mut Ui, add_contents: impl FnOnce(&mut Ui) -> R) -> InnerResponse<R> {\n\n ui.horizontal(|ui| {\n\n let mut style = (**ui.style()).clone();\n\n style.spacing.button_padding = vec2(2.0, 0.0);\n\n // style.visuals.widgets.active.bg_fill = Color32::TRANSPARENT;\n\n style.visuals.widgets.active.bg_stroke = Stroke::none();\n\n // style.visuals.widgets.hovered.bg_fill = Color32::TRANSPARENT;\n\n style.visuals.widgets.hovered.bg_stroke = Stroke::none();\n\n style.visuals.widgets.inactive.bg_fill = Color32::TRANSPARENT;\n\n style.visuals.widgets.inactive.bg_stroke = Stroke::none();\n\n ui.set_style(style);\n\n\n\n // Take full width and fixed height:\n\n let height = ui.spacing().interact_size.y;\n\n ui.set_min_size(vec2(ui.available_width(), height));\n\n\n\n add_contents(ui)\n\n })\n\n}\n\n\n", "file_path": "egui/src/menu.rs", "rank": 25, "score": 297987.70146027685 }, { "content": "#[allow(dead_code)]\n\nfn toggle_ui_compact(ui: &mut egui::Ui, on: &mut bool) -> egui::Response {\n\n let desired_size = ui.spacing().interact_size.y * egui::vec2(2.0, 1.0);\n\n let (rect, mut response) = 
ui.allocate_exact_size(desired_size, egui::Sense::click());\n\n if response.clicked() {\n\n *on = !*on;\n\n response.mark_changed();\n\n }\n\n response.widget_info(|| egui::WidgetInfo::selected(egui::WidgetType::Checkbox, *on, \"\"));\n\n\n\n let how_on = ui.ctx().animate_bool(response.id, *on);\n\n let visuals = ui.style().interact_selectable(&response, *on);\n\n let rect = rect.expand(visuals.expansion);\n\n let radius = 0.5 * rect.height();\n\n ui.painter()\n\n .rect(rect, radius, visuals.bg_fill, visuals.bg_stroke);\n\n let circle_x = egui::lerp((rect.left() + radius)..=(rect.right() - radius), how_on);\n\n let center = egui::pos2(circle_x, rect.center().y);\n\n ui.painter()\n\n .circle(center, 0.75 * radius, visuals.bg_fill, visuals.fg_stroke);\n\n\n\n response\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/toggle_switch.rs", "rank": 26, "score": 293219.7925957732 }, { "content": "/// iOS-style toggle switch:\n\n///\n\n/// ``` text\n\n/// _____________\n\n/// / /.....\\\n\n/// | |.......|\n\n/// \\_______\\_____/\n\n/// ```\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// toggle_ui(ui, &mut my_bool);\n\n/// ```\n\npub fn toggle_ui(ui: &mut egui::Ui, on: &mut bool) -> egui::Response {\n\n // Widget code can be broken up in four steps:\n\n // 1. Decide a size for the widget\n\n // 2. Allocate space for it\n\n // 3. Handle interactions with the widget (if any)\n\n // 4. Paint the widget\n\n\n\n // 1. Deciding widget size:\n\n // You can query the `ui` how much space is available,\n\n // but in this example we have a fixed size widget based on the height of a standard button:\n\n let desired_size = ui.spacing().interact_size.y * egui::vec2(2.0, 1.0);\n\n\n\n // 2. Allocating space:\n\n // This is where we get a region of the screen assigned.\n\n // We also tell the Ui to sense clicks in the allocated region.\n\n let (rect, mut response) = ui.allocate_exact_size(desired_size, egui::Sense::click());\n\n\n\n // 3. 
Interact: Time to check for clicks!\n\n if response.clicked() {\n\n *on = !*on;\n", "file_path": "egui_demo_lib/src/apps/demo/toggle_switch.rs", "rank": 27, "score": 289397.0496747466 }, { "content": "fn demo_ui(ui: &mut Ui) {\n\n ui.monospace(\"Example widgets:\");\n\n for _ in 0..3 {\n\n ui.label(\"label\");\n\n }\n\n for _ in 0..3 {\n\n let mut dummy = false;\n\n ui.checkbox(&mut dummy, \"checkbox\");\n\n }\n\n for _ in 0..3 {\n\n let _ = ui.button(\"button\");\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/apps/demo/layout_test.rs", "rank": 28, "score": 284314.35728022165 }, { "content": "fn color_picker_hsvag_2d(ui: &mut Ui, hsva: &mut HsvaGamma, alpha: Alpha) {\n\n color_text_ui(ui, *hsva);\n\n\n\n if alpha == Alpha::BlendOrAdditive {\n\n // We signal additive blending by storing a negative alpha (a bit ironic).\n\n let a = &mut hsva.a;\n\n let mut additive = *a < 0.0;\n\n ui.horizontal(|ui| {\n\n ui.label(\"Blending:\");\n\n ui.radio_value(&mut additive, false, \"Normal\");\n\n ui.radio_value(&mut additive, true, \"Additive\");\n\n\n\n if additive {\n\n *a = -a.abs();\n\n }\n\n\n\n if !additive {\n\n *a = a.abs();\n\n }\n\n });\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 29, "score": 282221.69976555696 }, { "content": "fn pixel_test(ui: &mut Ui) {\n\n ui.label(\"Each subsequent square should be one physical pixel larger than the previous. They should be exactly one physical pixel apart. 
They should be perfectly aligned to the pixel grid.\");\n\n\n\n let pixels_per_point = ui.ctx().pixels_per_point();\n\n let num_squares: u32 = 8;\n\n let size_pixels = Vec2::new(\n\n ((num_squares + 1) * (num_squares + 2) / 2) as f32,\n\n num_squares as f32,\n\n );\n\n let size_points = size_pixels / pixels_per_point + Vec2::splat(2.0);\n\n let (response, painter) = ui.allocate_painter(size_points, Sense::hover());\n\n\n\n let mut cursor_pixel = Pos2::new(\n\n response.rect.min.x * pixels_per_point,\n\n response.rect.min.y * pixels_per_point,\n\n )\n\n .ceil();\n\n for size in 1..=num_squares {\n\n let rect_points = Rect::from_min_size(\n\n Pos2::new(\n\n cursor_pixel.x / pixels_per_point,\n\n cursor_pixel.y / pixels_per_point,\n\n ),\n\n Vec2::splat(size as f32) / pixels_per_point,\n\n );\n\n painter.rect_filled(rect_points, 0.0, egui::Color32::WHITE);\n\n cursor_pixel.x += (1 + size) as f32;\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/apps/color_test.rs", "rank": 30, "score": 281426.69966376974 }, { "content": "pub fn adjust_colors(shape: &mut Shape, adjust_color: &impl Fn(&mut Color32)) {\n\n #![allow(clippy::match_same_arms)]\n\n match shape {\n\n Shape::Noop => {}\n\n Shape::Vec(shapes) => {\n\n for shape in shapes {\n\n adjust_colors(shape, adjust_color)\n\n }\n\n }\n\n Shape::Circle { fill, stroke, .. } => {\n\n adjust_color(fill);\n\n adjust_color(&mut stroke.color);\n\n }\n\n Shape::LineSegment { stroke, .. } => {\n\n adjust_color(&mut stroke.color);\n\n }\n\n Shape::Path { fill, stroke, .. 
} => {\n\n adjust_color(fill);\n\n adjust_color(&mut stroke.color);\n\n }\n", "file_path": "epaint/src/shape_transform.rs", "rank": 31, "score": 280810.2794342743 }, { "content": "fn max_scroll_bar_width_with_margin(ui: &Ui) -> f32 {\n\n ui.spacing().item_spacing.x + 16.0\n\n}\n", "file_path": "egui/src/containers/scroll_area.rs", "rank": 32, "score": 279106.300256946 }, { "content": "/// Shows a button with the given color.\n\n/// If the user clicks the button, a full color picker is shown.\n\npub fn color_edit_button_srgba(ui: &mut Ui, srgba: &mut Color32, alpha: Alpha) -> Response {\n\n // To ensure we keep hue slider when `srgba` is gray we store the\n\n // full `Hsva` in a cache:\n\n\n\n let mut hsva = ui\n\n .ctx()\n\n .memory()\n\n .data_temp\n\n .get_or_default::<Cache<Color32, Hsva>>()\n\n .get(srgba)\n\n .cloned()\n\n .unwrap_or_else(|| Hsva::from(*srgba));\n\n\n\n let response = color_edit_button_hsva(ui, &mut hsva, alpha);\n\n\n\n *srgba = Color32::from(hsva);\n\n\n\n ui.ctx()\n\n .memory()\n\n .data_temp\n\n .get_mut_or_default::<Cache<Color32, Hsva>>()\n\n .set(*srgba, hsva);\n\n\n\n response\n\n}\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 33, "score": 271122.8867873156 }, { "content": "pub fn color_edit_button_hsva(ui: &mut Ui, hsva: &mut Hsva, alpha: Alpha) -> Response {\n\n let pupup_id = ui.auto_id_with(\"popup\");\n\n let mut button_response = color_button(ui, (*hsva).into()).on_hover_text(\"Click to edit color\");\n\n\n\n if button_response.clicked() {\n\n ui.memory().toggle_popup(pupup_id);\n\n }\n\n // TODO: make it easier to show a temporary popup that closes when you click outside it\n\n if ui.memory().is_popup_open(pupup_id) {\n\n let area_response = Area::new(pupup_id)\n\n .order(Order::Foreground)\n\n .default_pos(button_response.rect.max)\n\n .show(ui.ctx(), |ui| {\n\n ui.spacing_mut().slider_width = 256.0;\n\n Frame::popup(ui.style()).show(ui, |ui| {\n\n if color_picker_hsva_2d(ui, hsva, alpha) {\n\n 
button_response.mark_changed();\n\n }\n\n });\n\n });\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 34, "score": 271116.7177644797 }, { "content": "fn huge_content_painter(ui: &mut egui::Ui) {\n\n // This is similar to the other demo, but is fully manual, for when you want to do custom painting.\n\n ui.label(\"A lot of rows, but only the visible ones are painted, so performance is still good:\");\n\n ui.add_space(4.0);\n\n\n\n let text_style = TextStyle::Body;\n\n let row_height = ui.fonts()[text_style].row_height() + ui.spacing().item_spacing.y;\n\n let num_rows = 10_000;\n\n\n\n ScrollArea::auto_sized().show_viewport(ui, |ui, viewport| {\n\n ui.set_height(row_height * num_rows as f32);\n\n\n\n let first_item = (viewport.min.y / row_height).floor().at_least(0.0) as usize;\n\n let last_item = (viewport.max.y / row_height).ceil() as usize + 1;\n\n let last_item = last_item.at_most(num_rows);\n\n\n\n for i in first_item..last_item {\n\n let indentation = (i % 100) as f32;\n\n let x = ui.min_rect().left() + indentation;\n\n let y = ui.min_rect().top() + i as f32 * row_height;\n", "file_path": "egui_demo_lib/src/apps/demo/scrolling.rs", "rank": 35, "score": 270334.2839303363 }, { "content": "/// Return true when arguments are the same within some rounding error.\n\n///\n\n/// For instance `almost_equal(x, x.to_degrees().to_radians(), f32::EPSILON)` should hold true for all x.\n\n/// The `epsilon` can be `f32::EPSILON` to handle simple transforms (like degrees -> radians)\n\n/// but should be higher to handle more complex transformations.\n\npub fn almost_equal(a: f32, b: f32, epsilon: f32) -> bool {\n\n if a == b {\n\n true // handle infinites\n\n } else {\n\n let abs_max = a.abs().max(b.abs());\n\n abs_max <= epsilon || ((a - b).abs() / abs_max) <= epsilon\n\n }\n\n}\n\n\n", "file_path": "emath/src/lib.rs", "rank": 36, "score": 262569.305860132 }, { "content": "fn pointer_pressed_on_area(ctx: &Context, layer_id: LayerId) -> bool {\n\n if let 
Some(pointer_pos) = ctx.input().pointer.interact_pos() {\n\n ctx.input().pointer.any_pressed() && ctx.layer_id_at(pointer_pos) == Some(layer_id)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "egui/src/containers/area.rs", "rank": 37, "score": 257701.49247998136 }, { "content": "// A wrapper that allows the more idiomatic usage pattern: `ui.add(toggle(&mut my_bool))`\n\n/// iOS-style toggle switch.\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// ui.add(toggle(&mut my_bool));\n\n/// ```\n\npub fn toggle(on: &mut bool) -> impl egui::Widget + '_ {\n\n move |ui: &mut egui::Ui| toggle_ui(ui, on)\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/toggle_switch.rs", "rank": 38, "score": 256252.35435519397 }, { "content": "fn paint_cursor_selection(ui: &mut Ui, pos: Pos2, galley: &Galley, cursorp: &CursorPair) {\n\n let color = ui.visuals().selection.bg_fill;\n\n if cursorp.is_empty() {\n\n return;\n\n }\n\n let [min, max] = cursorp.sorted();\n\n let min = min.rcursor;\n\n let max = max.rcursor;\n\n\n\n for ri in min.row..=max.row {\n\n let row = &galley.rows[ri];\n\n let left = if ri == min.row {\n\n row.x_offset(min.column)\n\n } else {\n\n row.min_x()\n\n };\n\n let right = if ri == max.row {\n\n row.x_offset(max.column)\n\n } else {\n\n let newline_size = if row.ends_with_newline {\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 39, "score": 253579.7511905129 }, { "content": "fn handle_radius(rect: &Rect) -> f32 {\n\n rect.height() / 2.5\n\n}\n\n\n", "file_path": "egui/src/widgets/slider.rs", "rank": 40, "score": 249995.16440015912 }, { "content": "fn paint_icon(painter: &Painter, rect: Rect, visuals: &WidgetVisuals) {\n\n let rect = Rect::from_center_size(\n\n rect.center(),\n\n vec2(rect.width() * 0.7, rect.height() * 0.45),\n\n );\n\n painter.add(Shape::closed_line(\n\n vec![rect.left_top(), rect.right_top(), rect.center_bottom()],\n\n visuals.fg_stroke,\n\n ));\n\n}\n", "file_path": "egui/src/containers/combo_box.rs", "rank": 41, "score": 
247208.07721278223 }, { "content": "fn vertex_gradient(ui: &mut Ui, bg_fill: Color32, gradient: &Gradient) -> Response {\n\n use egui::epaint::*;\n\n let (rect, response) = ui.allocate_at_least(GRADIENT_SIZE, Sense::hover());\n\n if bg_fill != Default::default() {\n\n let mut mesh = Mesh::default();\n\n mesh.add_colored_rect(rect, bg_fill);\n\n ui.painter().add(Shape::mesh(mesh));\n\n }\n\n {\n\n let n = gradient.0.len();\n\n assert!(n >= 2);\n\n let mut mesh = Mesh::default();\n\n for (i, &color) in gradient.0.iter().enumerate() {\n\n let t = i as f32 / (n as f32 - 1.0);\n\n let x = lerp(rect.x_range(), t);\n\n mesh.colored_vertex(pos2(x, rect.top()), color);\n\n mesh.colored_vertex(pos2(x, rect.bottom()), color);\n\n if i < n - 1 {\n\n let i = i as u32;\n\n mesh.add_triangle(2 * i, 2 * i + 1, 2 * i + 2);\n\n mesh.add_triangle(2 * i + 1, 2 * i + 2, 2 * i + 3);\n\n }\n\n }\n\n ui.painter().add(Shape::mesh(mesh));\n\n }\n\n response\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/color_test.rs", "rank": 42, "score": 243438.32937194032 }, { "content": "fn x_range(rect: &Rect) -> RangeInclusive<f32> {\n\n let handle_radius = handle_radius(rect);\n\n (rect.left() + handle_radius)..=(rect.right() - handle_radius)\n\n}\n\n\n\nimpl<'a> Slider<'a> {\n\n /// Just the slider, no text\n\n #[allow(clippy::unused_self)]\n\n fn allocate_slider_space(&self, ui: &mut Ui, height: f32) -> Response {\n\n let desired_size = vec2(ui.spacing().slider_width, height);\n\n ui.allocate_response(desired_size, Sense::click_and_drag())\n\n }\n\n\n\n /// Just the slider, no text\n\n fn slider_ui(&mut self, ui: &mut Ui, response: &Response) {\n\n let rect = &response.rect;\n\n let x_range = x_range(rect);\n\n\n\n if let Some(pointer_pos) = response.interact_pointer_pos() {\n\n let new_value = if self.smart_aim {\n", "file_path": "egui/src/widgets/slider.rs", "rank": 43, "score": 242482.0768925804 }, { "content": "/// All ranges in 0-1, rgb is linear.\n\npub fn hsv_from_rgb([r, g, b]: [f32; 3]) 
-> (f32, f32, f32) {\n\n #![allow(clippy::many_single_char_names)]\n\n let min = r.min(g.min(b));\n\n let max = r.max(g.max(b)); // value\n\n\n\n let range = max - min;\n\n\n\n let h = if max == min {\n\n 0.0 // hue is undefined\n\n } else if max == r {\n\n (g - b) / (6.0 * range)\n\n } else if max == g {\n\n (b - r) / (6.0 * range) + 1.0 / 3.0\n\n } else {\n\n // max == b\n\n (r - g) / (6.0 * range) + 2.0 / 3.0\n\n };\n\n let h = (h + 1.0).fract(); // wrap\n\n let s = if max == 0.0 { 0.0 } else { 1.0 - min / max };\n\n (h, s, max)\n\n}\n\n\n", "file_path": "epaint/src/color.rs", "rank": 44, "score": 242315.30373005875 }, { "content": "pub fn easy_mark_it<'em>(ui: &mut Ui, items: impl Iterator<Item = easy_mark::Item<'em>>) {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.spacing_mut().item_spacing = Vec2::new(0.0, 0.0);\n\n ui.set_row_height(ui.fonts()[TextStyle::Body].row_height());\n\n\n\n for item in items {\n\n item_ui(ui, item);\n\n }\n\n });\n\n}\n\n\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 45, "score": 240333.69801996058 }, { "content": "/// Wrap angle to `[-PI, PI]` range.\n\npub fn normalized_angle(mut angle: f32) -> f32 {\n\n use std::f32::consts::{PI, TAU};\n\n angle %= TAU;\n\n if angle > PI {\n\n angle -= TAU;\n\n } else if angle < -PI {\n\n angle += TAU;\n\n }\n\n angle\n\n}\n\n\n", "file_path": "emath/src/lib.rs", "rank": 46, "score": 237321.82842927641 }, { "content": "fn show_menu_bar(ui: &mut Ui) {\n\n trace!(ui);\n\n use egui::*;\n\n\n\n menu::bar(ui, |ui| {\n\n menu::menu(ui, \"File\", |ui| {\n\n if ui.button(\"Organize windows\").clicked() {\n\n ui.ctx().memory().reset_areas();\n\n }\n\n if ui\n\n .button(\"Clear egui memory\")\n\n .on_hover_text(\"Forget scroll, positions, sizes etc\")\n\n .clicked()\n\n {\n\n *ui.ctx().memory() = Default::default();\n\n }\n\n });\n\n });\n\n}\n", "file_path": "egui_demo_lib/src/apps/demo/demo_app_windows.rs", "rank": 47, "score": 233043.79009414837 }, { "content": "fn 
contrast_color(color: impl Into<Rgba>) -> Color32 {\n\n if color.into().intensity() < 0.5 {\n\n Color32::WHITE\n\n } else {\n\n Color32::BLACK\n\n }\n\n}\n\n\n\n/// Number of vertices per dimension in the color sliders.\n\n/// We need at least 6 for hues, and more for smooth 2D areas.\n\n/// Should always be a multiple of 6 to hit the peak hues in HSV/HSL (every 60°).\n\nconst N: u32 = 6 * 6;\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 48, "score": 231032.4193015113 }, { "content": "fn next_word_boundary_char_index(it: impl Iterator<Item = char>, mut index: usize) -> usize {\n\n let mut it = it.skip(index);\n\n if let Some(_first) = it.next() {\n\n index += 1;\n\n\n\n if let Some(second) = it.next() {\n\n index += 1;\n\n for next in it {\n\n if is_word_char(next) != is_word_char(second) {\n\n break;\n\n }\n\n index += 1;\n\n }\n\n }\n\n }\n\n index\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 49, "score": 230858.20148835247 }, { "content": "/// Show a button to switch to/from dark/light mode (globally).\n\nfn dark_light_mode_switch(ui: &mut egui::Ui) {\n\n let style: egui::Style = (*ui.ctx().style()).clone();\n\n let new_visuals = style.visuals.light_dark_small_toggle_button(ui);\n\n if let Some(visuals) = new_visuals {\n\n ui.ctx().set_visuals(visuals);\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/wrap_app.rs", "rank": 50, "score": 230687.35329764133 }, { "content": "fn huge_content_lines(ui: &mut egui::Ui) {\n\n ui.label(\n\n \"A lot of rows, but only the visible ones are layed out, so performance is still good:\",\n\n );\n\n ui.add_space(4.0);\n\n\n\n let text_style = TextStyle::Body;\n\n let row_height = ui.fonts()[text_style].row_height();\n\n let num_rows = 10_000;\n\n ScrollArea::auto_sized().show_rows(ui, row_height, num_rows, |ui, row_range| {\n\n for row in row_range {\n\n let text = format!(\"This is row {}/{}\", row + 1, num_rows);\n\n ui.label(text);\n\n }\n\n });\n\n}\n\n\n", "file_path": 
"egui_demo_lib/src/apps/demo/scrolling.rs", "rank": 51, "score": 230687.35329764133 }, { "content": "/// Password entry field with ability to toggle character hiding.\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// password_ui(ui, &mut password);\n\n/// ```\n\npub fn password_ui(ui: &mut egui::Ui, text: &mut String) -> egui::Response {\n\n // This widget has its own state — show or hide password characters.\n\n\n\n // 1. Declare state struct\n\n // This struct represents the state of this widget.\n\n // It must implement at least `Clone` and be `'static`.\n\n // If you use the `persistence` feature, it also must implement `serde::{Deserialize, Serialize}`.\n\n // You should prefer creating custom newtype structs or enums like this, to avoid `TypeId`\n\n // intersection errors, especially when you use `Memory::data` without `Id`.\n\n #[derive(Clone, Copy, Default)]\n\n struct State(bool);\n\n\n\n // 2. Create id\n\n let id = ui.id().with(\"show_password\");\n\n\n\n // 3. Get state for this widget\n\n // You can read more about available `Memory` functions in the documentation of `egui::Memory`\n\n // struct and `egui::any` module.\n\n // You should get state by value, not by reference to avoid borrowing of `Memory`.\n\n let mut plaintext = *ui.memory().id_data_temp.get_or_default::<State>(id);\n", "file_path": "egui_demo_lib/src/apps/demo/password.rs", "rank": 52, "score": 229027.32060864696 }, { "content": "fn mul_color(color: Color32, factor: f32) -> Color32 {\n\n crate::epaint_assert!(0.0 <= factor && factor <= 1.0);\n\n // As an unfortunate side-effect of using premultiplied alpha\n\n // we need a somewhat expensive conversion to linear space and back.\n\n color.linear_multiply(factor)\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// Converts [`Shape`]s into [`Mesh`].\n\npub struct Tessellator {\n\n options: TessellationOptions,\n\n /// Only used for culling\n\n clip_rect: Rect,\n\n scratchpad_points: 
Vec<Pos2>,\n\n scratchpad_path: Path,\n\n}\n\n\n\nimpl Tessellator {\n\n pub fn from_options(options: TessellationOptions) -> Self {\n", "file_path": "epaint/src/tessellator.rs", "rank": 53, "score": 227919.1353068523 }, { "content": "pub fn paint_resize_corner(ui: &mut Ui, response: &Response) {\n\n let stroke = ui.style().interact(response).fg_stroke;\n\n paint_resize_corner_with_style(ui, &response.rect, stroke, Align2::RIGHT_BOTTOM);\n\n}\n\n\n", "file_path": "egui/src/containers/resize.rs", "rank": 54, "score": 224941.77045303903 }, { "content": "pub fn canvas_size_in_points(canvas_id: &str) -> egui::Vec2 {\n\n let canvas = canvas_element(canvas_id).unwrap();\n\n let pixels_per_point = native_pixels_per_point();\n\n egui::vec2(\n\n canvas.width() as f32 / pixels_per_point,\n\n canvas.height() as f32 / pixels_per_point,\n\n )\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 55, "score": 217922.32834867778 }, { "content": "/// All ranges in 0-1, rgb is linear.\n\npub fn rgb_from_hsv((h, s, v): (f32, f32, f32)) -> [f32; 3] {\n\n #![allow(clippy::many_single_char_names)]\n\n let h = (h.fract() + 1.0).fract(); // wrap\n\n let s = s.clamp(0.0, 1.0);\n\n\n\n let f = h * 6.0 - (h * 6.0).floor();\n\n let p = v * (1.0 - s);\n\n let q = v * (1.0 - f * s);\n\n let t = v * (1.0 - (1.0 - f) * s);\n\n\n\n match (h * 6.0).floor() as i32 % 6 {\n\n 0 => [v, t, p],\n\n 1 => [q, v, p],\n\n 2 => [p, v, t],\n\n 3 => [p, q, v],\n\n 4 => [t, p, v],\n\n 5 => [v, p, q],\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "epaint/src/color.rs", "rank": 56, "score": 216750.57900517806 }, { "content": "/// Alternative to `FileStorage`\n\npub fn read_memory(ctx: &egui::Context, memory_file_path: impl AsRef<std::path::Path>) {\n\n let memory: Option<egui::Memory> = read_ron(memory_file_path);\n\n if let Some(memory) = memory {\n\n *ctx.memory() = memory;\n\n }\n\n}\n\n\n", "file_path": "egui_glium/src/persistence.rs", "rank": 57, "score": 215671.72584753655 }, { "content": "/// 
Parse and display a VERY simple and small subset of Markdown.\n\npub fn easy_mark(ui: &mut Ui, easy_mark: &str) {\n\n easy_mark_it(ui, easy_mark::Parser::new(easy_mark))\n\n}\n\n\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 58, "score": 213828.59729024785 }, { "content": "fn move_single_cursor(cursor: &mut Cursor, galley: &Galley, key: Key, modifiers: &Modifiers) {\n\n match key {\n\n Key::ArrowLeft => {\n\n if modifiers.alt || modifiers.ctrl {\n\n // alt on mac, ctrl on windows\n\n *cursor = galley.from_ccursor(ccursor_previous_word(&galley.text, cursor.ccursor));\n\n } else if modifiers.mac_cmd {\n\n *cursor = galley.cursor_begin_of_row(cursor);\n\n } else {\n\n *cursor = galley.cursor_left_one_character(cursor);\n\n }\n\n }\n\n Key::ArrowRight => {\n\n if modifiers.alt || modifiers.ctrl {\n\n // alt on mac, ctrl on windows\n\n *cursor = galley.from_ccursor(ccursor_next_word(&galley.text, cursor.ccursor));\n\n } else if modifiers.mac_cmd {\n\n *cursor = galley.cursor_end_of_row(cursor);\n\n } else {\n\n *cursor = galley.cursor_right_one_character(cursor);\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 59, "score": 213416.29967451934 }, { "content": "/// linear [0, 1] -> gamma [0, 1] (not clamped).\n\n/// Works for numbers outside this range (e.g. negative numbers).\n\npub fn gamma_from_linear(linear: f32) -> f32 {\n\n if linear < 0.0 {\n\n -gamma_from_linear(-linear)\n\n } else if linear <= 0.0031308 {\n\n 12.92 * linear\n\n } else {\n\n 1.055 * linear.powf(1.0 / 2.4) - 0.055\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// Hue, saturation, value, alpha. 
All in the range [0, 1].\n\n/// No premultiplied alpha.\n\n#[derive(Clone, Copy, Debug, Default, PartialEq)]\n\npub struct Hsva {\n\n /// hue 0-1\n\n pub h: f32,\n\n /// saturation 0-1\n\n pub s: f32,\n", "file_path": "epaint/src/color.rs", "rank": 60, "score": 212971.53256515547 }, { "content": "/// gamma [0, 1] -> linear [0, 1] (not clamped).\n\n/// Works for numbers outside this range (e.g. negative numbers).\n\npub fn linear_from_gamma(gamma: f32) -> f32 {\n\n if gamma < 0.0 {\n\n -linear_from_gamma(-gamma)\n\n } else if gamma <= 0.04045 {\n\n gamma / 12.92\n\n } else {\n\n ((gamma + 0.055) / 1.055).powf(2.4)\n\n }\n\n}\n\n\n", "file_path": "epaint/src/color.rs", "rank": 61, "score": 212971.53256515547 }, { "content": "pub fn item_ui(ui: &mut Ui, item: easy_mark::Item<'_>) {\n\n let row_height = ui.fonts()[TextStyle::Body].row_height();\n\n let one_indent = row_height / 2.0;\n\n\n\n match item {\n\n easy_mark::Item::Newline => {\n\n // ui.label(\"\\n\"); // too much spacing (paragraph spacing)\n\n ui.allocate_exact_size(vec2(0.0, row_height), Sense::hover()); // make sure we take up some height\n\n ui.end_row();\n\n ui.set_row_height(row_height);\n\n }\n\n\n\n easy_mark::Item::Text(style, text) => {\n\n ui.add(label_from_style(text, &style));\n\n }\n\n easy_mark::Item::Hyperlink(style, text, url) => {\n\n let label = label_from_style(text, &style);\n\n ui.add(Hyperlink::from_label_and_url(label, url));\n\n }\n\n\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 62, "score": 211698.87276590505 }, { "content": "pub fn resize_canvas_to_screen_size(canvas_id: &str, max_size_points: egui::Vec2) -> Option<()> {\n\n let canvas = canvas_element(canvas_id)?;\n\n\n\n let screen_size_points = screen_size_in_native_points()?;\n\n let pixels_per_point = native_pixels_per_point();\n\n\n\n let max_size_pixels = pixels_per_point * max_size_points;\n\n\n\n let canvas_size_pixels = pixels_per_point * screen_size_points;\n\n let canvas_size_pixels = 
canvas_size_pixels.min(max_size_pixels);\n\n let canvas_size_points = canvas_size_pixels / pixels_per_point;\n\n\n\n // Make sure that the height and width are always even numbers.\n\n // otherwise, the page renders blurry on some platforms.\n\n // See https://github.com/emilk/egui/issues/103\n\n fn round_to_even(v: f32) -> f32 {\n\n (v / 2.0).round() * 2.0\n\n }\n\n\n\n canvas\n", "file_path": "egui_web/src/lib.rs", "rank": 63, "score": 210463.45357401326 }, { "content": "fn set_open(open: &mut BTreeSet<String>, key: &'static str, is_open: bool) {\n\n if is_open {\n\n if !open.contains(key) {\n\n open.insert(key.to_owned());\n\n }\n\n } else {\n\n open.remove(key);\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// A menu bar in which you can select different demo windows to show.\n\n#[derive(Default)]\n\n#[cfg_attr(feature = \"persistence\", derive(serde::Deserialize, serde::Serialize))]\n\n#[cfg_attr(feature = \"persistence\", serde(default))]\n\npub struct DemoWindows {\n\n demos: Demos,\n\n tests: Tests,\n\n}\n", "file_path": "egui_demo_lib/src/apps/demo/demo_app_windows.rs", "rank": 64, "score": 207490.4422496751 }, { "content": "fn clock_button(ui: &mut egui::Ui, seconds_since_midnight: f64) -> egui::Response {\n\n let time = seconds_since_midnight;\n\n let time = format!(\n\n \"{:02}:{:02}:{:02}.{:02}\",\n\n (time % (24.0 * 60.0 * 60.0) / 3600.0).floor(),\n\n (time % (60.0 * 60.0) / 60.0).floor(),\n\n (time % 60.0).floor(),\n\n (time % 1.0 * 100.0).floor()\n\n );\n\n\n\n ui.add(egui::Button::new(time).text_style(egui::TextStyle::Monospace))\n\n}\n\n\n", "file_path": "egui_demo_lib/src/wrap_app.rs", "rank": 65, "score": 206955.59565958838 }, { "content": "pub fn label(ui: &mut Ui, alloc_info: &epaint::stats::AllocInfo, what: &str) -> Response {\n\n ui.add(Label::new(alloc_info.format(what)).wrap(false))\n\n}\n\n\n\nimpl Widget for &mut epaint::TessellationOptions {\n\n fn ui(self, ui: &mut Ui) -> 
Response {\n\n ui.vertical(|ui| {\n\n let epaint::TessellationOptions {\n\n pixels_per_point: _,\n\n aa_size: _,\n\n anti_alias,\n\n coarse_tessellation_culling,\n\n debug_paint_clip_rects,\n\n debug_paint_text_rects,\n\n debug_ignore_clip_rects,\n\n } = self;\n\n ui.checkbox(anti_alias, \"Antialias\");\n\n ui.checkbox(\n\n coarse_tessellation_culling,\n\n \"Do coarse culling in the tessellator\",\n", "file_path": "egui/src/introspection.rs", "rank": 66, "score": 202630.20317113682 }, { "content": "fn megabytes(size: usize) -> String {\n\n format!(\"{:.2} MB\", size as f64 / 1e6)\n\n}\n", "file_path": "epaint/src/stats.rs", "rank": 67, "score": 201299.49303948836 }, { "content": "/// linear [0, 1] -> gamma [0, 255] (clamped).\n\n/// Values outside this range will be clamped to the range.\n\npub fn gamma_u8_from_linear_f32(l: f32) -> u8 {\n\n if l <= 0.0 {\n\n 0\n\n } else if l <= 0.0031308 {\n\n (3294.6 * l).round() as u8\n\n } else if l <= 1.0 {\n\n (269.025 * l.powf(1.0 / 2.4) - 14.025).round() as u8\n\n } else {\n\n 255\n\n }\n\n}\n\n\n\n/// linear [0, 1] -> linear [0, 255] (clamped).\n\n/// Useful for alpha-channel.\n", "file_path": "epaint/src/color.rs", "rank": 68, "score": 200346.5979814853 }, { "content": "#[inline(always)]\n\npub fn linear_u8_from_linear_f32(a: f32) -> u8 {\n\n (a * 255.0).round() as u8 // rust does a saturating cast since 1.45\n\n}\n\n\n", "file_path": "epaint/src/color.rs", "rank": 69, "score": 200346.5979814853 }, { "content": "#[inline(always)]\n\npub fn linear_f32_from_linear_u8(a: u8) -> f32 {\n\n a as f32 / 255.0\n\n}\n\n\n", "file_path": "epaint/src/color.rs", "rank": 70, "score": 200346.5979814853 }, { "content": "/// gamma [0, 255] -> linear [0, 1].\n\npub fn linear_f32_from_gamma_u8(s: u8) -> f32 {\n\n if s <= 10 {\n\n s as f32 / 3294.6\n\n } else {\n\n ((s as f32 + 14.025) / 269.025).powf(2.4)\n\n }\n\n}\n\n\n\n/// linear [0, 255] -> linear [0, 1].\n\n/// Useful for alpha-channel.\n", "file_path": "epaint/src/color.rs", "rank": 
71, "score": 200346.5979814853 }, { "content": "fn move_and_resize_window(ctx: &Context, window_interaction: &WindowInteraction) -> Option<Rect> {\n\n window_interaction.set_cursor(ctx);\n\n let pointer_pos = ctx.input().pointer.interact_pos()?;\n\n let mut rect = window_interaction.start_rect; // prevent drift\n\n\n\n if window_interaction.is_resize() {\n\n if window_interaction.left {\n\n rect.min.x = ctx.round_to_pixel(pointer_pos.x);\n\n } else if window_interaction.right {\n\n rect.max.x = ctx.round_to_pixel(pointer_pos.x);\n\n }\n\n\n\n if window_interaction.top {\n\n rect.min.y = ctx.round_to_pixel(pointer_pos.y);\n\n } else if window_interaction.bottom {\n\n rect.max.y = ctx.round_to_pixel(pointer_pos.y);\n\n }\n\n } else {\n\n // Movement.\n\n\n", "file_path": "egui/src/containers/window.rs", "rank": 72, "score": 198880.32687673485 }, { "content": "// A wrapper that allows the more idiomatic usage pattern: `ui.add(...)`\n\n/// Password entry field with ability to toggle character hiding.\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// ui.add(password(&mut password));\n\n/// ```\n\npub fn password(text: &mut String) -> impl egui::Widget + '_ {\n\n move |ui: &mut egui::Ui| password_ui(ui, text)\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/password.rs", "rank": 73, "score": 190864.305842794 }, { "content": "fn delete_selected_ccursor_range<S: TextBuffer>(text: &mut S, [min, max]: [CCursor; 2]) -> CCursor {\n\n text.delete_char_range(min.index..max.index);\n\n CCursor {\n\n index: min.index,\n\n prefer_next_row: true,\n\n }\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 74, "score": 185719.3219992062 }, { "content": "fn decrease_identation<S: TextBuffer>(ccursor: &mut CCursor, text: &mut S) {\n\n let line_start = find_line_start(text.as_ref(), *ccursor);\n\n\n\n let remove_len = if text.as_ref()[line_start.index..].starts_with('\\t') {\n\n Some(1)\n\n } else if text.as_ref()[line_start.index..]\n\n .chars()\n\n 
.take(text::TAB_SIZE)\n\n .all(|c| c == ' ')\n\n {\n\n Some(text::TAB_SIZE)\n\n } else {\n\n None\n\n };\n\n\n\n if let Some(len) = remove_len {\n\n text.delete_char_range(line_start.index..(line_start.index + len));\n\n if *ccursor != line_start {\n\n *ccursor -= len;\n\n }\n\n }\n\n}\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 75, "score": 182959.6558744192 }, { "content": "pub fn screen_size_in_native_points() -> Option<egui::Vec2> {\n\n let window = web_sys::window()?;\n\n Some(egui::Vec2::new(\n\n window.inner_width().ok()?.as_f64()? as f32,\n\n window.inner_height().ok()?.as_f64()? as f32,\n\n ))\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 76, "score": 182944.19914087895 }, { "content": "fn tint_shape_towards(shape: &mut Shape, target: Color32) {\n\n epaint::shape_transform::adjust_colors(shape, &|color| {\n\n *color = crate::color::tint_color_towards(*color, target);\n\n });\n\n}\n", "file_path": "egui/src/painter.rs", "rank": 77, "score": 181804.40375164352 }, { "content": "// Move text agent to text cursor's position, on desktop/laptop,\n\n// candidate window moves following text element (agent),\n\n// so it appears that the IME candidate window moves with text cursor.\n\n// On mobile devices, there is no need to do that.\n\nfn move_text_cursor(cursor: &Option<egui::Pos2>, canvas_id: &str) -> Option<()> {\n\n let style = text_agent().style();\n\n // Note: movint agent on mobile devices will lead to unpredictable scroll.\n\n if is_mobile() == Some(false) {\n\n cursor.as_ref().and_then(|&egui::Pos2 { x, y }| {\n\n let canvas = canvas_element(canvas_id)?;\n\n let y = y + (canvas.scroll_top() + canvas.offset_top()) as f32;\n\n let x = x + (canvas.scroll_left() + canvas.offset_left()) as f32;\n\n // Canvas is translated 50% horizontally in html.\n\n let x = x - canvas.offset_width() as f32 / 2.0;\n\n style.set_property(\"position\", \"absolute\").ok()?;\n\n style.set_property(\"top\", &(y.to_string() + \"px\")).ok()?;\n\n 
style.set_property(\"left\", &(x.to_string() + \"px\")).ok()\n\n })\n\n } else {\n\n style.set_property(\"position\", \"absolute\").ok()?;\n\n style.set_property(\"top\", \"0px\").ok()?;\n\n style.set_property(\"left\", \"0px\").ok()\n\n }\n\n}\n", "file_path": "egui_web/src/lib.rs", "rank": 78, "score": 180237.64206273062 }, { "content": "pub fn screen_size_in_pixels(display: &glium::Display) -> Vec2 {\n\n let (width_in_pixels, height_in_pixels) = display.get_framebuffer_dimensions();\n\n vec2(width_in_pixels as f32, height_in_pixels as f32)\n\n}\n\n\n", "file_path": "egui_glium/src/lib.rs", "rank": 79, "score": 180016.82124804464 }, { "content": "// TODO: improve and standardize `slider_vec2`\n\nfn slider_vec2<'a>(\n\n value: &'a mut Vec2,\n\n range: std::ops::RangeInclusive<f32>,\n\n text: &'a str,\n\n) -> impl Widget + 'a {\n\n move |ui: &mut crate::Ui| {\n\n ui.horizontal(|ui| {\n\n ui.add(Slider::new(&mut value.x, range.clone()).text(\"w\"));\n\n ui.add(Slider::new(&mut value.y, range.clone()).text(\"h\"));\n\n ui.label(text);\n\n })\n\n .response\n\n }\n\n}\n\n\n", "file_path": "egui/src/style.rs", "rank": 80, "score": 177775.37319783468 }, { "content": "#[cfg(feature = \"syntect\")]\n\nstruct ColoredText(Vec<Vec<(syntect::highlighting::Style, String)>>);\n\n\n\n#[cfg(feature = \"syntect\")]\n\nimpl ColoredText {\n\n /// e.g. 
`text_with_extension(\"fn foo() {}\", \"rs\")`\n\n pub fn text_with_extension(text: &str, extension: &str) -> Option<ColoredText> {\n\n use syntect::easy::HighlightLines;\n\n use syntect::highlighting::ThemeSet;\n\n use syntect::parsing::SyntaxSet;\n\n use syntect::util::LinesWithEndings;\n\n\n\n let ps = SyntaxSet::load_defaults_newlines(); // should be cached and reused\n\n let ts = ThemeSet::load_defaults(); // should be cached and reused\n\n\n\n let syntax = ps.find_syntax_by_extension(extension)?;\n\n\n\n let mut h = HighlightLines::new(syntax, &ts.themes[\"base16-mocha.dark\"]);\n\n\n\n let lines = LinesWithEndings::from(text)\n\n .map(|line| {\n", "file_path": "egui_demo_lib/src/apps/http_app.rs", "rank": 81, "score": 174179.6246202818 }, { "content": "fn insert_text<S: TextBuffer>(ccursor: &mut CCursor, text: &mut S, text_to_insert: &str) {\n\n ccursor.index += text.insert_text(text_to_insert, ccursor.index);\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 82, "score": 173138.44605971093 }, { "content": "fn delete_selected<S: TextBuffer>(text: &mut S, cursorp: &CursorPair) -> CCursor {\n\n let [min, max] = cursorp.sorted();\n\n delete_selected_ccursor_range(text, [min.ccursor, max.ccursor])\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 83, "score": 170797.38162102585 }, { "content": "#[derive(Clone)]\n\nstruct Flux<State> {\n\n start_time: f64,\n\n latest_change_time: f64,\n\n latest_state: State,\n\n}\n\n\n\nimpl<State> Undoer<State>\n\nwhere\n\n State: Clone + PartialEq,\n\n{\n\n /// Do we have an undo point different from the given state?\n\n pub fn has_undo(&self, current_state: &State) -> bool {\n\n match self.undos.len() {\n\n 0 => false,\n\n 1 => self.undos.back() != Some(current_state),\n\n _ => true,\n\n }\n\n }\n\n\n\n /// Return true if the state is currently changing\n", "file_path": "egui/src/util/undoer.rs", "rank": 
84, "score": 170572.39352899988 }, { "content": "fn clamp_to_range(x: f32, range: RangeInclusive<f32>) -> f32 {\n\n x.clamp(\n\n range.start().min(*range.end()),\n\n range.start().max(*range.end()),\n\n )\n\n}\n", "file_path": "egui/src/containers/panel.rs", "rank": 85, "score": 169702.21881603982 }, { "content": "fn delete_previous_char<S: TextBuffer>(text: &mut S, ccursor: CCursor) -> CCursor {\n\n if ccursor.index > 0 {\n\n let max_ccursor = ccursor;\n\n let min_ccursor = max_ccursor - 1;\n\n delete_selected_ccursor_range(text, [min_ccursor, max_ccursor])\n\n } else {\n\n ccursor\n\n }\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 86, "score": 168796.92775360297 }, { "content": "fn delete_next_char<S: TextBuffer>(text: &mut S, ccursor: CCursor) -> CCursor {\n\n delete_selected_ccursor_range(text, [ccursor, ccursor + 1])\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 87, "score": 168796.92775360297 }, { "content": "fn delete_next_word<S: TextBuffer>(text: &mut S, min_ccursor: CCursor) -> CCursor {\n\n let max_ccursor = ccursor_next_word(text.as_ref(), min_ccursor);\n\n delete_selected_ccursor_range(text, [min_ccursor, max_ccursor])\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 88, "score": 166862.85358750433 }, { "content": "fn delete_previous_word<S: TextBuffer>(text: &mut S, max_ccursor: CCursor) -> CCursor {\n\n let min_ccursor = ccursor_previous_word(text.as_ref(), max_ccursor);\n\n delete_selected_ccursor_range(text, [min_ccursor, max_ccursor])\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 89, "score": 166862.85358750433 }, { "content": "pub fn read_ron<T>(ron_path: impl AsRef<Path>) -> Option<T>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n match std::fs::File::open(ron_path) {\n\n Ok(file) => {\n\n let reader = std::io::BufReader::new(file);\n\n match ron::de::from_reader(reader) {\n\n Ok(value) => Some(value),\n\n Err(err) => {\n\n eprintln!(\"ERROR: Failed to parse 
RON: {}\", err);\n\n None\n\n }\n\n }\n\n }\n\n Err(_err) => {\n\n // File probably doesn't exist. That's fine.\n\n None\n\n }\n\n }\n\n}\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "egui_glium/src/persistence.rs", "rank": 90, "score": 165057.0376474284 }, { "content": "/// If context is running under mobile device?\n\nfn is_mobile() -> Option<bool> {\n\n let user_agent = web_sys::window()?.navigator().user_agent().ok()?;\n\n let is_mobile = MOBILE_DEVICE.iter().any(|&name| user_agent.contains(name));\n\n Some(is_mobile)\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 91, "score": 158198.21247841945 }, { "content": "fn label_from_style(text: &str, style: &easy_mark::Style) -> Label {\n\n let easy_mark::Style {\n\n heading,\n\n quoted,\n\n code,\n\n strong,\n\n underline,\n\n strikethrough,\n\n italics,\n\n small,\n\n raised,\n\n } = *style;\n\n\n\n let small = small || raised; // Raised text is also smaller\n\n\n\n let mut label = Label::new(text);\n\n if heading && !small {\n\n label = label.heading().strong();\n\n }\n\n if small && !heading {\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 92, "score": 156494.0846761277 }, { "content": "pub fn set_cursor_icon(cursor: egui::CursorIcon) -> Option<()> {\n\n let document = web_sys::window()?.document()?;\n\n document\n\n .body()?\n\n .style()\n\n .set_property(\"cursor\", cursor_web_name(cursor))\n\n .ok()\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 93, "score": 155633.01376496477 }, { "content": "fn cursor_web_name(cursor: egui::CursorIcon) -> &'static str {\n\n match cursor {\n\n egui::CursorIcon::Alias => \"alias\",\n\n egui::CursorIcon::AllScroll => \"all-scroll\",\n\n egui::CursorIcon::Cell => \"cell\",\n\n egui::CursorIcon::ContextMenu => \"context-menu\",\n\n egui::CursorIcon::Copy => \"copy\",\n\n egui::CursorIcon::Crosshair => \"crosshair\",\n\n egui::CursorIcon::Default => \"default\",\n\n 
egui::CursorIcon::Grab => \"grab\",\n\n egui::CursorIcon::Grabbing => \"grabbing\",\n\n egui::CursorIcon::Help => \"help\",\n\n egui::CursorIcon::Move => \"move\",\n\n egui::CursorIcon::NoDrop => \"no-drop\",\n\n egui::CursorIcon::None => \"none\",\n\n egui::CursorIcon::NotAllowed => \"not-allowed\",\n\n egui::CursorIcon::PointingHand => \"pointer\",\n\n egui::CursorIcon::Progress => \"progress\",\n\n egui::CursorIcon::ResizeHorizontal => \"ew-resize\",\n\n egui::CursorIcon::ResizeNeSw => \"nesw-resize\",\n\n egui::CursorIcon::ResizeNwSe => \"nwse-resize\",\n\n egui::CursorIcon::ResizeVertical => \"ns-resize\",\n\n egui::CursorIcon::Text => \"text\",\n\n egui::CursorIcon::VerticalText => \"vertical-text\",\n\n egui::CursorIcon::Wait => \"wait\",\n\n egui::CursorIcon::ZoomIn => \"zoom-in\",\n\n egui::CursorIcon::ZoomOut => \"zoom-out\",\n\n }\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 94, "score": 155633.01376496477 }, { "content": "fn is_integer(f: f64) -> bool {\n\n f.round() == f\n\n}\n\n\n", "file_path": "emath/src/smart_aim.rs", "rank": 95, "score": 154583.46079302934 }, { "content": "#[inline]\n\nfn is_chinese(c: char) -> bool {\n\n ('\\u{4E00}' <= c && c <= '\\u{9FFF}')\n\n || ('\\u{3400}' <= c && c <= '\\u{4DBF}')\n\n || ('\\u{2B740}' <= c && c <= '\\u{2B81F}')\n\n}\n\n\n", "file_path": "epaint/src/text/font.rs", "rank": 96, "score": 154583.46079302934 }, { "content": "/// Cheap and ugly.\n\n/// Made for graying out disabled `Ui`:s.\n\npub fn tint_color_towards(color: Color32, target: Color32) -> Color32 {\n\n let [mut r, mut g, mut b, mut a] = color.to_array();\n\n\n\n if a == 0 {\n\n r /= 2;\n\n g /= 2;\n\n b /= 2;\n\n } else if a < 170 {\n\n // Cheapish and looks ok.\n\n // Works for e.g. 
grid stripes.\n\n let div = (2 * 255 / a as i32) as u8;\n\n r = r / 2 + target.r() / div;\n\n g = g / 2 + target.g() / div;\n\n b = b / 2 + target.b() / div;\n\n a /= 2;\n\n } else {\n\n r = r / 2 + target.r() / 2;\n\n g = g / 2 + target.g() / 2;\n\n b = b / 2 + target.b() / 2;\n\n }\n", "file_path": "epaint/src/color.rs", "rank": 97, "score": 154197.5456467208 }, { "content": "#[inline]\n\nfn invisible_char(c: char) -> bool {\n\n // See https://github.com/emilk/egui/issues/336\n\n\n\n // From https://www.fileformat.info/info/unicode/category/Cf/list.htm\n\n ('\\u{200B}'..='\\u{206F}').contains(&c) // TODO: heed bidi characters\n\n}\n\n\n", "file_path": "epaint/src/text/font.rs", "rank": 98, "score": 152402.6618448578 }, { "content": "#[derive(Default)]\n\nstruct TextureManager(HashMap<Gradient, TextureId>);\n\n\n\nimpl TextureManager {\n\n fn get(\n\n &mut self,\n\n tex_allocator: &mut dyn epi::TextureAllocator,\n\n gradient: &Gradient,\n\n ) -> TextureId {\n\n *self.0.entry(gradient.clone()).or_insert_with(|| {\n\n let pixels = gradient.to_pixel_row();\n\n let width = pixels.len();\n\n let height = 1;\n\n tex_allocator.alloc_srgba_premultiplied((width, height), &pixels)\n\n })\n\n }\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/color_test.rs", "rank": 99, "score": 152171.14804814168 } ]
Rust
crates/regex/src/re_set.rs
CryZe/libtww
0b8de9f451e7d8afda7a14d618bd3a7784b1d679
macro_rules! define_set { ($name:ident, $exec_build:expr, $text_ty:ty, $as_bytes:expr) => { pub mod $name { use libtww::std::fmt; use libtww::std::iter; use libtww::std::slice; use libtww::std::vec; use error::Error; use exec::{Exec, ExecBuilder}; use re_trait::RegularExpression; #[derive(Clone)] pub struct RegexSet(Exec); impl RegexSet { pub fn new<I, S>(exprs: I) -> Result<RegexSet, Error> where S: AsRef<str>, I: IntoIterator<Item=S> { let exec = try!($exec_build(exprs)); Ok(RegexSet(exec)) } pub fn is_match(&self, text: $text_ty) -> bool { self.0.searcher().is_match_at($as_bytes(text), 0) } pub fn matches(&self, text: $text_ty) -> SetMatches { let mut matches = vec![false; self.0.regex_strings().len()]; let any = self.0.searcher().many_matches_at( &mut matches, $as_bytes(text), 0); SetMatches { matched_any: any, matches: matches, } } pub fn len(&self) -> usize { self.0.regex_strings().len() } } #[derive(Clone, Debug)] pub struct SetMatches { matched_any: bool, matches: Vec<bool>, } impl SetMatches { pub fn matched_any(&self) -> bool { self.matched_any } pub fn matched(&self, regex_index: usize) -> bool { self.matches[regex_index] } pub fn len(&self) -> usize { self.matches.len() } pub fn iter(&self) -> SetMatchesIter { SetMatchesIter((&*self.matches).into_iter().enumerate()) } } impl IntoIterator for SetMatches { type IntoIter = SetMatchesIntoIter; type Item = usize; fn into_iter(self) -> Self::IntoIter { SetMatchesIntoIter(self.matches.into_iter().enumerate()) } } impl<'a> IntoIterator for &'a SetMatches { type IntoIter = SetMatchesIter<'a>; type Item = usize; fn into_iter(self) -> Self::IntoIter { self.iter() } } pub struct SetMatchesIntoIter(iter::Enumerate<vec::IntoIter<bool>>); impl Iterator for SetMatchesIntoIter { type Item = usize; fn next(&mut self) -> Option<usize> { loop { match self.0.next() { None => return None, Some((_, false)) => {} Some((i, true)) => return Some(i), } } } } impl DoubleEndedIterator for SetMatchesIntoIter { fn next_back(&mut 
self) -> Option<usize> { loop { match self.0.next_back() { None => return None, Some((_, false)) => {} Some((i, true)) => return Some(i), } } } } #[derive(Clone)] pub struct SetMatchesIter<'a>(iter::Enumerate<slice::Iter<'a, bool>>); impl<'a> Iterator for SetMatchesIter<'a> { type Item = usize; fn next(&mut self) -> Option<usize> { loop { match self.0.next() { None => return None, Some((_, &false)) => {} Some((i, &true)) => return Some(i), } } } } impl<'a> DoubleEndedIterator for SetMatchesIter<'a> { fn next_back(&mut self) -> Option<usize> { loop { match self.0.next_back() { None => return None, Some((_, &false)) => {} Some((i, &true)) => return Some(i), } } } } #[doc(hidden)] impl From<Exec> for RegexSet { fn from(exec: Exec) -> Self { RegexSet(exec) } } impl fmt::Debug for RegexSet { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "RegexSet({:?})", self.0.regex_strings()) } } #[allow(dead_code)] fn as_bytes_str(text: &str) -> &[u8] { text.as_bytes() } #[allow(dead_code)] fn as_bytes_bytes(text: &[u8]) -> &[u8] { text } } } } define_set! { unicode, |exprs| ExecBuilder::new_many(exprs).build(), &str, as_bytes_str } define_set! { bytes, |exprs| ExecBuilder::new_many(exprs).only_utf8(false).build(), &[u8], as_bytes_bytes }
macro_rules! define_set { ($name:ident, $exec_build:expr, $text_ty:ty, $as_bytes:expr) => { pub mod $name { use libtww::std::fmt; use libtww::std::iter; use libtww::std::slice; use libtww::std::vec; use error::Error; use exec::{Exec, ExecBuilder}; use re_trait::RegularExpression; #[derive(Clone)] pub struct RegexSet(Exec); impl RegexSet { pub fn new<I, S>(exprs: I) -> Result<RegexSet, Error> where S: AsRef<str>, I: IntoIterator<Item=S> { let exec = try!($exec_build(exprs)); Ok(RegexSet(exec)) } pub fn is_match(&self, text: $text_ty) -> bool { self.0.searcher().is_match_at($as_bytes(text), 0) } pub fn matches(&self, text: $text_ty) -> SetMatches { let mut matches = vec![false; self.0.regex_strings().len()]; let any = self.0.searcher().many_matches_at( &mut matches, $as_bytes(text), 0); SetMatches { matched_any: any, matches: matches, } } pub fn len(&self) -> usize { self.0.regex_strings().len() } } #[derive(Clone, Debug)] pub struct SetMatches { matched_any: bool, matches: Vec<bool>, } impl SetMatches { pub fn matched_any(&self) -> bool { self.matched_any } pub fn matched(&self, regex_index: usize) -> bool { self.matches[regex_index] } pub fn len(&self) -> usize { self.matches.len() } pub fn iter(&self) -> SetMatchesIter { SetMatchesIter((&*self.matches).into_iter().enumerate()) } } impl IntoIterator for SetMatches { type IntoIter = SetMatchesIntoIter; type Item = usize; fn into_iter(self) -> Self::IntoIter { SetMatchesIntoIter(self.matches.into_iter().enumerate()) } } impl<'a> IntoIterator for &'a SetMatches { type IntoIter = SetMatchesIter<'a>; type Item = usize; fn into_iter(self) -> Self::IntoIter { self.iter() } } pub struct SetMatchesIntoIter(iter::Enumerate<vec::IntoIter<bool>>); impl Iterator for SetMatchesIntoIter { type Item = usize; fn next(&mut self) -> Option
dedIterator for SetMatchesIter<'a> { fn next_back(&mut self) -> Option<usize> { loop { match self.0.next_back() { None => return None, Some((_, &false)) => {} Some((i, &true)) => return Some(i), } } } } #[doc(hidden)] impl From<Exec> for RegexSet { fn from(exec: Exec) -> Self { RegexSet(exec) } } impl fmt::Debug for RegexSet { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "RegexSet({:?})", self.0.regex_strings()) } } #[allow(dead_code)] fn as_bytes_str(text: &str) -> &[u8] { text.as_bytes() } #[allow(dead_code)] fn as_bytes_bytes(text: &[u8]) -> &[u8] { text } } } } define_set! { unicode, |exprs| ExecBuilder::new_many(exprs).build(), &str, as_bytes_str } define_set! { bytes, |exprs| ExecBuilder::new_many(exprs).only_utf8(false).build(), &[u8], as_bytes_bytes }
<usize> { loop { match self.0.next() { None => return None, Some((_, false)) => {} Some((i, true)) => return Some(i), } } } } impl DoubleEndedIterator for SetMatchesIntoIter { fn next_back(&mut self) -> Option<usize> { loop { match self.0.next_back() { None => return None, Some((_, false)) => {} Some((i, true)) => return Some(i), } } } } #[derive(Clone)] pub struct SetMatchesIter<'a>(iter::Enumerate<slice::Iter<'a, bool>>); impl<'a> Iterator for SetMatchesIter<'a> { type Item = usize; fn next(&mut self) -> Option<usize> { loop { match self.0.next() { None => return None, Some((_, &false)) => {} Some((i, &true)) => return Some(i), } } } } impl<'a> DoubleEn
random
[ { "content": "/// Tests if the given regular expression matches somewhere in the text given.\n\n///\n\n/// If there was a problem compiling the regular expression, an error is\n\n/// returned.\n\n///\n\n/// To find submatches, split or replace text, you'll need to compile an\n\n/// expression first.\n\npub fn is_match(regex: &str, text: &str) -> Result<bool, Error> {\n\n Regex::new(regex).map(|r| r.is_match(text))\n\n}\n\n\n\n/// A compiled regular expression for matching Unicode strings.\n\n///\n\n/// It is represented as either a sequence of bytecode instructions (dynamic)\n\n/// or as a specialized Rust function (native). It can be used to search, split\n\n/// or replace text. All searching is done with an implicit `.*?` at the\n\n/// beginning and end of an expression. To force an expression to match the\n\n/// whole string (or a prefix or a suffix), you must use an anchor like `^` or\n\n/// `$` (or `\\A` and `\\z`).\n\n///\n\n/// While this crate will handle Unicode strings (whether in the regular\n\n/// expression or in the search text), all positions returned are **byte\n\n/// indices**. 
Every byte index is guaranteed to be at a Unicode code point\n\n/// boundary.\n\n///\n\n/// The lifetimes `'r` and `'t` in this crate correspond to the lifetime of a\n\n/// compiled regular expression and text to search, respectively.\n", "file_path": "crates/regex/src/re_unicode.rs", "rank": 0, "score": 369525.8213242965 }, { "content": "/// Returns true iff the given regex and input should be executed by this\n\n/// engine with reasonable memory usage.\n\npub fn should_exec(num_insts: usize, text_len: usize) -> bool {\n\n // Total memory usage in bytes is determined by:\n\n //\n\n // ((len(insts) * (len(input) + 1) + bits - 1) / bits) * (size_of(u32))\n\n //\n\n // The actual limit picked is pretty much a heuristic.\n\n // See: https://github.com/rust-lang-nursery/regex/issues/215\n\n let size = ((num_insts * (text_len + 1) + BIT_SIZE - 1) / BIT_SIZE) * 4;\n\n size <= MAX_SIZE_BYTES\n\n}\n\n\n\n/// A backtracking matching engine.\n\n#[derive(Debug)]\n\npub struct Bounded<'a, 'm, 'r, 's, I> {\n\n prog: &'r Program,\n\n input: I,\n\n matches: &'m mut [bool],\n\n slots: &'s mut [Slot],\n\n m: &'a mut Cache,\n\n}\n", "file_path": "crates/regex/src/backtrack.rs", "rank": 1, "score": 348370.78929305903 }, { "content": "fn match_digits(ss: &mut &str, min_digits : usize, max_digits: usize, ws: bool) -> Option<i32> {\n\n match match_digits_i64(ss, min_digits, max_digits, ws) {\n\n Some(v) => Some(v as i32),\n\n None => None\n\n }\n\n}\n\n\n", "file_path": "crates/time/src/parse.rs", "rank": 2, "score": 318946.6122572385 }, { "content": "fn match_digits_i64(ss: &mut &str, min_digits : usize, max_digits: usize, ws: bool) -> Option<i64> {\n\n let mut value : i64 = 0;\n\n let mut n = 0;\n\n if ws {\n\n let s2 = ss.trim_left_matches(\" \");\n\n n = ss.len() - s2.len();\n\n if n > max_digits { return None }\n\n }\n\n let chars = ss[n..].char_indices();\n\n for (_, ch) in chars.take(max_digits - n) {\n\n match ch {\n\n '0' ... 
'9' => value = value * 10 + (ch as i64 - '0' as i64),\n\n _ => break,\n\n }\n\n n += 1;\n\n }\n\n\n\n if n >= min_digits && n <= max_digits {\n\n *ss = &ss[n..];\n\n Some(value)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "crates/time/src/parse.rs", "rank": 3, "score": 314223.4581688106 }, { "content": "// Iterate through `iter` while it matches `prefix`; return `None` if `prefix`\n\n// is not a prefix of `iter`, otherwise return `Some(iter_after_prefix)` giving\n\n// `iter` after having exhausted `prefix`.\n\nfn iter_after<A, I, J>(mut iter: I, mut prefix: J) -> Option<I>\n\n where I: Iterator<Item = A> + Clone,\n\n J: Iterator<Item = A>,\n\n A: PartialEq\n\n{\n\n loop {\n\n let mut iter_next = iter.clone();\n\n match (iter_next.next(), prefix.next()) {\n\n (Some(ref x), Some(ref y)) if x == y => (),\n\n (Some(_), Some(_)) => return None,\n\n (Some(_), None) => return Some(iter),\n\n (None, None) => return Some(iter),\n\n (None, Some(_)) => return None,\n\n }\n\n iter = iter_next;\n\n }\n\n}\n\n\n", "file_path": "src/system/path.rs", "rank": 4, "score": 302600.7622541302 }, { "content": "#[allow(dead_code)]\n\n#[inline]\n\npub fn encode_utf8(character: char, dst: &mut [u8]) -> Option<usize> {\n\n let code = character as u32;\n\n if code <= 0x7F && !dst.is_empty() {\n\n dst[0] = code as u8;\n\n Some(1)\n\n } else if code <= 0x7FF && dst.len() >= 2 {\n\n dst[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO;\n\n dst[1] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(2)\n\n } else if code <= 0xFFFF && dst.len() >= 3 {\n\n dst[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE;\n\n dst[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n\n dst[2] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(3)\n\n } else if dst.len() >= 4 {\n\n dst[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR;\n\n dst[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;\n\n dst[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n\n dst[3] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(4)\n", "file_path": "crates/regex/src/utf8.rs", "rank": 5, 
"score": 302195.4791308948 }, { "content": "#[inline]\n\npub fn encode_utf8(character: char, dst: &mut [u8]) -> Option<usize> {\n\n let code = character as u32;\n\n if code < MAX_ONE_B && !dst.is_empty() {\n\n dst[0] = code as u8;\n\n Some(1)\n\n } else if code < MAX_TWO_B && dst.len() >= 2 {\n\n dst[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;\n\n dst[1] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(2)\n\n } else if code < MAX_THREE_B && dst.len() >= 3 {\n\n dst[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;\n\n dst[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n\n dst[2] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(3)\n\n } else if dst.len() >= 4 {\n\n dst[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;\n\n dst[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;\n\n dst[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT;\n\n dst[3] = (code & 0x3F) as u8 | TAG_CONT;\n\n Some(4)\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "crates/utf8-ranges/src/char_utf8.rs", "rank": 6, "score": 292542.5575042928 }, { "content": "#[inline]\n\npub fn checked_pow<T: Clone + One + CheckedMul>(mut base: T, mut exp: usize) -> Option<T> {\n\n if exp == 0 {\n\n return Some(T::one());\n\n }\n\n\n\n macro_rules! optry {\n\n ( $ expr : expr ) => {\n\n if let Some(val) = $expr { val } else { return None }\n\n }\n\n }\n\n\n\n while exp & 1 == 0 {\n\n base = optry!(base.checked_mul(&base));\n\n exp >>= 1;\n\n }\n\n if exp == 1 {\n\n return Some(base);\n\n }\n\n\n\n let mut acc = base.clone();\n\n while exp > 1 {\n\n exp >>= 1;\n\n base = optry!(base.checked_mul(&base));\n\n if exp & 1 == 1 {\n\n acc = optry!(acc.checked_mul(&base));\n\n }\n\n }\n\n Some(acc)\n\n}\n", "file_path": "crates/num/traits/src/pow.rs", "rank": 7, "score": 277576.94251781714 }, { "content": "/// Return true if and only if the given program can be executed by a DFA.\n\n///\n\n/// Generally, a DFA is always possible. 
A pathological case where it is not\n\n/// possible is if the number of NFA states exceeds u32::MAX, in which case,\n\n/// this function will return false.\n\n///\n\n/// This function will also return false if the given program has any Unicode\n\n/// instructions (Char or Ranges) since the DFA operates on bytes only.\n\npub fn can_exec(insts: &Program) -> bool {\n\n use prog::Inst::*;\n\n // If for some reason we manage to allocate a regex program with more\n\n // than i32::MAX instructions, then we can't execute the DFA because we\n\n // use 32 bit instruction pointer deltas for memory savings.\n\n // If i32::MAX is the largest positive delta,\n\n // then -i32::MAX == i32::MIN + 1 is the largest negative delta,\n\n // and we are OK to use 32 bits.\n\n if insts.len() > ::libtww::std::i32::MAX as usize {\n\n return false;\n\n }\n\n for inst in insts {\n\n match *inst {\n\n Char(_) | Ranges(_) => return false,\n\n EmptyLook(_) | Match(_) | Save(_) | Split(_) | Bytes(_) => {}\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "crates/regex/src/dfa.rs", "rank": 8, "score": 273910.85926777753 }, { "content": "/// Returns the smallest possible index of the next valid UTF-8 sequence\n\n/// starting after `i`.\n\npub fn next_utf8(text: &[u8], i: usize) -> usize {\n\n let b = match text.get(i) {\n\n None => return i + 1,\n\n Some(&b) => b,\n\n };\n\n let inc = if b <= 0x7F {\n\n 1\n\n } else if b <= 0b110_11111 {\n\n 2\n\n } else if b <= 0b1110_1111 {\n\n 3\n\n } else {\n\n 4\n\n };\n\n i + inc\n\n}\n\n\n\n/// Encode the given Unicode character to `dst` as a single UTF-8 sequence.\n\n///\n\n/// If `dst` is not long enough, then `None` is returned. 
Otherwise, the number\n\n/// of bytes written is returned.\n", "file_path": "crates/regex/src/utf8.rs", "rank": 9, "score": 272793.01972902124 }, { "content": "fn match_str(s: &mut &str, needle: &str) -> bool {\n\n if s.starts_with(needle) {\n\n *s = &s[needle.len()..];\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "crates/time/src/parse.rs", "rank": 10, "score": 258604.53662282825 }, { "content": "/// Tries to format given arguments with given formatting items.\n\n/// Internally used by `DelayedFormat`.\n\npub fn format<'a, I>(w: &mut fmt::Formatter,\n\n date: Option<&NaiveDate>,\n\n time: Option<&NaiveTime>,\n\n off: Option<&(String, Duration)>,\n\n items: I)\n\n -> fmt::Result\n\n where I: Iterator<Item = Item<'a>>\n\n{\n\n // full and abbreviated month and weekday names\n\n static SHORT_MONTHS: [&'static str; 12] = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\", \"Jul\",\n\n \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\"];\n\n static LONG_MONTHS: [&'static str; 12] = [\"January\",\n\n \"February\",\n\n \"March\",\n\n \"April\",\n\n \"May\",\n\n \"June\",\n\n \"July\",\n\n \"August\",\n\n \"September\",\n", "file_path": "crates/chrono/src/format/mod.rs", "rank": 11, "score": 257442.7441833754 }, { "content": "/// Base functionality for all errors in Rust.\n\npub trait Error: Debug + Display + Reflect {\n\n /// A short description of the error.\n\n ///\n\n /// The description should not contain newlines or sentence-ending\n\n /// punctuation, to facilitate embedding in larger user-facing\n\n /// strings.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use std::error::Error;\n\n ///\n\n /// match \"xc\".parse::<u32>() {\n\n /// Err(e) => {\n\n /// println!(\"Error: {}\", e.description());\n\n /// }\n\n /// _ => println!(\"No error\"),\n\n /// }\n\n /// ```\n\n fn description(&self) -> &str;\n", "file_path": "src/system/error.rs", "rank": 12, "score": 255098.22362046636 }, { "content": "pub fn event_cancel() -> bool {\n\n 
memory::read(0x803BD3A3)\n\n}\n\n\n", "file_path": "src/game/event.rs", "rank": 13, "score": 255017.30957531842 }, { "content": "pub fn is_pause_menu_up() -> bool {\n\n read(0x803EA537) // alternative: 0x80396228\n\n}\n\n\n", "file_path": "src/system/tww.rs", "rank": 14, "score": 255017.30957531842 }, { "content": "#[inline(always)] // reduces constant overhead\n\npub fn memrchr(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n\n\n fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n fallback::memrchr(needle, haystack)\n\n }\n\n\n\n memrchr_specific(needle, haystack)\n\n}\n\n\n\nmod fallback {\n\n use std::cmp;\n\n use super::{USIZE_BYTES, contains_zero_byte, repeat_byte};\n\n\n\n /// Return the first index matching the byte `a` in `text`.\n\n pub fn memchr(x: u8, text: &[u8]) -> Option<usize> {\n\n // Scan for a single byte value by reading two `usize` words at a time.\n\n //\n\n // Split `text` in three parts\n\n // - unaligned inital part, before the first word aligned address in text\n\n // - body, scan by 2 words at a time\n", "file_path": "src/system/memchr.rs", "rank": 15, "score": 253398.16734351212 }, { "content": "#[inline(always)] // reduces constant overhead\n\npub fn memchr(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n fn memchr_specific(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n fallback::memchr(needle, haystack)\n\n }\n\n\n\n memchr_specific(needle, haystack)\n\n}\n\n\n\n/// A safe interface to `memrchr`.\n\n///\n\n/// Returns the index corresponding to the last occurrence of `needle` in\n\n/// `haystack`, or `None` if one is not found.\n\n///\n\n/// # Example\n\n///\n\n/// This shows how to find the last position of a byte in a byte string.\n\n///\n\n/// ```rust\n\n/// use memchr::memrchr;\n\n///\n\n/// let haystack = b\"the quick brown fox\";\n\n/// assert_eq!(memrchr(b'o', haystack), Some(17));\n\n/// ```\n", "file_path": "src/system/memchr.rs", "rank": 16, "score": 253398.16734351212 }, { "content": 
"#[inline]\n\npub fn decode_utf8(src: &[u8]) -> Option<(char, usize)> {\n\n let b0 = match src.get(0) {\n\n None => return None,\n\n Some(&b) if b <= 0x7F => return Some((b as char, 1)),\n\n Some(&b) => b,\n\n };\n\n match b0 {\n\n 0b110_00000...0b110_11111 => {\n\n if src.len() < 2 {\n\n return None;\n\n }\n\n let b1 = src[1];\n\n let cp = ((b0 & !TAG_TWO) as u32) << 6 | ((b1 & !TAG_CONT) as u32);\n\n match cp {\n\n 0x80...0x7FF => char::from_u32(cp).map(|cp| (cp, 2)),\n\n _ => None,\n\n }\n\n }\n\n 0b1110_0000...0b1110_1111 => {\n\n if src.len() < 3 {\n", "file_path": "crates/regex/src/utf8.rs", "rank": 17, "score": 253384.1926516733 }, { "content": "/// Randomly sample up to `amount` elements from an iterator.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use rand::{thread_rng, sample};\n\n///\n\n/// let mut rng = thread_rng();\n\n/// let sample = sample(&mut rng, 1..100, 5);\n\n/// println!(\"{:?}\", sample);\n\n/// ```\n\npub fn sample<T, I, R>(rng: &mut R, iterable: I, amount: usize) -> Vec<T>\n\n where I: IntoIterator<Item = T>,\n\n R: Rng\n\n{\n\n let mut iter = iterable.into_iter();\n\n let mut reservoir: Vec<T> = iter.by_ref().take(amount).collect();\n\n // continue unless the iterator was exhausted\n\n if reservoir.len() == amount {\n\n for (i, elem) in iter.enumerate() {\n\n let k = rng.gen_range(0, i + 1 + amount);\n\n if let Some(spot) = reservoir.get_mut(k) {\n\n *spot = elem;\n\n }\n\n }\n\n }\n\n reservoir\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "crates/rand/src/lib.rs", "rank": 18, "score": 251955.50998658492 }, { "content": "pub fn is_new_game_plus() -> bool {\n\n read(0x803B82A8)\n\n}\n\n\n", "file_path": "src/game/savefile.rs", "rank": 19, "score": 250710.30468077626 }, { "content": "/// Returns an encoding from Windows code page number.\n\n/// http://msdn.microsoft.com/en-us/library/windows/desktop/dd317756%28v=vs.85%29.aspx\n\n/// Sometimes it can return a *superset* of the requested encoding, e.g. 
for several CJK encodings.\n\npub fn encoding_from_windows_code_page(cp: usize) -> Option<EncodingRef> {\n\n match cp {\n\n 65001 => Some(all::UTF_8 as EncodingRef),\n\n 866 => Some(all::IBM866 as EncodingRef),\n\n 28591 => Some(all::ISO_8859_1 as EncodingRef),\n\n 28592 => Some(all::ISO_8859_2 as EncodingRef),\n\n 28593 => Some(all::ISO_8859_3 as EncodingRef),\n\n 28594 => Some(all::ISO_8859_4 as EncodingRef),\n\n 28595 => Some(all::ISO_8859_5 as EncodingRef),\n\n 28596 => Some(all::ISO_8859_6 as EncodingRef),\n\n 28597 => Some(all::ISO_8859_7 as EncodingRef),\n\n 28598 => Some(all::ISO_8859_8 as EncodingRef),\n\n 38598 => Some(all::whatwg::ISO_8859_8_I as EncodingRef),\n\n 28603 => Some(all::ISO_8859_13 as EncodingRef),\n\n 28605 => Some(all::ISO_8859_15 as EncodingRef),\n\n 20866 => Some(all::KOI8_R as EncodingRef),\n\n 21866 => Some(all::KOI8_U as EncodingRef),\n\n 10000 => Some(all::MAC_ROMAN as EncodingRef),\n\n 874 => Some(all::WINDOWS_874 as EncodingRef),\n\n 1250 => Some(all::WINDOWS_1250 as EncodingRef),\n", "file_path": "crates/encoding/src/label.rs", "rank": 20, "score": 250037.10002328246 }, { "content": "fn read_one_byte(reader: &mut Read) -> Option<Result<u8>> {\n\n let mut buf = [0];\n\n loop {\n\n return match reader.read(&mut buf) {\n\n Ok(0) => None,\n\n Ok(..) 
=> Some(Ok(buf[0])),\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n\n Err(e) => Some(Err(e)),\n\n };\n\n }\n\n}\n\n\n\n/// An iterator over `u8` values of a reader.\n\n///\n\n/// This struct is generally created by calling [`bytes()`][bytes] on a reader.\n\n/// Please see the documentation of `bytes()` for more details.\n\n///\n\n/// [bytes]: trait.Read.html#method.bytes\n\npub struct Bytes<R> {\n\n inner: R,\n", "file_path": "src/system/io/mod.rs", "rank": 21, "score": 249415.50375555747 }, { "content": "/// Like decode_utf8, but decodes the last UTF-8 sequence in `src` instead of\n\n/// the first.\n\npub fn decode_last_utf8(src: &[u8]) -> Option<(char, usize)> {\n\n if src.is_empty() {\n\n return None;\n\n }\n\n let mut start = src.len() - 1;\n\n if src[start] <= 0x7F {\n\n return Some((src[start] as char, 1));\n\n }\n\n while start > src.len().saturating_sub(4) {\n\n start -= 1;\n\n if is_start_byte(src[start]) {\n\n break;\n\n }\n\n }\n\n match decode_utf8(&src[start..]) {\n\n None => None,\n\n Some((_, n)) if n < src.len() - start => None,\n\n Some((cp, n)) => Some((cp, n)),\n\n }\n\n}\n\n\n", "file_path": "crates/regex/src/utf8.rs", "rank": 22, "score": 249015.2626778964 }, { "content": "#[inline(always)] // reduces constant overhead\n\npub fn memchr(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n fn memchr_specific(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n fallback::memchr(needle, haystack)\n\n }\n\n\n\n memchr_specific(needle, haystack)\n\n}\n\n\n\n/// A safe interface to `memrchr`.\n\n///\n\n/// Returns the index corresponding to the last occurrence of `needle` in\n\n/// `haystack`, or `None` if one is not found.\n\n///\n\n/// # Example\n\n///\n\n/// This shows how to find the last position of a byte in a byte string.\n\n///\n\n/// ```rust\n\n/// use memchr::memrchr;\n\n///\n\n/// let haystack = b\"the quick brown fox\";\n\n/// assert_eq!(memrchr(b'o', haystack), Some(17));\n\n/// ```\n", "file_path": 
"crates/memchr/src/lib.rs", "rank": 23, "score": 248796.0544518493 }, { "content": "#[inline(always)] // reduces constant overhead\n\npub fn memrchr(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n\n\n fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option<usize> {\n\n fallback::memrchr(needle, haystack)\n\n }\n\n\n\n memrchr_specific(needle, haystack)\n\n}\n\n\n", "file_path": "crates/memchr/src/lib.rs", "rank": 24, "score": 248796.0544518493 }, { "content": "/// Tries to parse given string into `parsed` with given formatting items.\n\n/// Returns `Ok` when the entire string has been parsed (otherwise `parsed` should not be used).\n\n/// There should be no trailing string after parsing;\n\n/// use a stray [`Item::Space`](./enum.Item.html#variant.Space) to trim whitespaces.\n\n///\n\n/// This particular date and time parser is:\n\n///\n\n/// - Greedy. It will consume the longest possible prefix.\n\n/// For example, `April` is always consumed entirely when the long month name is requested;\n\n/// it equally accepts `Apr`, but prefers the longer prefix in this case.\n\n///\n\n/// - Padding-agnostic (for numeric items).\n\n/// The [`Pad`](./enum.Pad.html) field is completely ignored,\n\n/// so one can prepend any number of whitespace then any number of zeroes before numbers.\n\n///\n\n/// - (Still) obeying the intrinsic parsing width. This allows, for example, parsing `HHMMSS`.\n\npub fn parse<'a, I>(parsed: &mut Parsed, mut s: &str, items: I) -> ParseResult<()>\n\n where I: Iterator<Item = Item<'a>>\n\n{\n\n macro_rules! 
try_consume {\n\n ($e:expr) => ({ let (s_, v) = try!($e); s = s_; v })\n\n }\n\n\n\n for item in items {\n\n match item {\n\n Item::Literal(prefix) => {\n\n if s.len() < prefix.len() {\n\n return Err(TOO_SHORT);\n\n }\n\n if !s.starts_with(prefix) {\n\n return Err(INVALID);\n\n }\n\n s = &s[prefix.len()..];\n\n }\n\n\n\n Item::Space(_) => {\n", "file_path": "crates/chrono/src/format/parse.rs", "rank": 25, "score": 245298.5475246195 }, { "content": "pub fn set_event_cancel(b: bool) {\n\n memory::write(0x803BD3A3, b);\n\n}\n", "file_path": "src/game/event.rs", "rank": 26, "score": 242898.84684095831 }, { "content": "pub fn is_teddy_128_available() -> bool {\n\n true\n\n}\n\n\n\n/// Match reports match information.\n\n#[derive(Debug, Clone)]\n\npub struct Match {\n\n /// The index of the pattern that matched. The index is in correspondence\n\n /// with the order of the patterns given at construction.\n\n pub pat: usize,\n\n /// The start byte offset of the match.\n\n pub start: usize,\n\n /// The end byte offset of the match. 
This is always `start + pat.len()`.\n\n pub end: usize,\n\n}\n\n\n\n/// A SIMD accelerated multi substring searcher.\n\n#[derive(Debug, Clone)]\n\npub struct Teddy {\n\n /// A list of substrings to match.\n", "file_path": "crates/regex/src/simd_accel/teddy128.rs", "rank": 27, "score": 242804.8367670538 }, { "content": "pub fn is_teddy_128_available() -> bool {\n\n false\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Teddy(());\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Match {\n\n pub pat: usize,\n\n pub start: usize,\n\n pub end: usize,\n\n}\n\n\n\nimpl Teddy {\n\n pub fn new(_pats: &syntax::Literals) -> Option<Teddy> { None }\n\n pub fn patterns(&self) -> &[Vec<u8>] { &[] }\n\n pub fn len(&self) -> usize { 0 }\n\n pub fn approximate_size(&self) -> usize { 0 }\n\n pub fn find(&self, _haystack: &[u8]) -> Option<Match> { None }\n\n}\n", "file_path": "crates/regex/src/simd_fallback/teddy128.rs", "rank": 28, "score": 242804.8367670538 }, { "content": "fn position(needle: &[u8], mut haystack: &[u8]) -> Option<usize> {\n\n let mut i = 0;\n\n while haystack.len() >= needle.len() {\n\n if needle == &haystack[..needle.len()] {\n\n return Some(i);\n\n }\n\n i += 1;\n\n haystack = &haystack[1..];\n\n }\n\n None\n\n}\n\n\n", "file_path": "crates/regex/regex-syntax/src/literals.rs", "rank": 29, "score": 240837.88700221514 }, { "content": "pub fn spawn(coord: &Coord, item: u8) {\n\n layer::switch_to_safe_layer();\n\n\n\n let func =\n\n unsafe { transmute::<Addr, extern \"C\" fn(*const Coord, u8, u32, u32, u32)>(0x80026920) };\n\n func(coord, item, 0x7f, 0, 0);\n\n}\n", "file_path": "src/link/item.rs", "rank": 30, "score": 240745.15362500364 }, { "content": "/// Determines whether the character is one of the permitted path\n\n/// separators for the current platform.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::path;\n\n///\n\n/// assert!(path::is_separator('/'));\n\n/// assert!(!path::is_separator('❤'));\n\n/// ```\n\npub fn is_separator(c: char) -> bool {\n\n 
c.is_ascii() && is_sep_byte(c as u8)\n\n}\n\n\n\n/// The primary separator for the current platform\n\npub const MAIN_SEPARATOR: char = MAIN_SEP;\n\n\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n/// Misc helpers\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "src/system/path.rs", "rank": 31, "score": 240466.4492435412 }, { "content": "pub fn is_down(buttons: u16) -> bool {\n\n buttons_down() & buttons == buttons\n\n}\n\n\n", "file_path": "src/game/controller.rs", "rank": 32, "score": 240461.55716703238 }, { "content": "fn match_strs(ss: &mut &str, strs: &[(&str, i32)]) -> Option<i32> {\n\n for &(needle, value) in strs.iter() {\n\n if match_str(ss, needle) {\n\n return Some(value)\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "crates/time/src/parse.rs", "rank": 33, "score": 236732.5951279308 }, { "content": "#[inline]\n\npub fn is_sep_byte(b: u8) -> bool {\n\n b == b'/'\n\n}\n\n\n", "file_path": "src/system/path.rs", "rank": 34, "score": 236154.5522724902 }, { "content": "#[inline]\n\npub fn is_verbatim_sep(b: u8) -> bool {\n\n b == b'/'\n\n}\n\n\n", "file_path": "src/system/path.rs", "rank": 35, "score": 236154.5522724902 }, { "content": "pub fn is_pressed(buttons: u16) -> bool {\n\n buttons_pressed() & buttons == buttons\n\n}\n", "file_path": "src/game/controller.rs", "rank": 36, "score": 236154.5522724902 }, { "content": "/// Like `memchr`, but searches for two bytes instead of one.\n\npub fn memchr2(needle1: u8, needle2: u8, haystack: &[u8]) -> Option<usize> {\n\n use libtww::std::cmp;\n\n\n\n fn slow(b1: u8, b2: u8, haystack: &[u8]) -> Option<usize> {\n\n haystack.iter().position(|&b| b == b1 || b == b2)\n\n }\n\n\n\n let len = haystack.len();\n\n let ptr = haystack.as_ptr();\n\n let align = (ptr as usize) & (USIZE_BYTES - 1);\n\n let mut i = 0;\n\n if align > 0 {\n\n i = cmp::min(USIZE_BYTES - align, len);\n\n if let Some(found) = slow(needle1, needle2, 
&haystack[..i]) {\n\n return Some(found);\n\n }\n\n }\n\n let repeated_b1 = repeat_byte(needle1);\n\n let repeated_b2 = repeat_byte(needle2);\n\n if len >= USIZE_BYTES {\n", "file_path": "crates/memchr/src/lib.rs", "rank": 37, "score": 232499.5115816465 }, { "content": "/// Parses the time from the string according to the format string.\n\npub fn strptime(mut s: &str, format: &str) -> Result<Tm, ParseError> {\n\n let mut tm = Tm {\n\n tm_sec: 0,\n\n tm_min: 0,\n\n tm_hour: 0,\n\n tm_mday: 0,\n\n tm_mon: 0,\n\n tm_year: 0,\n\n tm_wday: 0,\n\n tm_yday: 0,\n\n tm_isdst: 0,\n\n tm_utcoff: 0,\n\n tm_nsec: 0,\n\n };\n\n let mut chars = format.chars();\n\n\n\n while let Some(ch) = chars.next() {\n\n if ch == '%' {\n\n if let Some(ch) = chars.next() {\n\n try!(parse_type(&mut s, ch, &mut tm));\n\n }\n\n } else {\n\n try!(parse_char(&mut s, ch));\n\n }\n\n }\n\n\n\n Ok(tm)\n\n}\n\n\n", "file_path": "crates/time/src/parse.rs", "rank": 38, "score": 231950.06834229702 }, { "content": "pub fn get_entrance() -> &'static mut Entrance {\n\n reference(0x803B8138)\n\n}\n", "file_path": "src/game/savefile.rs", "rank": 39, "score": 231373.493238003 }, { "content": "/// Escapes all regular expression meta characters in `text`.\n\n///\n\n/// The string returned may be safely used as a literal in a regular\n\n/// expression.\n\npub fn quote(text: &str) -> String {\n\n syntax::quote(text)\n\n}\n\n\n", "file_path": "crates/regex/src/re_unicode.rs", "rank": 40, "score": 228358.07316238756 }, { "content": "/// Returns true if the give character has significance in a regex.\n\npub fn is_punct(c: char) -> bool {\n\n match c {\n\n '\\\\' | '.' | '+' | '*' | '?' 
| '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' |\n\n '#' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "crates/regex/regex-syntax/src/parser.rs", "rank": 41, "score": 228249.08435876775 }, { "content": "/// Blocks unless or until the current thread's token is made available.\n\n///\n\n/// Every thread is equipped with some basic low-level blocking support, via\n\n/// the `park()` function and the [`unpark()`][unpark] method. These can be\n\n/// used as a more CPU-efficient implementation of a spinlock.\n\n///\n\n/// [unpark]: struct.Thread.html#method.unpark\n\n///\n\n/// The API is typically used by acquiring a handle to the current thread,\n\n/// placing that handle in a shared data structure so that other threads can\n\n/// find it, and then parking (in a loop with a check for the token actually\n\n/// being acquired).\n\n///\n\n/// A call to `park` does not guarantee that the thread will remain parked\n\n/// forever, and callers should be prepared for this possibility.\n\n///\n\n/// See the [module documentation][thread] for more detail.\n\n///\n\n/// [thread]: index.html\n\n// The implementation currently uses the trivial strategy of a Mutex+Condvar\n\n// with wakeup flag, which does not actually allow spurious wakeups. 
In the\n\n// future, this will be implemented in a more efficient way, perhaps along the lines of\n\n// http://cr.openjdk.java.net/~stefank/6989984.1/raw_files/new/src/os/linux/vm/os_linux.cpp\n\n// or futuxes, and in either case may allow spurious wakeups.\n\npub fn park() {\n\n OS::suspend_thread(OS::get_current_thread());\n\n}\n\n\n\npub use self::manager::{collect, current, JoinHandle, Thread, spawn};", "file_path": "src/system/thread/mod.rs", "rank": 42, "score": 227763.1960233709 }, { "content": "fn parse_type(s: &mut &str, ch: char, tm: &mut Tm) -> Result<(), ParseError> {\n\n match ch {\n\n 'A' => match match_strs(s, &[(\"Sunday\", 0),\n\n (\"Monday\", 1),\n\n (\"Tuesday\", 2),\n\n (\"Wednesday\", 3),\n\n (\"Thursday\", 4),\n\n (\"Friday\", 5),\n\n (\"Saturday\", 6)]) {\n\n Some(v) => { tm.tm_wday = v; Ok(()) }\n\n None => Err(ParseError::InvalidDay)\n\n },\n\n 'a' => match match_strs(s, &[(\"Sun\", 0),\n\n (\"Mon\", 1),\n\n (\"Tue\", 2),\n\n (\"Wed\", 3),\n\n (\"Thu\", 4),\n\n (\"Fri\", 5),\n\n (\"Sat\", 6)]) {\n\n Some(v) => { tm.tm_wday = v; Ok(()) }\n", "file_path": "crates/time/src/parse.rs", "rank": 43, "score": 225621.67450600077 }, { "content": "/// Escapes all regular expression meta characters in `text`.\n\n///\n\n/// The string returned may be safely used as a literal in a regular\n\n/// expression.\n\npub fn quote(text: &str) -> String {\n\n let mut quoted = String::with_capacity(text.len());\n\n for c in text.chars() {\n\n if parser::is_punct(c) {\n\n quoted.push('\\\\');\n\n }\n\n quoted.push(c);\n\n }\n\n quoted\n\n}\n\n\n", "file_path": "crates/regex/regex-syntax/src/lib.rs", "rank": 44, "score": 224721.37883491698 }, { "content": "#[doc(hidden)]\n\npub fn is_word_char(c: char) -> bool {\n\n match c {\n\n '_' | '0'...'9' | 'a'...'z' | 'A'...'Z' => true,\n\n _ => {\n\n ::unicode::regex::PERLW.binary_search_by(|&(start, end)| {\n\n if c >= start && c <= end {\n\n Ordering::Equal\n\n } else if start > c {\n\n Ordering::Greater\n\n } else {\n\n 
Ordering::Less\n\n }\n\n })\n\n .is_ok()\n\n }\n\n }\n\n}\n\n\n\n/// Returns true if and only if `c` is an ASCII word byte.\n", "file_path": "crates/regex/regex-syntax/src/lib.rs", "rank": 45, "score": 224612.80433109048 }, { "content": "#[doc(hidden)]\n\npub fn is_word_byte(b: u8) -> bool {\n\n match b {\n\n b'_' | b'0'...b'9' | b'a'...b'z' | b'A'...b'Z' => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod properties;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {CharClass, ClassRange, ByteClass, ByteRange, Expr};\n\n\n\n fn class(ranges: &[(char, char)]) -> CharClass {\n\n let ranges = ranges.iter()\n\n .cloned()\n\n .map(|(c1, c2)| ClassRange::new(c1, c2))\n\n .collect();\n\n CharClass::new(ranges)\n", "file_path": "crates/regex/regex-syntax/src/lib.rs", "rank": 46, "score": 224612.80433109048 }, { "content": "#[plugin_registrar]\n\n#[doc(hidden)]\n\npub fn plugin_registrar(reg: &mut Registry) {\n\n reg.register_syntax_extension(\n\n token::intern(\"derive_NumFromPrimitive\"),\n\n MultiDecorator(Box::new(expand_deriving_from_primitive)));\n\n}\n", "file_path": "crates/num/macros/src/lib.rs", "rank": 47, "score": 223902.31590692053 }, { "content": "pub fn fopacm_create_append() -> &'static mut ActorMemory {\n\n let fopacm_create_append =\n\n unsafe { transmute::<Addr, extern \"C\" fn() -> *mut ActorMemory>(0x80023f3c) };\n\n let actor_memory = fopacm_create_append();\n\n unsafe { &mut *actor_memory }\n\n}\n\n\n\n\n", "file_path": "src/system/tww.rs", "rank": 48, "score": 223902.31590692053 }, { "content": "/// Cooperatively gives up a timeslice to the OS scheduler.\n\npub fn yield_now() {\n\n OS::yield_thread()\n\n}\n\n\n", "file_path": "src/system/thread/mod.rs", "rank": 49, "score": 223192.4666534207 }, { "content": "pub fn parse_prefix(_: &OsStr) -> Option<Prefix> {\n\n None\n\n}\n\n\n\npub const MAIN_SEP_STR: &'static str = \"/\";\n\npub const MAIN_SEP: char = '/';\n\n\n\n/// 
/////////////////////////////////////////////////////////////////////////////\n\n/// GENERAL NOTES\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n///\n\n/// Parsing in this module is done by directly transmuting OsStr to [u8] slices,\n\n/// taking advantage of the fact that OsStr always encodes ASCII characters\n\n/// as-is. Eventually, this transmutation should be replaced by direct uses of\n\n/// OsStr APIs for parsing, but it will take a while for those to become\n\n/// available.\n\n\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n/// Windows Prefixes\n\n/// /////////////////////////////////////////////////////////////////////////////\n", "file_path": "src/system/path.rs", "rank": 50, "score": 222365.76977615542 }, { "content": "pub fn mktime(tm: &mut Tm) -> time_t {\n\n let mut new: Tm = unsafe { uninitialized() };\n\n let mut opp = unsafe { uninitialized() };\n\n let mut t = tm_to_secs(tm);\n\n\n\n secs_to_zone(t, 1, &mut new.tm_isdst, &mut new.tm_utcoff, &mut opp);\n\n\n\n if tm.tm_isdst >= 0 && new.tm_isdst != tm.tm_isdst {\n\n t -= opp as c_longlong - new.tm_utcoff as c_longlong;\n\n }\n\n\n\n t -= new.tm_utcoff as c_longlong;\n\n\n\n // if t as time_t != t {\n\n // return -1;\n\n // }\n\n\n\n secs_to_zone(t, 0, &mut new.tm_isdst, &mut new.tm_utcoff, &mut opp);\n\n\n\n if secs_to_tm(t + new.tm_utcoff as c_longlong, &mut new) < 0 {\n\n return -1;\n\n }\n\n\n\n *tm = new;\n\n t as time_t\n\n}", "file_path": "crates/time/src/musl.rs", "rank": 51, "score": 221691.8125619393 }, { "content": "pub fn timegm(tm: &mut Tm) -> time_t {\n\n let mut new: Tm = unsafe { uninitialized() };\n\n let t = tm_to_secs(tm);\n\n\n\n if secs_to_tm(t, &mut new) < 0 {\n\n return -1;\n\n }\n\n\n\n *tm = new;\n\n tm.tm_isdst = 0;\n\n tm.tm_utcoff = 0;\n\n\n\n t as time_t\n\n}\n\n\n", "file_path": "crates/time/src/musl.rs", "rank": 52, "score": 221691.8125619393 }, { "content": 
"#[plugin_registrar]\n\n#[doc(hidden)]\n\npub fn plugin_registrar(reg: &mut Registry) {\n\n reg.register_macro(\"regex\", native);\n\n}\n\n\n", "file_path": "crates/regex/regex_macros/src/lib.rs", "rank": 53, "score": 220458.02296188287 }, { "content": "#[plugin_registrar]\n\npub fn plugin_registrar(reg: &mut Registry) {\n\n reg.register_syntax_extension(token::intern(\"derive_Rand\"),\n\n base::MultiDecorator(Box::new(expand_deriving_rand)));\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "crates/rand/rand_macros/src/lib.rs", "rank": 54, "score": 220458.02296188287 }, { "content": "fn ascii_class(name: &str) -> Option<CharClass> {\n\n ASCII_CLASSES.binary_search_by(|&(s, _)| s.cmp(name))\n\n .ok()\n\n .map(|i| raw_class_to_expr(ASCII_CLASSES[i].1))\n\n}\n\n\n", "file_path": "crates/regex/regex-syntax/src/parser.rs", "rank": 55, "score": 219390.77079468936 }, { "content": "fn unicode_class(name: &str) -> Option<CharClass> {\n\n UNICODE_CLASSES.binary_search_by(|&(s, _)| s.cmp(name))\n\n .ok()\n\n .map(|i| raw_class_to_expr(UNICODE_CLASSES[i].1))\n\n}\n\n\n", "file_path": "crates/regex/regex-syntax/src/parser.rs", "rank": 56, "score": 219390.77079468936 }, { "content": "/// Like `memchr`, but searches for three bytes instead of one.\n\npub fn memchr3(needle1: u8, needle2: u8, needle3: u8, haystack: &[u8]) -> Option<usize> {\n\n use libtww::std::cmp;\n\n\n\n fn slow(b1: u8, b2: u8, b3: u8, haystack: &[u8]) -> Option<usize> {\n\n haystack.iter().position(|&b| b == b1 || b == b2 || b == b3)\n\n }\n\n\n\n let len = haystack.len();\n\n let ptr = haystack.as_ptr();\n\n let align = (ptr as usize) & (USIZE_BYTES - 1);\n\n let mut i = 0;\n\n if align > 0 {\n\n i = cmp::min(USIZE_BYTES - align, len);\n\n if let Some(found) = slow(needle1, needle2, needle3, &haystack[..i]) {\n\n return Some(found);\n\n }\n\n }\n\n let repeated_b1 = repeat_byte(needle1);\n\n let repeated_b2 = repeat_byte(needle2);\n\n let repeated_b3 = repeat_byte(needle3);\n", "file_path": "crates/memchr/src/lib.rs", 
"rank": 57, "score": 218442.66012752388 }, { "content": "/// Says whether the first byte after the prefix is a separator.\n\nfn has_physical_root(s: &[u8], prefix: Option<Prefix>) -> bool {\n\n let path = if let Some(p) = prefix {\n\n &s[p.len()..]\n\n } else {\n\n s\n\n };\n\n !path.is_empty() && is_sep_byte(path[0])\n\n}\n\n\n", "file_path": "src/system/path.rs", "rank": 58, "score": 218434.80417396006 }, { "content": "// This uses an adaptive system to extend the vector when it fills. We want to\n\n// avoid paying to allocate and zero a huge chunk of memory if the reader only\n\n// has 4 bytes while still making large reads if the reader does have a ton\n\n// of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every\n\n// time is 4,500 times (!) slower than this if the reader has a very small\n\n// amount of data to return.\n\nfn read_to_end<R: Read + ?Sized>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize> {\n\n let start_len = buf.len();\n\n let mut len = start_len;\n\n let mut new_write_size = 16;\n\n let ret;\n\n loop {\n\n if len == buf.len() {\n\n if new_write_size < DEFAULT_BUF_SIZE {\n\n new_write_size *= 2;\n\n }\n\n buf.resize(len + new_write_size, 0);\n\n }\n\n\n\n match r.read(&mut buf[len..]) {\n\n Ok(0) => {\n\n ret = Ok(len - start_len);\n\n break;\n\n }\n\n Ok(n) => len += n,\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => {}\n", "file_path": "src/system/io/mod.rs", "rank": 59, "score": 217768.53195675247 }, { "content": "#[inline]\n\npub fn pow<T: Clone + One + Mul<T, Output = T>>(mut base: T, mut exp: usize) -> T {\n\n if exp == 0 {\n\n return T::one();\n\n }\n\n\n\n while exp & 1 == 0 {\n\n base = base.clone() * base;\n\n exp >>= 1;\n\n }\n\n if exp == 1 {\n\n return base;\n\n }\n\n\n\n let mut acc = base.clone();\n\n while exp > 1 {\n\n exp >>= 1;\n\n base = base.clone() * base;\n\n if exp & 1 == 1 {\n\n acc = acc * base.clone();\n\n }\n", "file_path": "crates/num/traits/src/pow.rs", "rank": 60, "score": 
217651.72489835887 }, { "content": "pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,\n\n span: Span,\n\n mitem: &MetaItem,\n\n item: &Annotatable,\n\n push: &mut FnMut(Annotatable))\n\n{\n\n let inline = cx.meta_word(span, InternedString::new(\"inline\"));\n\n let attrs = vec!(cx.attribute(span, inline));\n\n let trait_def = TraitDef {\n\n is_unsafe: false,\n\n span: span,\n\n attributes: Vec::new(),\n\n path: path!(num::FromPrimitive),\n\n additional_bounds: Vec::new(),\n\n generics: LifetimeBounds::empty(),\n\n methods: vec!(\n\n MethodDef {\n\n name: \"from_i64\",\n\n is_unsafe: false,\n\n unify_fieldless_variants: false,\n", "file_path": "crates/num/macros/src/lib.rs", "rank": 61, "score": 217188.24774335138 }, { "content": "pub fn ptr<T>(addr: Addr) -> *mut T {\n\n unsafe { mem::transmute(addr) }\n\n}\n\n\n", "file_path": "src/system/memory.rs", "rank": 62, "score": 216550.5217375946 }, { "content": "// A few methods below (read_to_string, read_line) will append data into a\n\n// `String` buffer, but we need to be pretty careful when doing this. The\n\n// implementation will just call `.as_mut_vec()` and then delegate to a\n\n// byte-oriented reading method, but we must ensure that when returning we never\n\n// leave `buf` in a state such that it contains invalid UTF-8 in its bounds.\n\n//\n\n// To this end, we use an RAII guard (to protect against panics) which updates\n\n// the length of the string when it is dropped. This guard initially truncates\n\n// the string to the prior length and only after we've validated that the\n\n// new contents are valid UTF-8 do we allow it to set a longer length.\n\n//\n\n// The unsafety in this function is twofold:\n\n//\n\n// 1. We're looking at the raw bytes of `buf`, so we take on the burden of UTF-8\n\n// checks.\n\n// 2. We're passing a raw buffer to the function `f`, and it is expected that\n\n// the function only *appends* bytes to the buffer. 
We'll get undefined\n\n// behavior if existing bytes are overwritten to have non-UTF-8 data.\n\nfn append_to_string<F>(buf: &mut String, f: F) -> Result<usize>\n\n where F: FnOnce(&mut Vec<u8>) -> Result<usize>\n\n{\n\n struct Guard<'a> {\n\n s: &'a mut Vec<u8>,\n\n len: usize,\n\n }\n\n impl<'a> Drop for Guard<'a> {\n\n fn drop(&mut self) {\n\n unsafe {\n\n self.s.set_len(self.len);\n\n }\n\n }\n\n }\n\n\n\n unsafe {\n\n let mut g = Guard {\n\n len: buf.len(),\n\n s: buf.as_mut_vec(),\n\n };\n", "file_path": "src/system/io/mod.rs", "rank": 63, "score": 215230.05448546784 }, { "content": "pub fn expand_deriving_rand(cx: &mut ExtCtxt,\n\n span: Span,\n\n mitem: &MetaItem,\n\n item: &Annotatable,\n\n push: &mut FnMut(Annotatable)) {\n\n let trait_def = TraitDef {\n\n span: span,\n\n attributes: Vec::new(),\n\n path: Path::new(vec!(\"rand\", \"Rand\")),\n\n additional_bounds: Vec::new(),\n\n generics: LifetimeBounds::empty(),\n\n is_unsafe: false,\n\n methods: vec!(\n\n MethodDef {\n\n name: \"rand\",\n\n is_unsafe: false,\n\n generics: LifetimeBounds {\n\n lifetimes: Vec::new(),\n\n bounds: vec!((\"R\",\n\n vec!( Path::new(vec!(\"rand\", \"Rng\")) )))\n", "file_path": "crates/rand/rand_macros/src/lib.rs", "rank": 64, "score": 214080.0541094322 }, { "content": "fn find_cap_ref(mut replacement: &[u8]) -> Option<CaptureRef> {\n\n if replacement.len() <= 1 || replacement[0] != b'$' {\n\n return None;\n\n }\n\n let mut brace = false;\n\n replacement = &replacement[1..];\n\n if replacement[0] == b'{' {\n\n brace = true;\n\n replacement = &replacement[1..];\n\n }\n\n let mut cap_end = 0;\n\n while replacement.get(cap_end).map_or(false, is_valid_cap_letter) {\n\n cap_end += 1;\n\n }\n\n if cap_end == 0 {\n\n return None;\n\n }\n\n // We just verified that the range 0..cap_end is valid ASCII, so it must\n\n // therefore be valid UTF-8. 
If we really cared, we could avoid this UTF-8\n\n // check with either unsafe or by parsing the number straight from &[u8].\n", "file_path": "crates/regex/src/expand.rs", "rank": 65, "score": 212636.63645916083 }, { "content": "type CaptureName = Option<String>;\n\n\n\n/// The type of a repeat operator expression.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum Repeater {\n\n /// Match zero or one (`?`).\n\n ZeroOrOne,\n\n /// Match zero or more (`*`).\n\n ZeroOrMore,\n\n /// Match one or more (`+`).\n\n OneOrMore,\n\n /// Match for at least `min` and at most `max` (`{m,n}`).\n\n ///\n\n /// When `max` is `None`, there is no upper bound on the number of matches.\n\n Range {\n\n /// Lower bound on the number of matches.\n\n min: u32,\n\n /// Optional upper bound on the number of matches.\n\n max: Option<u32>,\n\n },\n", "file_path": "crates/regex/regex-syntax/src/lib.rs", "rank": 66, "score": 212074.1525931839 }, { "content": "type CaptureIndex = Option<usize>;\n\n\n", "file_path": "crates/regex/regex-syntax/src/lib.rs", "rank": 67, "score": 212025.98364913109 }, { "content": "/// Returns an encoding from given label, defined in the WHATWG Encoding standard, if any.\n\n/// Implements \"get an encoding\" algorithm: http://encoding.spec.whatwg.org/#concept-encoding-get\n\npub fn encoding_from_whatwg_label(label: &str) -> Option<EncodingRef> {\n\n let label = label.trim_matches(&[' ', '\\n', '\\r', '\\t', '\\x0C'][..]);\n\n let label: String =\n\n label.chars().map(|c| match c { 'A'...'Z' => (c as u8 + 32) as char, _ => c }).collect();\n\n match &label[..] 
{\n\n \"unicode-1-1-utf-8\" |\n\n \"utf-8\" |\n\n \"utf8\" =>\n\n Some(all::UTF_8 as EncodingRef),\n\n \"866\" |\n\n \"cp866\" |\n\n \"csibm866\" |\n\n \"ibm866\" =>\n\n Some(all::IBM866 as EncodingRef),\n\n \"csisolatin2\" |\n\n \"iso-8859-2\" |\n\n \"iso-ir-101\" |\n\n \"iso8859-2\" |\n\n \"iso88592\" |\n\n \"iso_8859-2\" |\n", "file_path": "crates/encoding/src/label.rs", "rank": 68, "score": 212009.77855231738 }, { "content": "pub fn expand(caps: &Captures, mut replacement: &[u8], dst: &mut Vec<u8>) {\n\n while !replacement.is_empty() {\n\n match memchr(b'$', replacement) {\n\n None => break,\n\n Some(i) => {\n\n dst.extend(&replacement[..i]);\n\n replacement = &replacement[i..];\n\n }\n\n }\n\n if replacement.get(1).map_or(false, |&b| b == b'$') {\n\n dst.push(b'$');\n\n replacement = &replacement[2..];\n\n continue;\n\n }\n\n debug_assert!(!replacement.is_empty());\n\n let cap_ref = match find_cap_ref(replacement) {\n\n Some(cap_ref) => cap_ref,\n\n None => {\n\n dst.push(b'$');\n\n replacement = &replacement[1..];\n", "file_path": "crates/regex/src/expand.rs", "rank": 69, "score": 211163.04832301792 }, { "content": "pub fn localtime_r<'a>(t: &time_t, tm: &'a mut Tm) -> Result<&'a mut Tm, ()> {\n\n let mut opp = unsafe { uninitialized() };\n\n\n\n // Reject time_t values whose year would overflow int because\n\n // __secs_to_zone cannot safely handle them.\n\n // if (*t < INT_MIN * 31622400LL || *t > INT_MAX * 31622400LL) {\n\n // \terrno = EOVERFLOW;\n\n // \treturn 0;\n\n // }\n\n\n\n secs_to_zone(*t as c_longlong,\n\n 0,\n\n &mut tm.tm_isdst,\n\n &mut tm.tm_utcoff,\n\n &mut opp);\n\n\n\n if secs_to_tm(*t as c_longlong + tm.tm_utcoff as c_longlong, tm) < 0 {\n\n return Err(());\n\n }\n\n\n\n Ok(tm)\n\n}\n\n\n", "file_path": "crates/time/src/musl.rs", "rank": 70, "score": 211026.04145709044 }, { "content": "fn year_to_secs(year: c_longlong, is_leap: &mut bool) -> c_longlong {\n\n if year - 2 <= 136 {\n\n let y = year;\n\n let mut leaps = (y - 68) >> 2;\n\n if 
(y - 68) & 3 == 0 {\n\n leaps -= 1;\n\n *is_leap = true;\n\n } else {\n\n *is_leap = false;\n\n }\n\n return 31536000 * (y - 70) + 86400 * leaps;\n\n }\n\n\n\n let (mut cycles, centuries, mut leaps, mut rem);\n\n\n\n cycles = (year - 100) / 400;\n\n rem = (year - 100) % 400;\n\n\n\n if rem < 0 {\n\n cycles -= 1;\n", "file_path": "crates/time/src/musl.rs", "rank": 71, "score": 210705.81741707143 }, { "content": "#[inline]\n\nfn contains_zero_byte(x: usize) -> bool {\n\n x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0\n\n}\n\n\n", "file_path": "src/system/memchr.rs", "rank": 72, "score": 209603.43706822154 }, { "content": "pub fn sub2(a: &mut [BigDigit], b: &[BigDigit]) {\n\n let mut borrow = 0;\n\n\n\n let len = cmp::min(a.len(), b.len());\n\n let (a_lo, a_hi) = a.split_at_mut(len);\n\n let (b_lo, b_hi) = b.split_at(len);\n\n\n\n for (a, b) in a_lo.iter_mut().zip(b_lo) {\n\n *a = sbb(*a, *b, &mut borrow);\n\n }\n\n\n\n if borrow != 0 {\n\n for a in a_hi {\n\n *a = sbb(*a, 0, &mut borrow);\n\n if borrow == 0 { break }\n\n }\n\n }\n\n\n\n // note: we're _required_ to fail on underflow\n\n assert!(borrow == 0 && b_hi.iter().all(|x| *x == 0),\n\n \"Cannot subtract b from a because b is larger than a.\");\n\n}\n\n\n", "file_path": "crates/num/bigint/src/algorithms.rs", "rank": 73, "score": 209473.06323632528 }, { "content": "pub fn sub2rev(a: &[BigDigit], b: &mut [BigDigit]) {\n\n debug_assert!(b.len() >= a.len());\n\n\n\n let mut borrow = 0;\n\n\n\n let len = cmp::min(a.len(), b.len());\n\n let (a_lo, a_hi) = a.split_at(len);\n\n let (b_lo, b_hi) = b.split_at_mut(len);\n\n\n\n for (a, b) in a_lo.iter().zip(b_lo) {\n\n *b = sbb(*a, *b, &mut borrow);\n\n }\n\n\n\n assert!(a_hi.is_empty());\n\n\n\n // note: we're _required_ to fail on underflow\n\n assert!(borrow == 0 && b_hi.iter().all(|x| *x == 0),\n\n \"Cannot subtract b from a because b is larger than a.\");\n\n}\n\n\n", "file_path": "crates/num/bigint/src/algorithms.rs", "rank": 74, "score": 209473.06323632528 }, { 
"content": "/// /Two argument addition of raw slices:\n\n/// a += b\n\n///\n\n/// The caller _must_ ensure that a is big enough to store the result - typically this means\n\n/// resizing a to max(a.len(), b.len()) + 1, to fit a possible carry.\n\npub fn add2(a: &mut [BigDigit], b: &[BigDigit]) {\n\n let carry = __add2(a, b);\n\n\n\n debug_assert!(carry == 0);\n\n}\n\n\n", "file_path": "crates/num/bigint/src/algorithms.rs", "rank": 75, "score": 209473.06323632528 }, { "content": "pub fn reference<T>(addr: Addr) -> &'static mut T {\n\n unsafe { &mut *ptr(addr) }\n\n}\n\n\n", "file_path": "src/system/memory.rs", "rank": 76, "score": 208358.9891297673 }, { "content": "fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>) -> Result<usize> {\n\n let mut read = 0;\n\n loop {\n\n let (done, used) = {\n\n let available = match r.fill_buf() {\n\n Ok(n) => n,\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,\n\n Err(e) => return Err(e),\n\n };\n\n match memchr::memchr(delim, available) {\n\n Some(i) => {\n\n buf.extend_from_slice(&available[..i + 1]);\n\n (true, i + 1)\n\n }\n\n None => {\n\n buf.extend_from_slice(available);\n\n (false, available.len())\n\n }\n\n }\n\n };\n\n r.consume(used);\n\n read += used;\n\n if done || used == 0 {\n\n return Ok(read);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/system/io/mod.rs", "rank": 77, "score": 207556.85195196944 }, { "content": "#[inline]\n\nfn contains_zero_byte(x: usize) -> bool {\n\n x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0\n\n}\n\n\n", "file_path": "crates/memchr/src/lib.rs", "rank": 78, "score": 205533.80365387173 }, { "content": "/// Almost equivalent to `libtww::std::str::from_utf8`.\n\n/// This function is provided for the fair benchmark against the stdlib's UTF-8 conversion\n\n/// functions, as rust-encoding always allocates a new string.\n\npub fn from_utf8<'a>(input: &'a [u8]) -> Option<&'a str> {\n\n let mut iter = input.iter();\n\n let mut state;\n\n\n\n macro_rules! 
return_as_whole(() => (return Some(unsafe {mem::transmute(input)})));\n\n\n\n // optimization: if we are in the initial state, quickly skip to the first non-MSB-set byte.\n\n loop {\n\n match iter.next() {\n\n Some(&ch) if ch < 0x80 => {}\n\n Some(&ch) => {\n\n state = next_state!(INITIAL_STATE, ch);\n\n break;\n\n }\n\n None => {\n\n return_as_whole!();\n\n }\n\n }\n\n }\n\n\n", "file_path": "crates/encoding/src/codec/utf_8.rs", "rank": 79, "score": 204870.682024443 }, { "content": "/// Creates a new synchronous, bounded channel.\n\n///\n\n/// Like asynchronous channels, the `Receiver` will block until a message\n\n/// becomes available. These channels differ greatly in the semantics of the\n\n/// sender from asynchronous channels, however.\n\n///\n\n/// This channel has an internal buffer on which messages will be queued. When\n\n/// the internal buffer becomes full, future sends will *block* waiting for the\n\n/// buffer to open up. Note that a buffer size of 0 is valid, in which case this\n\n/// becomes \"rendezvous channel\" where each send will not return until a recv\n\n/// is paired with it.\n\n///\n\n/// As with asynchronous channels, all senders will panic in `send` if the\n\n/// `Receiver` has been destroyed.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::sync::mpsc::sync_channel;\n\n/// use std::thread;\n\n///\n\n/// let (tx, rx) = sync_channel(1);\n\n///\n\n/// // this returns immediately\n\n/// tx.send(1).unwrap();\n\n///\n\n/// thread::spawn(move|| {\n\n/// // this will block until the previous message has been received\n\n/// tx.send(2).unwrap();\n\n/// });\n\n///\n\n/// assert_eq!(rx.recv().unwrap(), 1);\n\n/// assert_eq!(rx.recv().unwrap(), 2);\n\n/// ```\n\npub fn sync_channel<T>(bound: usize) -> (SyncSender<T>, Receiver<T>) {\n\n let a = Arc::new(UnsafeCell::new(sync::Packet::new(bound)));\n\n (SyncSender::new(a.clone()), Receiver::new(Flavor::Sync(a)))\n\n}\n\n\n\n/// 
/////////////////////////////////////////////////////////////////////////////\n\n/// Sender\n\n/// /////////////////////////////////////////////////////////////////////////////\n\n\n\nimpl<T> Sender<T> {\n\n fn new(inner: Flavor<T>) -> Sender<T> {\n\n Sender { inner: UnsafeCell::new(inner) }\n\n }\n\n\n\n /// Attempts to send a value on this channel, returning it back if it could\n\n /// not be sent.\n\n ///\n\n /// A successful send occurs when it is determined that the other end of\n\n /// the channel has not hung up already. An unsuccessful send would be one\n\n /// where the corresponding receiver has already been deallocated. Note\n", "file_path": "src/system/sync/mpsc/mod.rs", "rank": 80, "score": 202833.98223927512 }, { "content": "fn match_digits_in_range(ss: &mut &str,\n\n min_digits : usize, max_digits : usize,\n\n ws: bool, min: i32, max: i32) -> Option<i32> {\n\n let before = *ss;\n\n match match_digits(ss, min_digits, max_digits, ws) {\n\n Some(val) if val >= min && val <= max => Some(val),\n\n _ => { *ss = before; None }\n\n }\n\n}\n\n\n", "file_path": "crates/time/src/parse.rs", "rank": 81, "score": 200947.80033506488 }, { "content": "#[bench]\n\nfn iterator_memchr3(b: &mut test::Bencher) {\n\n let haystack = bench_data();\n\n let (needle1, needle2, needle3) = (b'a', b'b', b'c');\n\n b.iter(|| {\n\n assert!(haystack.iter().position(|&b| {\n\n b == needle1 || b == needle2 || b == needle3\n\n }).is_none());\n\n });\n\n b.bytes = haystack.len() as u64;\n\n}\n\n\n", "file_path": "crates/memchr/benches/bench.rs", "rank": 82, "score": 200389.1918942631 }, { "content": "#[bench]\n\nfn iterator_memchr(b: &mut test::Bencher) {\n\n let haystack = bench_data();\n\n let needle = b'a';\n\n b.iter(|| {\n\n assert!(haystack.iter().position(|&b| b == needle).is_none());\n\n });\n\n b.bytes = haystack.len() as u64;\n\n}\n\n\n", "file_path": "crates/memchr/benches/bench.rs", "rank": 83, "score": 200389.1918942631 }, { "content": "#[bench]\n\nfn iterator_memchr2(b: 
&mut test::Bencher) {\n\n let haystack = bench_data();\n\n let (needle1, needle2) = (b'a', b'b');\n\n b.iter(|| {\n\n assert!(haystack.iter().position(|&b| {\n\n b == needle1 || b == needle2\n\n }).is_none());\n\n });\n\n b.bytes = haystack.len() as u64;\n\n}\n\n\n", "file_path": "crates/memchr/benches/bench.rs", "rank": 84, "score": 200389.1918942631 }, { "content": "#[bench]\n\nfn iterator_memrchr(b: &mut test::Bencher) {\n\n let haystack = bench_data();\n\n let needle = b'a';\n\n b.iter(|| {\n\n assert!(haystack.iter().rposition(|&b| b == needle).is_none());\n\n });\n\n b.bytes = haystack.len() as u64;\n\n}\n\n\n", "file_path": "crates/memchr/benches/bench.rs", "rank": 85, "score": 200389.1918942631 }, { "content": "/// Tries to parse `[-+]\\d\\d` continued by `\\d\\d`. Return an offset in seconds if possible.\n\n///\n\n/// The additional `colon` may be used to parse a mandatory or optional `:`\n\n/// between hours and minutes, and should return either a new suffix or `Err` when parsing fails.\n\npub fn timezone_offset<F>(mut s: &str, mut colon: F) -> ParseResult<(&str, i32)>\n\n where F: FnMut(&str) -> ParseResult<&str> {\n\n fn digits(s: &str) -> ParseResult<(u8, u8)> {\n\n let b = s.as_bytes();\n\n if b.len() < 2 {\n\n Err(TOO_SHORT)\n\n } else {\n\n Ok((b[0], b[1]))\n\n }\n\n }\n\n let negative = match s.as_bytes().first() {\n\n Some(&b'+') => false,\n\n Some(&b'-') => true,\n\n Some(_) => return Err(INVALID),\n\n None => return Err(TOO_SHORT),\n\n };\n\n s = &s[1..];\n\n\n\n // hours (00--99)\n\n let hours = match try!(digits(s)) {\n", "file_path": "crates/chrono/src/format/scan.rs", "rank": 86, "score": 200026.8010887546 }, { "content": "/// Same to `timezone_offset` but also allows for RFC 2822 legacy timezones.\n\n/// May return `None` which indicates an insufficient offset data (i.e. 
`-0000`).\n\npub fn timezone_offset_2822(s: &str) -> ParseResult<(&str, Option<i32>)> {\n\n // tries to parse legacy time zone names\n\n let upto = s.as_bytes().iter().position(|&c| match c { b'a'...b'z' | b'A'...b'Z' => false,\n\n _ => true }).unwrap_or(s.len());\n\n if upto > 0 {\n\n let name = &s[..upto];\n\n let s = &s[upto..];\n\n if equals(name, \"gmt\") || equals(name, \"ut\") {\n\n Ok((s, Some(0)))\n\n } else if equals(name, \"est\") {\n\n Ok((s, Some(-5 * 3600)))\n\n } else if equals(name, \"edt\") {\n\n Ok((s, Some(-4 * 3600)))\n\n } else if equals(name, \"cst\") {\n\n Ok((s, Some(-6 * 3600)))\n\n } else if equals(name, \"cdt\") {\n\n Ok((s, Some(-5 * 3600)))\n\n } else if equals(name, \"mst\") {\n\n Ok((s, Some(-7 * 3600)))\n\n } else if equals(name, \"mdt\") {\n", "file_path": "crates/chrono/src/format/scan.rs", "rank": 87, "score": 199135.9469563465 }, { "content": "#[must_use]\n\n#[inline]\n\npub fn __add2(a: &mut [BigDigit], b: &[BigDigit]) -> BigDigit {\n\n debug_assert!(a.len() >= b.len());\n\n\n\n let mut carry = 0;\n\n let (a_lo, a_hi) = a.split_at_mut(b.len());\n\n\n\n for (a, b) in a_lo.iter_mut().zip(b) {\n\n *a = adc(*a, *b, &mut carry);\n\n }\n\n\n\n if carry != 0 {\n\n for a in a_hi {\n\n *a = adc(*a, 0, &mut carry);\n\n if carry == 0 { break }\n\n }\n\n }\n\n\n\n carry\n\n}\n\n\n", "file_path": "crates/num/bigint/src/algorithms.rs", "rank": 88, "score": 198542.87015984353 }, { "content": "pub fn dstage_actor_create(template: *const ActorTemplate, memory: *mut ActorMemory) {\n\n let dstage_actor_create = unsafe {\n\n transmute::<Addr, extern \"C\" fn(*const ActorTemplate, *mut ActorMemory)>(0x8003f484)\n\n };\n\n dstage_actor_create(template, memory);\n\n}\n\n\n", "file_path": "src/system/tww.rs", "rank": 89, "score": 195578.42683612404 }, { "content": "fn multiply_bench(b: &mut Bencher, xbits: usize, ybits: usize) {\n\n let mut rng = get_rng();\n\n let x = rng.gen_bigint(xbits);\n\n let y = rng.gen_bigint(ybits);\n\n\n\n b.iter(|| &x * 
&y);\n\n}\n\n\n", "file_path": "crates/num/benches/bigint.rs", "rank": 90, "score": 194033.06356799748 }, { "content": "fn divide_bench(b: &mut Bencher, xbits: usize, ybits: usize) {\n\n let mut rng = get_rng();\n\n let x = rng.gen_bigint(xbits);\n\n let y = rng.gen_bigint(ybits);\n\n\n\n b.iter(|| &x / &y);\n\n}\n\n\n", "file_path": "crates/num/benches/bigint.rs", "rank": 91, "score": 194033.06356799748 }, { "content": "#[bench]\n\nfn ac_one_prefix_byte_no_match(b: &mut Bencher) {\n\n let aut = $aut(vec![\"zbc\"]);\n\n $bench(b, aut, &haystack_same('y'));\n\n}\n\n\n", "file_path": "crates/aho-corasick/benches/bench.rs", "rank": 92, "score": 193859.8857244603 }, { "content": "#[bench]\n\nfn naive_one_prefix_byte_no_match(b: &mut Bencher) {\n\n bench_naive_no_match(b, vec![\"zbc\"], &haystack_same('y'));\n\n}\n\n\n", "file_path": "crates/aho-corasick/benches/bench.rs", "rank": 93, "score": 193859.8857244603 }, { "content": "type Result<T> = result::Result<T, Box<error::Error + Send + Sync>>;\n\n\n", "file_path": "crates/regex/regex-debug/src/main.rs", "rank": 94, "score": 193104.96987525586 }, { "content": "fn match_fractional_seconds(ss: &mut &str) -> i32 {\n\n let mut value = 0;\n\n let mut multiplier = NSEC_PER_SEC / 10;\n\n\n\n let mut chars = ss.char_indices();\n\n let orig = *ss;\n\n for (i, ch) in &mut chars {\n\n *ss = &orig[i..];\n\n match ch {\n\n '0' ... 
'9' => {\n\n // This will drop digits after the nanoseconds place\n\n let digit = ch as i32 - '0' as i32;\n\n value += digit * multiplier;\n\n multiplier /= 10;\n\n }\n\n _ => break\n\n }\n\n }\n\n\n\n value\n\n}\n\n\n", "file_path": "crates/time/src/parse.rs", "rank": 95, "score": 192896.31019169162 }, { "content": "/// The offset from the local time to UTC.\n\npub trait Offset: Sized + Clone + fmt::Debug {\n\n /// Returns the offset from UTC to the local time stored.\n\n fn local_minus_utc(&self) -> Duration;\n\n}\n\n\n", "file_path": "crates/chrono/src/offset/mod.rs", "rank": 96, "score": 192571.5302052089 }, { "content": "pub fn write_str<T: AsRef<str>>(ptr: *mut u8, value: T) {\n\n let mut dst = ptr;\n\n for &byte in value.as_ref().as_bytes().iter() {\n\n unsafe {\n\n *dst = byte;\n\n dst = dst.offset(1);\n\n }\n\n if byte == 0 {\n\n return;\n\n }\n\n }\n\n unsafe {\n\n *dst = 0;\n\n }\n\n}\n\n\n", "file_path": "src/system/memory.rs", "rank": 97, "score": 190812.5932832366 }, { "content": "#[bench]\n\nfn ac_one_prefix_byte_every_match(b: &mut Bencher) {\n\n // We lose the benefit of `memchr` because the first byte matches\n\n // in every position in the haystack.\n\n let aut = $aut(vec![\"zbc\"]);\n\n $bench(b, aut, &haystack_same('z'));\n\n}\n\n\n", "file_path": "crates/aho-corasick/benches/bench.rs", "rank": 98, "score": 190585.27979908953 }, { "content": "#[bench]\n\nfn ac_two_one_prefix_byte_no_match(b: &mut Bencher) {\n\n let aut = $aut(vec![\"zbcdef\", \"zmnopq\"]);\n\n $bench(b, aut, &haystack_same('y'));\n\n}\n\n\n", "file_path": "crates/aho-corasick/benches/bench.rs", "rank": 99, "score": 190585.27979908953 } ]
Rust
src/passes/mod.rs
Kixiron/cranial-coitus
2f0b158709b23f23a4d84045846829b250779f31
mod add_sub_loop; mod associative_ops; mod canonicalize; mod const_folding; mod copy_cell; mod dataflow; mod dce; mod eliminate_const_gamma; mod equality; mod expr_dedup; mod fold_arithmetic; mod fuse_io; mod licm; mod mem2reg; mod move_cell; mod scan_loops; mod square_cell; mod symbolic_eval; mod unobserved_store; mod utils; mod zero_loop; pub use add_sub_loop::AddSubLoop; pub use associative_ops::AssociativeOps; pub use canonicalize::Canonicalize; pub use const_folding::ConstFolding; pub use copy_cell::CopyCell; pub use dataflow::{Dataflow, DataflowSettings}; pub use dce::Dce; pub use eliminate_const_gamma::ElimConstGamma; pub use equality::Equality; pub use expr_dedup::ExprDedup; pub use fold_arithmetic::FoldArithmetic; pub use fuse_io::FuseIO; pub use licm::Licm; pub use mem2reg::Mem2Reg; pub use move_cell::MoveCell; pub use scan_loops::ScanLoops; pub use square_cell::SquareCell; pub use symbolic_eval::SymbolicEval; pub use unobserved_store::UnobservedStore; pub use zero_loop::ZeroLoop; use crate::{ graph::{ Add, Bool, Byte, End, Eq, Gamma, Input, InputParam, Int, Load, Mul, Neg, Neq, Node, NodeExt, NodeId, Not, Output, OutputParam, Rvsdg, Scan, Start, Store, Sub, Theta, }, passes::utils::ChangeReport, utils::HashSet, values::{Cell, Ptr}, }; use std::{cell::RefCell, collections::VecDeque}; #[derive(Debug, Clone)] pub struct PassConfig { tape_len: u16, tape_operations_wrap: bool, cell_operations_wrap: bool, } impl PassConfig { pub fn new(tape_len: u16, tape_operations_wrap: bool, cell_operations_wrap: bool) -> Self { Self { tape_len, tape_operations_wrap, cell_operations_wrap, } } } pub fn default_passes(config: &PassConfig) -> Vec<Box<dyn Pass>> { let tape_len = config.tape_len; bvec![ UnobservedStore::new(tape_len), ConstFolding::new(tape_len), FoldArithmetic::new(tape_len), AssociativeOps::new(tape_len), ZeroLoop::new(tape_len), Mem2Reg::new(tape_len), AddSubLoop::new(tape_len), FuseIO::new(), MoveCell::new(tape_len), ScanLoops::new(tape_len), Dce::new(), 
ElimConstGamma::new(tape_len), ConstFolding::new(tape_len), SymbolicEval::new(tape_len), Licm::new(), CopyCell::new(tape_len), Equality::new(), ExprDedup::new(tape_len), Canonicalize::new(), Dce::new(), ] } thread_local! { #[allow(clippy::declare_interior_mutable_const)] #[allow(clippy::type_complexity)] static VISIT_GRAPH_CACHE: RefCell<Vec<(VecDeque<NodeId>, HashSet<NodeId>, Vec<NodeId>)>> = const { RefCell::new(Vec::new()) }; } pub trait Pass { fn pass_name(&self) -> &'static str; fn did_change(&self) -> bool; fn reset(&mut self); fn report(&self) -> ChangeReport { ChangeReport::default() } fn visit_graph(&mut self, graph: &mut Rvsdg) -> bool { let (mut stack, mut visited, mut buffer) = VISIT_GRAPH_CACHE .with(|buffers| buffers.borrow_mut().pop()) .unwrap_or_else(|| { ( VecDeque::with_capacity(graph.node_len() / 2), HashSet::with_capacity_and_hasher(graph.node_len(), Default::default()), Vec::new(), ) }); let result = self.visit_graph_inner(graph, &mut stack, &mut visited, &mut buffer); stack.clear(); visited.clear(); buffer.clear(); VISIT_GRAPH_CACHE.with(|buffers| buffers.borrow_mut().push((stack, visited, buffer))); result } fn visit_graph_inner( &mut self, graph: &mut Rvsdg, stack: &mut VecDeque<NodeId>, visited: &mut HashSet<NodeId>, buffer: &mut Vec<NodeId>, ) -> bool { visited.clear(); buffer.clear(); for node_id in graph.node_ids() { let node = graph.get_node(node_id); if node.is_start() || node.is_input_param() { stack.push_back(node_id); } else if node.is_end() || node.is_output_param() { stack.push_front(node_id); } } while let Some(node_id) = stack.pop_back() { if visited.contains(&node_id) || !graph.contains_node(node_id) { continue; } let mut missing_inputs = false; buffer.extend(graph.try_inputs(node_id).filter_map(|(_, input)| { input.and_then(|(input, ..)| { let input_id = input.node(); if !visited.contains(&input_id) { missing_inputs = true; Some(input_id) } else { None } }) })); if missing_inputs { stack.reserve(buffer.len() + 1); 
stack.push_back(node_id); stack.extend(buffer.drain(..)); continue; } self.visit(graph, node_id); self.after_visit(graph, node_id); let didnt_exist = visited.insert(node_id); debug_assert!(didnt_exist); if graph.contains_node(node_id) { buffer.extend( graph .get_node(node_id) .all_output_ports() .into_iter() .flat_map(|output| graph.get_outputs(output)) .filter_map(|(output_node, ..)| { let output_id = output_node.node(); (!visited.contains(&output_id) && !stack.contains(&output_id)) .then(|| output_id) }), ); stack.extend(buffer.drain(..)); } } self.post_visit_graph(graph, visited); stack.clear(); buffer.clear(); visited.clear(); self.did_change() } fn post_visit_graph(&mut self, _graph: &mut Rvsdg, _visited: &HashSet<NodeId>) {} fn after_visit(&mut self, _graph: &mut Rvsdg, _node_id: NodeId) {} fn visit(&mut self, graph: &mut Rvsdg, node_id: NodeId) { if let Some(node) = graph.try_node(node_id).cloned() { match node { Node::Int(int, value) => self.visit_int(graph, int, value), Node::Byte(byte, value) => self.visit_byte(graph, byte, value), Node::Bool(bool, value) => self.visit_bool(graph, bool, value), Node::Add(add) => self.visit_add(graph, add), Node::Sub(sub) => self.visit_sub(graph, sub), Node::Mul(mul) => self.visit_mul(graph, mul), Node::Load(load) => self.visit_load(graph, load), Node::Store(store) => self.visit_store(graph, store), Node::Scan(scan) => self.visit_scan(graph, scan), Node::Start(start) => self.visit_start(graph, start), Node::End(end) => self.visit_end(graph, end), Node::Input(input) => self.visit_input(graph, input), Node::Output(output) => self.visit_output(graph, output), Node::Theta(theta) => self.visit_theta(graph, *theta), Node::Eq(eq) => self.visit_eq(graph, eq), Node::Neq(neq) => self.visit_neq(graph, neq), Node::Not(not) => self.visit_not(graph, not), Node::Neg(neg) => self.visit_neg(graph, neg), Node::Gamma(gamma) => self.visit_gamma(graph, *gamma), Node::InputParam(input_param) => self.visit_input_param(graph, input_param), 
Node::OutputParam(output_param) => self.visit_output_param(graph, output_param), } } else { tracing::error!("visited node that doesn't exist: {:?}", node_id); } } fn visit_int(&mut self, _graph: &mut Rvsdg, _int: Int, _value: Ptr) {} fn visit_byte(&mut self, _graph: &mut Rvsdg, _byte: Byte, _value: Cell) {} fn visit_bool(&mut self, _graph: &mut Rvsdg, _bool: Bool, _value: bool) {} fn visit_add(&mut self, _graph: &mut Rvsdg, _add: Add) {} fn visit_sub(&mut self, _graph: &mut Rvsdg, _sub: Sub) {} fn visit_mul(&mut self, _graph: &mut Rvsdg, _mul: Mul) {} fn visit_not(&mut self, _graph: &mut Rvsdg, _not: Not) {} fn visit_neg(&mut self, _graph: &mut Rvsdg, _neg: Neg) {} fn visit_eq(&mut self, _graph: &mut Rvsdg, _eq: Eq) {} fn visit_neq(&mut self, _graph: &mut Rvsdg, _neq: Neq) {} fn visit_load(&mut self, _graph: &mut Rvsdg, _load: Load) {} fn visit_store(&mut self, _graph: &mut Rvsdg, _store: Store) {} fn visit_scan(&mut self, _graph: &mut Rvsdg, _scan: Scan) {} fn visit_input(&mut self, _graph: &mut Rvsdg, _input: Input) {} fn visit_output(&mut self, _graph: &mut Rvsdg, _output: Output) {} fn visit_theta(&mut self, _graph: &mut Rvsdg, _theta: Theta) {} fn visit_gamma(&mut self, _graph: &mut Rvsdg, _gamma: Gamma) {} fn visit_input_param(&mut self, _graph: &mut Rvsdg, _input: InputParam) {} fn visit_output_param(&mut self, _graph: &mut Rvsdg, _output: OutputParam) {} fn visit_start(&mut self, _graph: &mut Rvsdg, _start: Start) {} fn visit_end(&mut self, _graph: &mut Rvsdg, _end: End) {} } test_opts! 
{ memchr_loop, passes = |tape_len| bvec![ZeroLoop::new(tape_len)], output = [0], |graph, mut effect, tape_len| { let mut ptr = graph.int(Ptr::zero(tape_len)).value(); let not_zero = graph.int(Ptr::new(255, tape_len)).value(); let store = graph.store(ptr, not_zero, effect); effect = store.output_effect(); let theta = graph.theta([], [ptr], effect, |graph, mut effect, _invariant, variant| { let ptr = variant[0]; let zero = graph.int(Ptr::zero(tape_len)).value(); let four = graph.int(Ptr::new(4, tape_len)).value(); let add = graph.add(ptr, four); let load = graph.load(add.value(), effect); effect = load.output_effect(); let not_eq_zero = graph.neq(load.output_value(), zero); ThetaData::new([ptr], not_eq_zero.value(), effect) }); ptr = theta.output_ports().next().unwrap(); effect = theta.output_effect().unwrap(); let load = graph.load(ptr, effect); effect = load.output_effect(); let output = graph.output(load.output_value(), effect); output.output_effect() }, }
mod add_sub_loop; mod associative_ops; mod canonicalize; mod const_folding; mod copy_cell; mod dataflow; mod dce; mod eliminate_const_gamma; mod equality; mod expr_dedup; mod fold_arithmetic; mod fuse_io; mod licm; mod mem2reg; mod move_cell; mod scan_loops; mod square_cell; mod symbolic_eval; mod unobserved_store; mod utils; mod zero_loop; pub use add_sub_loop::AddSubLoop; pub use associative_ops::AssociativeOps; pub use canonicalize::Canonicalize; pub use const_folding::ConstFolding; pub use copy_cell::CopyCell; pub use dataflow::{Dataflow, DataflowSettings}; pub use dce::Dce; pub use eliminate_const_gamma::ElimConstGamma; pub use equality::Equality; pub use expr_dedup::ExprDedup; pub use fold_arithmetic::FoldArithmetic; pub use fuse_io::FuseIO; pub use licm::Licm; pub use mem2reg::Mem2Reg; pub use move_cell::MoveCell; pub use scan_loops::ScanLoops; pub use square_cell::SquareCell; pub use symbolic_eval::SymbolicEval; pub use unobserved_store::UnobservedStore; pub use zero_loop::ZeroLoop; use crate::{ graph::{ Add, Bool, Byte, End, Eq, Gamma, Input, InputParam, Int, Load, Mul, Neg, Neq, Node, NodeExt, NodeId, Not, Output, OutputParam, Rvsdg, Scan, Start, Store, Sub, Theta, }, passes::utils::ChangeReport, utils::HashSet, values::{Cell, Ptr}, }; use std::{cell::RefCell, collections::VecDeque}; #[derive(Debug, Clone)] pub struct PassConfig { tape_len: u16, tape_operations_wrap: bool, cell_operations_wrap: bool, } impl PassConfig { pub fn new(tape_len: u16, tape_operations_wrap: bool, cell_operations_wrap: bool) -> Self { Self { tape_len, tape_operations_wrap, cell_operations_wrap, } } } pub fn default_passes(config: &PassConfig) -> Vec<Box<dyn Pass>> { let tape_len = config.tape_len; bvec![ UnobservedStore::new(tape_len), ConstFolding::new(tape_len), FoldArithmetic::new(tape_len), AssociativeOps::new(tape_len), ZeroLoop::new(tape_len), Mem2Reg::new(tape_len), AddSubLoop::new(tape_len), FuseIO::new(), MoveCell::new(tape_len), ScanLoops::new(tape_len), Dce::new(), 
ElimConstGamma::new(tape_len), ConstFolding::new(tape_len), SymbolicEval::new(tape_len), Licm::new(), CopyCell::new(tape_len), Equality::new(), ExprDedup::new(tape_len), Canonicalize::new(), Dce::new(), ] } thread_local! { #[allow(clippy::declare_interior_mutable_const)] #[allow(clippy::type_complexity)] static VISIT_GRAPH_CACHE: RefCell<Vec<(VecDeque<NodeId>, HashSet<NodeId>, Vec<NodeId>)>> = const { RefCell::new(Vec::new()) }; } pub trait Pass { fn pass_name(&self) -> &'static str; fn did_change(&self) -> bool; fn reset(&mut self); fn report(&self) -> ChangeReport { ChangeReport::default() }
fn visit_graph_inner( &mut self, graph: &mut Rvsdg, stack: &mut VecDeque<NodeId>, visited: &mut HashSet<NodeId>, buffer: &mut Vec<NodeId>, ) -> bool { visited.clear(); buffer.clear(); for node_id in graph.node_ids() { let node = graph.get_node(node_id); if node.is_start() || node.is_input_param() { stack.push_back(node_id); } else if node.is_end() || node.is_output_param() { stack.push_front(node_id); } } while let Some(node_id) = stack.pop_back() { if visited.contains(&node_id) || !graph.contains_node(node_id) { continue; } let mut missing_inputs = false; buffer.extend(graph.try_inputs(node_id).filter_map(|(_, input)| { input.and_then(|(input, ..)| { let input_id = input.node(); if !visited.contains(&input_id) { missing_inputs = true; Some(input_id) } else { None } }) })); if missing_inputs { stack.reserve(buffer.len() + 1); stack.push_back(node_id); stack.extend(buffer.drain(..)); continue; } self.visit(graph, node_id); self.after_visit(graph, node_id); let didnt_exist = visited.insert(node_id); debug_assert!(didnt_exist); if graph.contains_node(node_id) { buffer.extend( graph .get_node(node_id) .all_output_ports() .into_iter() .flat_map(|output| graph.get_outputs(output)) .filter_map(|(output_node, ..)| { let output_id = output_node.node(); (!visited.contains(&output_id) && !stack.contains(&output_id)) .then(|| output_id) }), ); stack.extend(buffer.drain(..)); } } self.post_visit_graph(graph, visited); stack.clear(); buffer.clear(); visited.clear(); self.did_change() } fn post_visit_graph(&mut self, _graph: &mut Rvsdg, _visited: &HashSet<NodeId>) {} fn after_visit(&mut self, _graph: &mut Rvsdg, _node_id: NodeId) {} fn visit(&mut self, graph: &mut Rvsdg, node_id: NodeId) { if let Some(node) = graph.try_node(node_id).cloned() { match node { Node::Int(int, value) => self.visit_int(graph, int, value), Node::Byte(byte, value) => self.visit_byte(graph, byte, value), Node::Bool(bool, value) => self.visit_bool(graph, bool, value), Node::Add(add) => self.visit_add(graph, 
add), Node::Sub(sub) => self.visit_sub(graph, sub), Node::Mul(mul) => self.visit_mul(graph, mul), Node::Load(load) => self.visit_load(graph, load), Node::Store(store) => self.visit_store(graph, store), Node::Scan(scan) => self.visit_scan(graph, scan), Node::Start(start) => self.visit_start(graph, start), Node::End(end) => self.visit_end(graph, end), Node::Input(input) => self.visit_input(graph, input), Node::Output(output) => self.visit_output(graph, output), Node::Theta(theta) => self.visit_theta(graph, *theta), Node::Eq(eq) => self.visit_eq(graph, eq), Node::Neq(neq) => self.visit_neq(graph, neq), Node::Not(not) => self.visit_not(graph, not), Node::Neg(neg) => self.visit_neg(graph, neg), Node::Gamma(gamma) => self.visit_gamma(graph, *gamma), Node::InputParam(input_param) => self.visit_input_param(graph, input_param), Node::OutputParam(output_param) => self.visit_output_param(graph, output_param), } } else { tracing::error!("visited node that doesn't exist: {:?}", node_id); } } fn visit_int(&mut self, _graph: &mut Rvsdg, _int: Int, _value: Ptr) {} fn visit_byte(&mut self, _graph: &mut Rvsdg, _byte: Byte, _value: Cell) {} fn visit_bool(&mut self, _graph: &mut Rvsdg, _bool: Bool, _value: bool) {} fn visit_add(&mut self, _graph: &mut Rvsdg, _add: Add) {} fn visit_sub(&mut self, _graph: &mut Rvsdg, _sub: Sub) {} fn visit_mul(&mut self, _graph: &mut Rvsdg, _mul: Mul) {} fn visit_not(&mut self, _graph: &mut Rvsdg, _not: Not) {} fn visit_neg(&mut self, _graph: &mut Rvsdg, _neg: Neg) {} fn visit_eq(&mut self, _graph: &mut Rvsdg, _eq: Eq) {} fn visit_neq(&mut self, _graph: &mut Rvsdg, _neq: Neq) {} fn visit_load(&mut self, _graph: &mut Rvsdg, _load: Load) {} fn visit_store(&mut self, _graph: &mut Rvsdg, _store: Store) {} fn visit_scan(&mut self, _graph: &mut Rvsdg, _scan: Scan) {} fn visit_input(&mut self, _graph: &mut Rvsdg, _input: Input) {} fn visit_output(&mut self, _graph: &mut Rvsdg, _output: Output) {} fn visit_theta(&mut self, _graph: &mut Rvsdg, _theta: Theta) {} 
fn visit_gamma(&mut self, _graph: &mut Rvsdg, _gamma: Gamma) {} fn visit_input_param(&mut self, _graph: &mut Rvsdg, _input: InputParam) {} fn visit_output_param(&mut self, _graph: &mut Rvsdg, _output: OutputParam) {} fn visit_start(&mut self, _graph: &mut Rvsdg, _start: Start) {} fn visit_end(&mut self, _graph: &mut Rvsdg, _end: End) {} } test_opts! { memchr_loop, passes = |tape_len| bvec![ZeroLoop::new(tape_len)], output = [0], |graph, mut effect, tape_len| { let mut ptr = graph.int(Ptr::zero(tape_len)).value(); let not_zero = graph.int(Ptr::new(255, tape_len)).value(); let store = graph.store(ptr, not_zero, effect); effect = store.output_effect(); let theta = graph.theta([], [ptr], effect, |graph, mut effect, _invariant, variant| { let ptr = variant[0]; let zero = graph.int(Ptr::zero(tape_len)).value(); let four = graph.int(Ptr::new(4, tape_len)).value(); let add = graph.add(ptr, four); let load = graph.load(add.value(), effect); effect = load.output_effect(); let not_eq_zero = graph.neq(load.output_value(), zero); ThetaData::new([ptr], not_eq_zero.value(), effect) }); ptr = theta.output_ports().next().unwrap(); effect = theta.output_effect().unwrap(); let load = graph.load(ptr, effect); effect = load.output_effect(); let output = graph.output(load.output_value(), effect); output.output_effect() }, }
fn visit_graph(&mut self, graph: &mut Rvsdg) -> bool { let (mut stack, mut visited, mut buffer) = VISIT_GRAPH_CACHE .with(|buffers| buffers.borrow_mut().pop()) .unwrap_or_else(|| { ( VecDeque::with_capacity(graph.node_len() / 2), HashSet::with_capacity_and_hasher(graph.node_len(), Default::default()), Vec::new(), ) }); let result = self.visit_graph_inner(graph, &mut stack, &mut visited, &mut buffer); stack.clear(); visited.clear(); buffer.clear(); VISIT_GRAPH_CACHE.with(|buffers| buffers.borrow_mut().push((stack, visited, buffer))); result }
function_block-full_function
[ { "content": "pub fn stdout_output() -> impl FnMut(u8) + 'static {\n\n move |byte| {\n\n // FIXME: Lock once, move into closure\n\n let stdout_handle = io::stdout();\n\n let mut stdout = stdout_handle.lock();\n\n\n\n tracing::trace!(\n\n \"wrote output value {byte}, hex: {byte:#X}, binary: {byte:#08b}\",\n\n byte = byte,\n\n );\n\n\n\n stdout\n\n .write_all(&[byte])\n\n .expect(\"failed to write to stdout\");\n\n\n\n // FIXME: Flush infrequently, somehow?\n\n stdout.flush().expect(\"failed to flush stdout\");\n\n }\n\n}\n", "file_path": "src/driver.rs", "rank": 0, "score": 284483.938117387 }, { "content": "pub fn stdin_input() -> impl FnMut() -> u8 + 'static {\n\n move || {\n\n // FIXME: Lock once, move into closure\n\n let stdin_handle = io::stdin();\n\n let mut stdin = stdin_handle.lock();\n\n\n\n let mut buf = [0];\n\n stdin\n\n .read_exact(&mut buf)\n\n .expect(\"failed to read from stdin\");\n\n let [byte] = buf;\n\n\n\n tracing::trace!(\n\n \"read input value {byte}, hex: {byte:#X}, binary: {byte:#08b}\",\n\n byte = byte,\n\n );\n\n\n\n byte\n\n }\n\n}\n\n\n", "file_path": "src/driver.rs", "rank": 1, "score": 284480.49422218825 }, { "content": "#[tracing::instrument(skip_all)]\n\npub fn build_graph<T>(tokens: T, tape_len: u16) -> Rvsdg\n\nwhere\n\n T: AsRef<[Token]>,\n\n{\n\n let tokens = tokens.as_ref();\n\n\n\n tracing::info!(\"started building rvsdg\");\n\n let event = PerfEvent::new(\"build-graph\");\n\n\n\n let mut graph = Rvsdg::new();\n\n // Create the graph's start node\n\n let start = graph.start();\n\n\n\n // Get the starting effect and create the initial pointer (zero)\n\n let effect = start.effect();\n\n let ptr = graph.int(Ptr::zero(tape_len)).value();\n\n\n\n // Lower all of the program's tokens into the graph\n\n let (_ptr, effect) = lower_tokens::lower_tokens(&mut graph, ptr, effect, tokens);\n\n // Create the program's end node\n\n graph.end(effect);\n\n\n\n let elapsed = event.finish();\n\n tracing::info!(\"finished building rvsdg in 
{:#?}\", elapsed);\n\n\n\n graph\n\n}\n\n\n", "file_path": "src/driver.rs", "rank": 2, "score": 270343.2500085946 }, { "content": "pub trait Port: Debug + Clone + Copy + PartialEq + Eq + Hash {\n\n fn port(&self) -> PortId;\n\n\n\n fn raw(&self) -> u32 {\n\n self.port().0\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]\n\n#[repr(transparent)]\n\npub struct PortId(u32);\n\n\n\nimpl PortId {\n\n pub const fn new(id: u32) -> Self {\n\n Self(id)\n\n }\n\n\n\n const fn inner(&self) -> u32 {\n\n self.0\n\n }\n", "file_path": "src/graph/ports.rs", "rank": 3, "score": 267061.738792638 }, { "content": "pub trait UnaryOp: NodeExt {\n\n fn name() -> &'static str;\n\n\n\n fn symbol() -> &'static str;\n\n\n\n fn make_in_graph(graph: &mut Rvsdg, input: OutputPort) -> Self;\n\n\n\n fn apply(input: Const) -> Const;\n\n\n\n fn input(&self) -> InputPort;\n\n\n\n fn value(&self) -> OutputPort;\n\n}\n\n\n\nimpl UnaryOp for Neg {\n\n fn name() -> &'static str {\n\n \"neg\"\n\n }\n\n\n\n fn symbol() -> &'static str {\n", "file_path": "src/passes/utils/unary_op.rs", "rank": 5, "score": 265848.2771781817 }, { "content": "pub trait BinaryOp: NodeExt {\n\n fn name() -> &'static str;\n\n\n\n fn symbol() -> &'static str;\n\n\n\n fn make_in_graph(graph: &mut Rvsdg, lhs: OutputPort, rhs: OutputPort) -> Self;\n\n\n\n fn apply(lhs: Const, rhs: Const) -> Const;\n\n\n\n fn lhs(&self) -> InputPort;\n\n\n\n fn rhs(&self) -> InputPort;\n\n\n\n fn value(&self) -> OutputPort;\n\n\n\n fn is_associative() -> bool;\n\n\n\n fn is_commutative() -> bool;\n\n}\n\n\n", "file_path": "src/passes/utils/binary_op.rs", "rank": 6, "score": 265848.2771781817 }, { "content": "// TODO:\n\n// - .effect_inputs()\n\n// - .value_inputs()\n\n// - .effect_outputs()\n\n// - .value_outputs()\n\n// - .has_effect_inputs()\n\n// - .has_value_inputs()\n\n// - .has_effect_outputs()\n\n// - .has_value_outputs()\n\n// - .update_output()\n\n// - .update_outputs()\n\n// - .num_input_ports()\n\n// 
- .num_input_effect_ports()\n\n// - .num_input_value_ports()\n\n// - .num_output_ports()\n\n// - .num_output_effect_ports()\n\n// - .num_output_value_ports()\n\n// - .has_input(&self, InputPort) -> bool\n\n// - .has_output(&self, OutputPort) -> bool\n\npub trait NodeExt {\n\n fn node(&self) -> NodeId;\n\n\n\n fn input_desc(&self) -> EdgeDescriptor;\n\n\n\n fn all_input_ports(&self) -> InputPorts;\n\n\n\n fn all_input_port_kinds(&self) -> InputPortKinds;\n\n\n\n fn update_inputs<F>(&mut self, mut update: F) -> bool\n\n where\n\n F: FnMut(InputPort, EdgeKind) -> Option<InputPort>,\n\n {\n\n let mut changed = false;\n\n for (input, kind) in self.all_input_port_kinds() {\n\n if let Some(new_input) = update(input, kind) {\n\n self.update_input(input, new_input);\n\n changed = true;\n\n }\n\n }\n", "file_path": "src/graph/nodes/node_ext.rs", "rank": 7, "score": 260836.38906571933 }, { "content": "/// Returns `true` if the given graph has no other nodes between its [`Start`] and [`End`] nodes\n\nfn gamma_branch_is_empty(branch: &Rvsdg) -> bool {\n\n debug_assert_eq!(branch.start_nodes().len(), 1);\n\n debug_assert_eq!(branch.end_nodes().len(), 1);\n\n\n\n let start = branch.to_node::<Start>(branch.start_nodes()[0]);\n\n let end = branch.to_node::<End>(branch.end_nodes()[0]);\n\n\n\n branch.input_source(end.input_effect()) == start.effect()\n\n}\n\n\n\nimpl Pass for SquareCell {\n\n fn pass_name(&self) -> &'static str {\n\n \"square-cell\"\n\n }\n\n\n\n fn did_change(&self) -> bool {\n\n self.changed\n\n }\n\n\n\n fn reset(&mut self) {\n", "file_path": "src/passes/square_cell.rs", "rank": 8, "score": 259480.25919347192 }, { "content": "#[inline(always)]\n\nfn bit(index: u16) -> usize {\n\n index as usize % 64\n\n}\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 9, "score": 241665.37749713077 }, { "content": "#[inline(always)]\n\nfn key(index: u16) -> usize {\n\n index as usize / 64\n\n}\n\n\n", "file_path": 
"src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 10, "score": 241665.37749713077 }, { "content": "pub fn array_output(output: &mut Vec<u8>) -> impl FnMut(u8) + '_ {\n\n move |byte| {\n\n tracing::trace!(\n\n output_len = output.len() + 1,\n\n \"pushed output value {byte}, hex: {byte:#X}, binary: {byte:#08b}\",\n\n byte = byte,\n\n );\n\n output.push(byte);\n\n }\n\n}\n\n\n", "file_path": "src/driver.rs", "rank": 12, "score": 230587.15394510148 }, { "content": "pub fn diff_ir(old: &str, new: &str) -> String {\n\n let start_time = Instant::now();\n\n\n\n let diff = TextDiff::configure()\n\n .algorithm(Algorithm::Patience)\n\n .deadline(Instant::now() + Duration::from_secs(1))\n\n .diff_lines(old, new);\n\n\n\n let diff = format!(\"{}\", diff.unified_diff());\n\n\n\n let elapsed = start_time.elapsed();\n\n tracing::debug!(\n\n target: \"timings\",\n\n \"took {:#?} to diff ir\",\n\n elapsed,\n\n );\n\n\n\n diff\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 13, "score": 229591.7780706408 }, { "content": "pub fn array_input(input: &mut VecDeque<u8>) -> impl FnMut() -> u8 + '_ {\n\n move || {\n\n let (byte, was_empty) = input.pop_front().map_or((0, true), |byte| (byte, false));\n\n\n\n tracing::trace!(\n\n input_len = input.len(),\n\n was_empty,\n\n \"popped input value {byte}, hex: {byte:#X}, binary: {byte:#08b}\",\n\n byte = byte,\n\n );\n\n\n\n byte\n\n }\n\n}\n\n\n", "file_path": "src/driver.rs", "rank": 14, "score": 227321.37345707772 }, { "content": " pub trait True {}\n\n\n\n impl True for Assert<true> {}\n\n\n\n pub const fn contains(changes: &[&str], name: &str) -> bool {\n\n let mut idx = 0;\n\n while idx < changes.len() {\n\n if str_eq(changes[idx], name) {\n\n return true;\n\n }\n\n\n\n idx += 1;\n\n }\n\n\n\n false\n\n }\n\n\n\n const fn index_of(changes: &[&str], name: &str) -> usize {\n\n let mut idx = 0;\n\n while idx < changes.len() {\n", "file_path": "src/passes/utils/changes.rs", "rank": 15, "score": 224407.75348419492 }, { "content": 
"#[inline]\n\npub fn lsb(chunk: u64) -> Option<u8> {\n\n if chunk == 0 {\n\n None\n\n } else {\n\n Some(chunk.trailing_zeros() as u8)\n\n }\n\n}\n\n\n\n/// Returns the last (most significant) bit of `chunk`, or `None` if `chunk` is\n\n/// 0.\n", "file_path": "src/passes/dataflow/domain/utils.rs", "rank": 16, "score": 223467.3953258172 }, { "content": "#[inline]\n\npub fn msb(chunk: u64) -> Option<u8> {\n\n if chunk == 0 {\n\n None\n\n } else {\n\n let bits = u64::BITS - 1;\n\n Some((bits as u8) ^ chunk.leading_zeros() as u8)\n\n }\n\n}\n\n\n\n/// Removes the first (least significant) bit from `chunk` and returns it, or\n\n/// `None` if `chunk` is 0.\n", "file_path": "src/passes/dataflow/domain/utils.rs", "rank": 17, "score": 223467.3953258172 }, { "content": "#[inline]\n\npub fn pop_msb(chunk: &mut u64) -> Option<u8> {\n\n let msb = msb(*chunk)?;\n\n *chunk ^= 1 << msb;\n\n Some(msb)\n\n}\n", "file_path": "src/passes/dataflow/domain/utils.rs", "rank": 18, "score": 212121.2620166552 }, { "content": "#[inline]\n\npub fn pop_lsb(chunk: &mut u64) -> Option<u8> {\n\n let lsb = lsb(*chunk)?;\n\n *chunk ^= 1 << lsb;\n\n Some(lsb)\n\n}\n\n\n\n/// Removes the last (most significant) bit from `chunk` and returns it, or\n\n/// `None` if `chunk` is 0.\n", "file_path": "src/passes/dataflow/domain/utils.rs", "rank": 19, "score": 212121.2620166552 }, { "content": "#[test]\n\nfn const_add() {\n\n use crate::{driver, passes::PassConfig};\n\n\n\n let mut input = {\n\n let mut graph = Rvsdg::new();\n\n\n\n let start = graph.start();\n\n let lhs = graph.byte(10);\n\n let rhs = graph.byte(20);\n\n let sum = graph.add(lhs.value(), rhs.value());\n\n let output = graph.output(sum.value(), start.effect());\n\n let _end = graph.end(output.output_effect());\n\n\n\n graph\n\n };\n\n driver::run_opt_passes(\n\n &mut input,\n\n usize::MAX,\n\n &PassConfig::new(30_000, true, true),\n\n None,\n", "file_path": "src/passes/const_folding.rs", "rank": 20, "score": 208839.77021813847 }, { "content": 
"// TODO: Turn validation into a pass\n\n// TODO: Make validation check edge and port kinds\n\nfn validate(graph: &Rvsdg) {\n\n tracing::debug!(\n\n target: \"timings\",\n\n \"started validating graph\",\n\n );\n\n let start_time = Instant::now();\n\n\n\n let mut stack: Vec<_> = graph.node_ids().map(|node_id| (node_id, graph)).collect();\n\n\n\n while let Some((node_id, graph)) = stack.pop() {\n\n let node = graph.get_node(node_id);\n\n\n\n if let Node::Theta(theta) = node {\n\n stack.extend(\n\n theta\n\n .body()\n\n .node_ids()\n\n .map(|node_id| (node_id, theta.body())),\n\n );\n\n } else if let Node::Gamma(gamma) = node {\n", "file_path": "src/main.rs", "rank": 21, "score": 206214.02288971373 }, { "content": "#[inline]\n\nfn zeroed_bit_box() -> BitBox {\n\n pop_bit_cache()\n\n .map(|mut bits| {\n\n arch::set_bits_zero(&mut bits);\n\n bits\n\n })\n\n // Safety: u64s are zeroable\n\n .unwrap_or_else(|| ManuallyDrop::new(unsafe { Box::new_zeroed().assume_init() }))\n\n}\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 22, "score": 205297.94350081487 }, { "content": "// Calculates the filtered cartesian product of both domains where the values are inequal\n\n// (a, b) := { (a, b) ∈ A × B | A ≠ B }\n\npub fn differential_product(lhs: &Domain, rhs: &Domain) -> (Domain, Domain) {\n\n match (lhs, rhs) {\n\n (Domain::Bool(lhs), Domain::Bool(rhs)) => {\n\n let (mut lhs_false, mut rhs_false) = (BoolSet::empty(), BoolSet::empty());\n\n for &lhs in lhs.as_slice() {\n\n for &rhs in rhs.as_slice() {\n\n if lhs != rhs {\n\n lhs_false.add(lhs);\n\n rhs_false.add(rhs);\n\n }\n\n }\n\n }\n\n\n\n (Domain::Bool(lhs_false), Domain::Bool(rhs_false))\n\n }\n\n\n\n (Domain::Byte(lhs), Domain::Byte(rhs)) => {\n\n let (mut lhs_false, mut rhs_false) = (ByteSet::empty(), ByteSet::empty());\n\n for lhs in lhs.iter() {\n\n for rhs in rhs.iter() {\n", "file_path": "src/passes/dataflow/domain/mod.rs", "rank": 23, "score": 205220.5725801081 }, { "content": 
"#[inline]\n\nfn pop_bit_cache() -> Option<BitBox> {\n\n BITMAP_CACHE.with_borrow_mut(|cache| cache.pop().map(ManuallyDrop::new))\n\n}\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 24, "score": 198887.63036987657 }, { "content": "#[derive(Debug)]\n\nstruct LoadMutateStore {\n\n output_effect: OutputPort,\n\n loaded_value: AddOrSub,\n\n mutated_value: Const,\n\n ptr: OutputPort,\n\n}\n", "file_path": "src/passes/move_cell.rs", "rank": 25, "score": 196451.36688782205 }, { "content": "type PopcountPtr = unsafe fn(&BitArray) -> u32;\n\n\n\npub(super) fn popcount(bits: &BitArray) -> u32 {\n\n static POPCOUNT: AtomicPtr<()> = AtomicPtr::new(select_popcount as *mut ());\n\n\n\n unsafe fn select_popcount(bits: &BitArray) -> u32 {\n\n let selected: PopcountPtr = if is_x86_feature_detected!(\"avx2\") {\n\n popcount_avx2 as _\n\n } else {\n\n popcount_scalar as _\n\n };\n\n POPCOUNT.store(selected as *mut (), Ordering::Relaxed);\n\n\n\n unsafe { selected(bits) }\n\n }\n\n\n\n let popcount = POPCOUNT.load(Ordering::Relaxed);\n\n let popcount = unsafe { transmute::<*mut (), PopcountPtr>(popcount) };\n\n unsafe { popcount(bits) }\n\n}\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/arch.rs", "rank": 26, "score": 192354.2155793597 }, { "content": "pub fn parse(source: &str) -> Parsed {\n\n let tokens = source.chars().flat_map(|token| {\n\n Some(match token {\n\n '>' => RawToken::IncPtr,\n\n '<' => RawToken::DecPtr,\n\n '+' => RawToken::Inc,\n\n '-' => RawToken::Dec,\n\n '.' 
=> RawToken::Output,\n\n ',' => RawToken::Input,\n\n '[' => RawToken::JumpStart,\n\n ']' => RawToken::JumpEnd,\n\n _ => return None,\n\n })\n\n });\n\n\n\n let (mut scopes, mut total_tokens, mut deepest_nesting) = (vec![Vec::new()], 0, 0);\n\n for token in tokens {\n\n match token {\n\n RawToken::IncPtr => scopes.last_mut().unwrap().push(Token::IncPtr),\n\n RawToken::DecPtr => scopes.last_mut().unwrap().push(Token::DecPtr),\n", "file_path": "src/parse.rs", "rank": 27, "score": 188013.88039443037 }, { "content": "/// Returns `true` if the label matches `\\.[a-zA-Z0-9_]+\n\nfn is_valid_label_name(label: &str) -> bool {\n\n label.starts_with('.')\n\n && label.len() >= 2\n\n && label\n\n .chars()\n\n .skip(1)\n\n .all(|char| char.is_alphanumeric() || char == '_')\n\n}\n", "file_path": "src/jit/disassemble.rs", "rank": 28, "score": 187194.01427970076 }, { "content": "#[cfg(test)]\n\npub fn compile_brainfuck_into(\n\n source: &str,\n\n graph: &mut Rvsdg,\n\n ptr: OutputPort,\n\n effect: OutputPort,\n\n) -> (OutputPort, OutputPort) {\n\n use crate::parse::Parsed;\n\n\n\n let parsing_start = Instant::now();\n\n\n\n let span = tracing::info_span!(\"parsing\");\n\n let tokens = span.in_scope(|| {\n\n tracing::info!(\"started parsing source code\");\n\n let Parsed {\n\n tokens,\n\n source_len,\n\n total_tokens,\n\n deepest_nesting,\n\n } = parse::parse(source);\n\n\n", "file_path": "src/utils.rs", "rank": 29, "score": 184586.67726760055 }, { "content": "#[inline]\n\nfn uninit_bit_box() -> Box<[MaybeUninit<u64>; BITS_LEN]> {\n\n pop_bit_cache()\n\n .map(|bits| {\n\n let bits = ManuallyDrop::into_inner(bits);\n\n // Safety: We can act like this array isn't initialized\n\n unsafe { Box::from_raw(Box::into_raw(bits).cast::<[MaybeUninit<u64>; BITS_LEN]>()) }\n\n })\n\n // Safety: We're creating an uninit array\n\n .unwrap_or_else(|| unsafe { Box::new_uninit().assume_init() })\n\n}\n\n\n\n/// # Safety\n\n///\n\n/// The caller must ensure that all bits within the bit box are 
initialized\n\n#[inline]\n\nunsafe fn assume_bit_box_init(bits: Box<[MaybeUninit<u64>; BITS_LEN]>) -> BitBox {\n\n ManuallyDrop::new(unsafe { Box::from_raw(Box::into_raw(bits).cast::<BitArray>()) })\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 30, "score": 183737.1553673665 }, { "content": "#[tracing::instrument(level = \"debug\", skip_all)]\n\npub fn sequentialize_graph(\n\n args: &Settings,\n\n graph: &Rvsdg,\n\n dump_dir: Option<&Path>,\n\n config: PrettyConfig,\n\n) -> Result<(Block, String)> {\n\n // Sequentialize the graph into serial instructions\n\n let program = {\n\n tracing::debug!(\"started sequentializing graph\");\n\n let event = PerfEvent::new(\"sequentializing-graph\");\n\n\n\n let sequential_code = IrBuilder::new(!args.dont_inline_constants).translate(graph);\n\n\n\n let elapsed = event.finish();\n\n tracing::debug!(\"finished sequentializing graph in {:#?}\", elapsed);\n\n\n\n sequential_code\n\n };\n\n\n\n // Pretty print the IR\n", "file_path": "src/driver.rs", "rank": 31, "score": 183574.9453174238 }, { "content": "pub trait Set<K> {\n\n fn contains(&self, value: &K) -> bool;\n\n\n\n fn is_empty(&self) -> bool;\n\n}\n\n\n\nimpl<K> Set<K> for HashSet<K>\n\nwhere\n\n K: Eq + Hash,\n\n{\n\n #[inline]\n\n fn contains(&self, value: &K) -> bool {\n\n HashSet::contains(self, value)\n\n }\n\n\n\n #[inline]\n\n fn is_empty(&self) -> bool {\n\n HashSet::is_empty(self)\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 32, "score": 183255.43386034472 }, { "content": "pub trait OptionExt<T> {\n\n fn inspect<F>(self, inspect: F) -> Self\n\n where\n\n F: FnOnce(&T);\n\n}\n\n\n\nimpl<T> OptionExt<T> for Option<T> {\n\n fn inspect<F>(self, inspect: F) -> Self\n\n where\n\n F: FnOnce(&T),\n\n {\n\n if let Some(value) = &self {\n\n inspect(value);\n\n }\n\n\n\n self\n\n }\n\n}\n\n\n\npub(crate) fn set_logger() {\n", "file_path": "src/utils.rs", "rank": 33, "score": 179338.67265274315 }, { "content": 
"#[tracing::instrument(skip_all)]\n\npub fn run_opt_passes(\n\n graph: &mut Rvsdg,\n\n iteration_limit: usize,\n\n pass_config: &PassConfig,\n\n mut stats: Option<&mut HashMap<String, (usize, Duration)>>,\n\n) -> usize {\n\n let mut passes = passes::default_passes(pass_config);\n\n if let Some(stats) = stats.as_mut() {\n\n stats.reserve(passes.len());\n\n }\n\n\n\n let (mut pass_num, mut stack, mut visited, mut buffer) = (\n\n 1,\n\n VecDeque::new(),\n\n HashSet::with_hasher(Default::default()),\n\n Vec::new(),\n\n );\n\n\n\n loop {\n\n let mut changed = false;\n", "file_path": "src/driver.rs", "rank": 34, "score": 178036.54300647316 }, { "content": "mod arch;\n\nmod iter;\n\n\n\npub(super) use iter::{IntoIter, Iter};\n\n\n\nuse crate::passes::dataflow::domain::ByteSet;\n\nuse std::{\n\n cell::{Cell, RefCell},\n\n fmt::{self, Debug},\n\n mem::{ManuallyDrop, MaybeUninit},\n\n thread,\n\n};\n\n\n\nconst BITS_LEN: usize = 1024;\n\nconst MAX_LEN: u32 = u16::MAX as u32 + 1;\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 35, "score": 164447.75561896755 }, { "content": "mod tests {\n\n use crate::passes::dataflow::domain::bitmap_u16::U16Bitmap;\n\n\n\n #[test]\n\n fn bitmap_as_singleton() {\n\n let mut bitmap = U16Bitmap::singleton(10);\n\n assert_eq!(bitmap.as_singleton(), Some(10));\n\n\n\n bitmap.insert(200);\n\n assert_eq!(bitmap.as_singleton(), None);\n\n\n\n bitmap.clear();\n\n bitmap.insert(20_000);\n\n assert_eq!(bitmap.as_singleton(), Some(20_000));\n\n }\n\n\n\n #[test]\n\n fn drop_a_bunch() {\n\n let mut maps = Vec::new();\n\n for _ in 0..100_000 {\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 36, "score": 164443.502644551 }, { "content": " fn default() -> Self {\n\n Self::empty()\n\n }\n\n}\n\n\n\nimpl PartialEq for U16Bitmap {\n\n fn eq(&self, other: &Self) -> bool {\n\n // If the lengths of the two bitmaps aren't equal, their contents\n\n // can't possibly be. 
This allows us to potentially skip on the more\n\n // expensive equality check if both bitmaps already know their\n\n // lengths\n\n let equal_lengths = self\n\n .raw_len()\n\n .zip(other.raw_len())\n\n .map_or(true, |(lhs, rhs)| lhs == rhs);\n\n\n\n equal_lengths && arch::bitmap_eq(self.as_array(), other.as_array())\n\n }\n\n}\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 37, "score": 164440.60173025922 }, { "content": " }\n\n\n\n pub(crate) fn intersects_bytes(&self, other: ByteSet) -> bool {\n\n let lhs = ByteSet::from_ref((&self.bits[..ByteSet::LEN]).try_into().unwrap());\n\n lhs.intersects(other)\n\n }\n\n\n\n pub fn is_disjoint(&self, other: &Self) -> bool {\n\n arch::is_disjoint(self.as_array(), other.as_array())\n\n }\n\n\n\n pub fn is_subset(&self, other: &Self) -> bool {\n\n arch::is_subset(self.as_array(), other.as_array())\n\n }\n\n\n\n #[inline]\n\n fn as_array(&self) -> &BitArray {\n\n &*self.bits\n\n }\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 38, "score": 164439.0721977406 }, { "content": "impl Eq for U16Bitmap {}\n\n\n\nimpl Debug for U16Bitmap {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_set().entries(self.iter()).finish()\n\n }\n\n}\n\n\n\nimpl Clone for U16Bitmap {\n\n fn clone(&self) -> Self {\n\n let mut bits = uninit_bit_box();\n\n MaybeUninit::write_slice(&mut *bits, self.as_array());\n\n\n\n let clone = Self {\n\n // Safety: We've initialized the bits\n\n bits: unsafe { assume_bit_box_init(bits) },\n\n length: self.length.clone(),\n\n };\n\n debug_assert_eq!(self, &clone);\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 39, "score": 164438.869310791 }, { "content": " }\n\n\n\n pub(super) fn intersect_bytes(&mut self, rhs: &ByteSet) -> bool {\n\n let array_ref = (&mut self.bits[..ByteSet::LEN]).try_into().unwrap();\n\n let lhs = ByteSet::from_mut(array_ref);\n\n lhs.intersect(rhs)\n\n }\n\n\n\n pub(super) fn 
intersect_bytes_new(&self, rhs: &ByteSet) -> Self {\n\n let mut lhs = self.clone();\n\n lhs.intersect_bytes(rhs);\n\n lhs\n\n }\n\n\n\n pub fn intersects(&self, other: &Self) -> bool {\n\n if self.shallow_empty() || other.shallow_empty() {\n\n false\n\n } else {\n\n arch::intersects(self.as_array(), other.as_array())\n\n }\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 40, "score": 164437.34038437047 }, { "content": "\n\n pub(super) fn union_bytes(&mut self, rhs: ByteSet) -> bool {\n\n let array_ref = (&mut self.bits[..ByteSet::LEN]).try_into().unwrap();\n\n let lhs = ByteSet::from_mut(array_ref);\n\n lhs.union(rhs)\n\n }\n\n\n\n pub fn intersect(&mut self, other: &Self) {\n\n if self.shallow_empty() {\n\n // Do nothing, we're already empty and an intersection between anything\n\n // and an empty set is nothing\n\n } else if other.shallow_empty() {\n\n self.clear();\n\n } else {\n\n self.length.set(None);\n\n arch::intersect(self.as_mut_array(), other.as_array());\n\n }\n\n }\n\n\n\n pub fn intersect_into(&self, other: &Self, output: &mut Self) {\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 41, "score": 164435.79731008445 }, { "content": " ///\n\n pub fn union_into(&self, other: &Self, output: &mut Self) -> bool {\n\n output.length.set(None);\n\n arch::union_into(self.as_array(), other.as_array(), output.as_mut_array())\n\n }\n\n\n\n /// Creates a new [`U16Bitmap`] with the set of all integers contained\n\n /// in `self`, `other` or both `self` and `other\n\n ///\n\n /// Computes `self ∪ other`, returning the result.\n\n ///\n\n pub fn union_new(&self, other: &Self) -> Self {\n\n let mut uninit = uninit_bit_box();\n\n arch::union_into_uninit(self.as_array(), other.as_array(), &mut *uninit);\n\n\n\n Self {\n\n bits: unsafe { assume_bit_box_init(uninit) },\n\n length: Cell::new(None),\n\n }\n\n }\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 42, "score": 164435.44871149998 }, { 
"content": "\n\n pub fn full() -> Self {\n\n let mut bits = uninit_bit_box();\n\n arch::set_uninit_bits_one(&mut bits);\n\n\n\n Self {\n\n // Safety: All bits have been initialized\n\n bits: unsafe { assume_bit_box_init(bits) },\n\n length: Cell::new(Some(MAX_LEN)),\n\n }\n\n }\n\n\n\n pub fn singleton(value: u16) -> Self {\n\n let mut this = Self::empty();\n\n this.insert(value);\n\n this\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n match self.raw_len() {\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 43, "score": 164431.5170675487 }, { "content": " clone\n\n }\n\n\n\n fn clone_from(&mut self, other: &Self) {\n\n self.as_mut_array().copy_from_slice(other.as_array());\n\n self.length = other.length.clone();\n\n debug_assert_eq!(self, other);\n\n }\n\n}\n\n\n\nimpl Drop for U16Bitmap {\n\n fn drop(&mut self) {\n\n // If the thread is panicking we just want to deallocate the box\n\n if thread::panicking() {\n\n // Safety: The box's contents will never be touched again\n\n unsafe { ManuallyDrop::drop(&mut self.bits) };\n\n\n\n // Otherwise we want to keep the allocation around for reuse\n\n } else {\n\n // Safety: The bits of this bitmap won't be touched again\n\n let bits = unsafe { ManuallyDrop::take(&mut self.bits) };\n\n BITMAP_CACHE.with_borrow_mut(|cache| cache.push(bits));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 44, "score": 164431.28356440808 }, { "content": " #[inline]\n\n fn shallow_not_full(&self) -> bool {\n\n matches!(self.raw_len(), Some(len) if len != MAX_LEN)\n\n }\n\n\n\n #[inline]\n\n fn raw_len(&self) -> Option<u32> {\n\n self.length.get()\n\n }\n\n\n\n pub(super) fn iter(&self) -> Iter<'_> {\n\n Iter::new(self)\n\n }\n\n\n\n pub(super) fn into_iter(self) -> IntoIter {\n\n IntoIter::new(self)\n\n }\n\n}\n\n\n\nimpl Default for U16Bitmap {\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 45, "score": 164430.28012297882 }, { "content": " }\n\n 
}\n\n\n\n pub fn clear(&mut self) {\n\n if self.shallow_not_empty() {\n\n self.length.set(Some(0));\n\n arch::set_bits_zero(self.as_mut_array());\n\n }\n\n }\n\n\n\n /// Fills `self` with the set of all integers contained\n\n /// in `self`, `other` or both `self` and `other\n\n ///\n\n /// Computes `self ∪ other`, storing the result in `self` and returning\n\n /// a boolean as to whether or not `self` has changed any\n\n ///\n\n pub fn union(&mut self, other: &Self) -> bool {\n\n // If both sets are empty, their union is empty\n\n if self.shallow_empty() && other.shallow_empty() {\n\n false\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 46, "score": 164429.81372302654 }, { "content": " let len = arch::popcount(self.as_array());\n\n self.length.set(Some(len));\n\n len\n\n });\n\n\n\n length as usize\n\n }\n\n\n\n pub fn contains(&self, value: u16) -> bool {\n\n let (index, offset) = (key(value), bit(value));\n\n debug_assert!(index < BITS_LEN);\n\n\n\n let lane = unsafe { *self.bits.get_unchecked(index) };\n\n lane & (1 << offset) != 0\n\n }\n\n\n\n pub fn insert(&mut self, value: u16) -> bool {\n\n let (index, offset) = (key(value), bit(value));\n\n debug_assert!(index < BITS_LEN);\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 47, "score": 164427.26187258787 }, { "content": " if self.shallow_empty() || other.shallow_empty() {\n\n output.clear();\n\n } else {\n\n output.length.set(None);\n\n arch::intersect_into(self.as_array(), other.as_array(), output.as_mut_array())\n\n }\n\n }\n\n\n\n pub fn intersect_new(&self, other: &Self) -> Self {\n\n if self.shallow_empty() || other.shallow_empty() {\n\n Self::empty()\n\n } else {\n\n let mut uninit = uninit_bit_box();\n\n arch::intersect_into_uninit(self.as_array(), other.as_array(), &mut *uninit);\n\n\n\n Self {\n\n bits: unsafe { assume_bit_box_init(uninit) },\n\n length: Cell::new(None),\n\n }\n\n }\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", 
"rank": 48, "score": 164427.12607931378 }, { "content": " assert_eq!(full, full);\n\n assert_eq!(empty, empty);\n\n }\n\n\n\n #[test]\n\n fn union() {\n\n let mut empty = U16Bitmap::empty();\n\n let full = U16Bitmap::full();\n\n\n\n assert_eq!(empty.union_new(&full), full);\n\n\n\n let mut half: U16Bitmap = (0..=u16::MAX).step_by(2).collect();\n\n assert_eq!(half.union_new(&empty), half);\n\n assert_eq!(half.union_new(&half), half);\n\n\n\n assert!(!half.union(&empty));\n\n assert!(empty.union(&half));\n\n assert_eq!(empty, half);\n\n }\n\n}\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 49, "score": 164424.53893782536 }, { "content": " maps.push(U16Bitmap::empty());\n\n }\n\n }\n\n\n\n #[test]\n\n fn equality() {\n\n let mut empty = U16Bitmap::empty();\n\n let full = U16Bitmap::full();\n\n\n\n assert_ne!(empty, full);\n\n\n\n // Force an actual equality check instead of just checking\n\n // for length equality\n\n full.length.set(None);\n\n assert_ne!(empty, full);\n\n\n\n empty.insert(10000);\n\n empty.insert(u16::MAX);\n\n assert_ne!(empty, full);\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 50, "score": 164424.27718669293 }, { "content": " let target = unsafe { self.bits.get_unchecked_mut(index) };\n\n let old = *target;\n\n *target |= 1 << offset;\n\n\n\n let was_inserted = *target != old;\n\n if was_inserted {\n\n self.length.update(|length| length.map(|length| length + 1));\n\n }\n\n\n\n was_inserted\n\n }\n\n\n\n pub fn remove(&mut self, value: u16) -> bool {\n\n let (index, offset) = (key(value), bit(value));\n\n debug_assert!(index < BITS_LEN);\n\n\n\n let target = unsafe { self.bits.get_unchecked_mut(index) };\n\n let old = *target;\n\n *target &= !(1 << offset);\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 51, "score": 164423.72802093343 }, { "content": "\n\n // If self is empty and other isn't, copy the bits of other into self\n\n } else if self.shallow_empty() && 
other.shallow_not_empty() {\n\n self.length.set(other.raw_len());\n\n self.bits.copy_from_slice(&**other.bits);\n\n true\n\n\n\n // Otherwise calculate the union\n\n } else {\n\n self.length.set(None);\n\n arch::union(self.as_mut_array(), other.as_array())\n\n }\n\n }\n\n\n\n /// Fills `output` with the set of all integers contained\n\n /// in `self`, `other` or both `self` and `other\n\n ///\n\n /// Computes `self ∪ other`, storing the result in `output` and returning\n\n /// a boolean as to whether or not `output` has changed any in comparison\n\n /// to `self` (that is, this function returns `true` if `output != self`)\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 52, "score": 164422.07392504695 }, { "content": " let was_removed = *target != old;\n\n if was_removed {\n\n self.length.update(|length| length.map(|length| length - 1));\n\n }\n\n\n\n was_removed\n\n }\n\n\n\n pub fn as_singleton(&self) -> Option<u16> {\n\n if self.len() != 1 {\n\n None\n\n } else {\n\n self.iter().next()\n\n }\n\n }\n\n\n\n pub fn fill(&mut self) {\n\n if self.shallow_not_full() {\n\n self.length.set(Some(MAX_LEN));\n\n arch::set_bits_one(self.as_mut_array());\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 53, "score": 164421.96184387998 }, { "content": " // len ≡ 0\n\n Some(0) => true,\n\n\n\n // len ≠ 0\n\n Some(_) => false,\n\n\n\n // len unknown\n\n None => {\n\n let is_empty = arch::all_bits_are_unset(self.as_array());\n\n if is_empty {\n\n self.length.set(Some(0));\n\n }\n\n\n\n is_empty\n\n }\n\n }\n\n }\n\n\n\n pub fn is_full(&self) -> bool {\n\n match self.raw_len() {\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 54, "score": 164421.65939916813 }, { "content": " #[inline]\n\n fn as_mut_array(&mut self) -> &mut BitArray {\n\n &mut *self.bits\n\n }\n\n\n\n #[inline]\n\n fn shallow_empty(&self) -> bool {\n\n matches!(self.raw_len(), Some(0))\n\n }\n\n\n\n #[inline]\n\n fn shallow_not_empty(&self) -> 
bool {\n\n matches!(self.raw_len(), Some(len) if len != 0)\n\n }\n\n\n\n #[inline]\n\n fn shallow_full(&self) -> bool {\n\n matches!(self.raw_len(), Some(len) if len == MAX_LEN)\n\n }\n\n\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 55, "score": 164418.39222195558 }, { "content": " // len ≡ MAX_LEN\n\n Some(len) if len == MAX_LEN => true,\n\n\n\n // len ≠ MAX_LEN\n\n Some(_) => false,\n\n\n\n // len unknown\n\n None => {\n\n let is_full = arch::all_bits_are_set(self.as_array());\n\n if is_full {\n\n self.length.set(Some(MAX_LEN));\n\n }\n\n\n\n is_full\n\n }\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n let length = self.raw_len().unwrap_or_else(|| {\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 56, "score": 164416.71887236778 }, { "content": "#[tracing::instrument(skip(source))]\n\npub fn parse_source(file: &Path, source: &str) -> Box<[Token]> {\n\n let file = file.display();\n\n\n\n tracing::info!(source_len = source.len(), \"started parsing '{}'\", file);\n\n let event = PerfEvent::new(\"parsing\");\n\n\n\n // Parse the program's source code into tokens\n\n let Parsed {\n\n tokens,\n\n source_len,\n\n total_tokens,\n\n deepest_nesting,\n\n } = parse::parse(source);\n\n\n\n let elapsed = event.finish();\n\n tracing::info!(\n\n source_len,\n\n total_tokens,\n\n deepest_nesting,\n\n \"finished parsing '{}' in {:#?}\",\n\n file,\n\n elapsed,\n\n );\n\n\n\n tokens\n\n}\n\n\n", "file_path": "src/driver.rs", "rank": 57, "score": 164313.20330518874 }, { "content": "struct ExpressionPuller<'a> {\n\n graph: &'a mut Rvsdg,\n\n body: &'a mut Rvsdg,\n\n removals: BTreeSet<NodeId>,\n\n params: Vec<(OutputPort, NodeId)>,\n\n constants: HashMap<OutputPort, Const>,\n\n invariant_exprs: VecDeque<OutputPort>,\n\n input_pairs: HashMap<OutputPort, OutputPort>,\n\n param_to_new: HashMap<OutputPort, OutputPort>,\n\n}\n\n\n\nimpl<'a> ExpressionPuller<'a> {\n\n fn new(\n\n graph: &'a mut Rvsdg,\n\n body: &'a mut Rvsdg,\n\n 
invariant_inputs: &[(OutputPort, OutputPort)],\n\n invariant_exprs: VecDeque<OutputPort>,\n\n input_pairs: HashMap<OutputPort, OutputPort>,\n\n ) -> Self {\n\n // TODO: Buffers\n", "file_path": "src/passes/licm.rs", "rank": 58, "score": 158823.68461825632 }, { "content": "/// Creates an [`IrBuilder`] for gamma branches\n\nfn gamma_branch_builder(inline_constants: bool) -> IrBuilder {\n\n IrBuilder {\n\n values: BTreeMap::new(),\n\n instructions: Vec::new(),\n\n evaluated: BTreeSet::new(),\n\n evaluation_stack: Vec::new(),\n\n top_level: false,\n\n inline_constants,\n\n }\n\n}\n\n\n", "file_path": "src/ir/builder.rs", "rank": 59, "score": 157711.4702059238 }, { "content": "#[derive(Debug)]\n\nstruct OutmostTheta {\n\n inner_gamma_id: NodeId,\n\n src_ptr: OutputPort,\n\n temp0_ptr: OutputPort,\n\n temp1_ptr: OutputPort,\n\n}\n\n\n\nimpl OutmostTheta {\n\n const fn new(\n\n inner_gamma_id: NodeId,\n\n src_ptr: OutputPort,\n\n temp0_ptr: OutputPort,\n\n temp1_ptr: OutputPort,\n\n ) -> Self {\n\n Self {\n\n inner_gamma_id,\n\n src_ptr,\n\n temp0_ptr,\n\n temp1_ptr,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/passes/square_cell.rs", "rank": 60, "score": 157633.83408170496 }, { "content": "#[derive(Debug)]\n\nstruct InnerGamma {\n\n inner_theta_id: NodeId,\n\n}\n\n\n\nimpl InnerGamma {\n\n const fn new(inner_theta_id: NodeId) -> Self {\n\n Self { inner_theta_id }\n\n }\n\n}\n\n\n\ntest_opts! 
{\n\n square_input,\n\n passes = |tape_len| bvec![SquareCell::new(tape_len)],\n\n input = [10],\n\n output = [100],\n\n |graph, effect, tape_len| {\n\n let source = include_str!(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/examples/square.bf\"));\n\n\n\n let ptr = graph.int(Ptr::new(0, tape_len));\n\n let (_ptr, effect) = compile_brainfuck_into(source, graph, ptr.value(), effect);\n\n effect\n\n },\n\n}\n", "file_path": "src/passes/square_cell.rs", "rank": 61, "score": 157573.73516726206 }, { "content": "/// Builds an input parameter for a gamma branch\n\nfn gamma_input_param(\n\n block: &mut Block,\n\n builder: &mut IrBuilder,\n\n graph: &Rvsdg,\n\n input_values: &BTreeMap<InputPort, Value>,\n\n input: InputPort,\n\n param: NodeId,\n\n branch: &str,\n\n) {\n\n let input_param = graph.to_node::<InputParam>(param);\n\n let input_id = VarId::new(input_param.output());\n\n\n\n let value = input_values.get(&input).cloned().unwrap_or(Value::Missing);\n\n block.push(Assign::input(input_id, value, Variance::None).into());\n\n\n\n if value.is_missing() {\n\n tracing::warn!(\n\n \"missing input value for gamma {} branch input {:?}: {:?}\",\n\n branch,\n\n input,\n\n input_param,\n\n );\n\n }\n\n\n\n builder\n\n .values\n\n .insert(input_param.output(), input_id.into())\n\n .debug_unwrap_none();\n\n}\n", "file_path": "src/ir/builder.rs", "rank": 62, "score": 157136.866574627 }, { "content": "#[cold]\n\n#[track_caller]\n\n#[inline(never)]\n\nfn panic_none_with_message(value: &dyn Debug, message: &str) -> ! 
{\n\n panic!(\n\n \"unwrapped {:?} when `None` was expected: {}\",\n\n value, message\n\n )\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 63, "score": 154608.69359776456 }, { "content": "pub fn percent_total(total: usize, subset: usize) -> f64 {\n\n let diff = (subset as f64 * 100.0) / total as f64;\n\n\n\n if diff.is_nan() || diff == -0.0 {\n\n 0.0\n\n } else {\n\n diff\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 64, "score": 151311.8869069935 }, { "content": "pub fn hex_encode<T: AsRef<[u8]>>(data: T) -> String {\n\n const HEX_CHARACTERS: [u8; 16] = *b\"0123456789abcdef\";\n\n\n\n let data = data.as_ref();\n\n\n\n let mut output = String::with_capacity(data.len() * 2);\n\n for &byte in data {\n\n output.push(HEX_CHARACTERS[(byte >> 4) as usize] as char);\n\n output.push(HEX_CHARACTERS[(byte & 0xF) as usize] as char);\n\n }\n\n\n\n output\n\n}\n\n\n\npub(crate) enum DebugCollapse<T> {\n\n Single(T),\n\n Many(T, usize, usize),\n\n}\n\n\n\nimpl<T> Debug for DebugCollapse<T>\n", "file_path": "src/utils.rs", "rank": 65, "score": 146301.8227448228 }, { "content": "type BitArray = [u64; BITS_LEN];\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 66, "score": 143994.2652770764 }, { "content": "pub trait Pretty {\n\n fn pretty_print(&self, config: PrettyConfig) -> String {\n\n let construction_start = Instant::now();\n\n\n\n let arena = Arena::<()>::new();\n\n let pretty = self.pretty(&arena, config);\n\n\n\n if STATIC_MAX_LEVEL >= LevelFilter::DEBUG && config.duration_logging {\n\n let elapsed = construction_start.elapsed();\n\n tracing::debug!(\n\n target: \"timings\",\n\n \"took {:#?} to construct pretty printed ir\",\n\n elapsed,\n\n );\n\n }\n\n\n\n let format_start = Instant::now();\n\n\n\n let mut output = String::with_capacity(4096);\n\n write!(output, \"{}\", pretty.1.pretty(RENDER_WIDTH))\n", "file_path": "src/ir/pretty_print.rs", "rank": 67, "score": 140512.48940950018 }, { "content": "fn 
from_utf8_lossy_buffered<'a>(buffer: &'a mut String, bytes: &'a [u8]) -> &'a str {\n\n let mut iter = Utf8Lossy::from_bytes(bytes).chunks();\n\n\n\n let first_valid = if let Some(chunk) = iter.next() {\n\n let Utf8LossyChunk { valid, broken } = chunk;\n\n if broken.is_empty() {\n\n debug_assert_eq!(valid.len(), bytes.len());\n\n return valid;\n\n }\n\n valid\n\n } else {\n\n return \"\";\n\n };\n\n\n\n const REPLACEMENT: &str = \"\\u{FFFD}\";\n\n\n\n buffer.clear();\n\n buffer.reserve(bytes.len());\n\n buffer.push_str(first_valid);\n\n buffer.push_str(REPLACEMENT);\n", "file_path": "src/jit/ffi.rs", "rank": 68, "score": 139521.89684738676 }, { "content": "pub fn lower_tokens(\n\n graph: &mut Rvsdg,\n\n mut ptr: OutputPort,\n\n mut effect: OutputPort,\n\n tokens: &[Token],\n\n) -> (OutputPort, OutputPort) {\n\n // FIXME: Byte node\n\n let (zero, one) = (graph.byte(0).value(), graph.byte(1).value());\n\n\n\n for token in tokens {\n\n match token {\n\n Token::IncPtr => ptr = graph.add(ptr, one).value(),\n\n Token::DecPtr => ptr = graph.sub(ptr, one).value(),\n\n\n\n Token::Inc => {\n\n // Load the pointed-to cell's current value\n\n let load = graph.load(ptr, effect);\n\n effect = load.output_effect();\n\n\n\n // Increment the loaded cell's value\n", "file_path": "src/lower_tokens.rs", "rank": 69, "score": 137408.56094493048 }, { "content": "type BitBox = ManuallyDrop<Box<BitArray>>;\n\n\n\nthread_local! 
{\n\n // FIXME: https://github.com/rust-lang/rust-clippy/issues/8493\n\n #[allow(clippy::declare_interior_mutable_const)]\n\n static BITMAP_CACHE: RefCell<Vec<Box<BitArray>>> = const { RefCell::new(Vec::new()) };\n\n}\n\n\n\npub struct U16Bitmap {\n\n bits: BitBox,\n\n length: Cell<Option<u32>>,\n\n}\n\n\n\nimpl U16Bitmap {\n\n pub fn empty() -> Self {\n\n Self {\n\n bits: zeroed_bit_box(),\n\n length: Cell::new(Some(0)),\n\n }\n\n }\n", "file_path": "src/passes/dataflow/domain/bitmap_u16/mod.rs", "rank": 70, "score": 137179.7309835569 }, { "content": "}\n\n\n\nimpl Scan {\n\n #[must_use]\n\n #[allow(clippy::too_many_arguments)]\n\n pub(in crate::graph) const fn new(\n\n node: NodeId,\n\n direction: ScanDirection,\n\n ptr: InputPort,\n\n step: InputPort,\n\n needle: InputPort,\n\n output_ptr: OutputPort,\n\n input_effect: InputPort,\n\n output_effect: OutputPort,\n\n ) -> Self {\n\n Self {\n\n node,\n\n direction,\n\n ptr,\n\n step,\n", "file_path": "src/graph/nodes/scan.rs", "rank": 71, "score": 136715.38981800413 }, { "content": "use crate::graph::{\n\n nodes::node_ext::{InputPortKinds, InputPorts, OutputPortKinds, OutputPorts},\n\n EdgeCount, EdgeDescriptor, EdgeKind, InputPort, NodeExt, NodeId, OutputPort,\n\n};\n\nuse tinyvec::tiny_vec;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct Scan {\n\n /// The id of the scan node\n\n node: NodeId,\n\n\n\n /// The direction the scan goes in, forwards or backwards\n\n direction: ScanDirection,\n\n\n\n /// The pointer to start scanning at\n\n ptr: InputPort,\n\n\n\n /// The distance the scan steps by\n\n step: InputPort,\n\n\n", "file_path": "src/graph/nodes/scan.rs", "rank": 72, "score": 136715.17958120824 }, { "content": " self.output_ptr = to;\n\n }\n\n\n\n if self.output_effect == from {\n\n self.output_effect = to;\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum ScanDirection {\n\n Forward,\n\n Backward,\n\n}\n\n\n\nimpl ScanDirection {\n\n /// Returns 
`true` if the scan direction is [`Forward`].\n\n ///\n\n /// [`Forward`]: ScanDirection::Forward\n\n #[must_use]\n", "file_path": "src/graph/nodes/scan.rs", "rank": 73, "score": 136701.72831719552 }, { "content": " }\n\n\n\n /// Get the scan's output pointer\n\n pub const fn output_ptr(&self) -> OutputPort {\n\n self.output_ptr\n\n }\n\n\n\n /// Get the scan's needle\n\n pub const fn needle(&self) -> InputPort {\n\n self.needle\n\n }\n\n\n\n /// Get the scan's input effect\n\n pub const fn input_effect(&self) -> InputPort {\n\n self.input_effect\n\n }\n\n\n\n /// Get the scan's output effect\n\n pub const fn output_effect(&self) -> OutputPort {\n\n self.output_effect\n", "file_path": "src/graph/nodes/scan.rs", "rank": 74, "score": 136692.8794153726 }, { "content": " needle,\n\n output_ptr,\n\n input_effect,\n\n output_effect,\n\n }\n\n }\n\n\n\n /// Get the scan's direction\n\n pub const fn direction(&self) -> ScanDirection {\n\n self.direction\n\n }\n\n\n\n /// Get the scan's pointer\n\n pub const fn ptr(&self) -> InputPort {\n\n self.ptr\n\n }\n\n\n\n /// Get the scan's step\n\n pub const fn step(&self) -> InputPort {\n\n self.step\n", "file_path": "src/graph/nodes/scan.rs", "rank": 75, "score": 136692.21420624747 }, { "content": " pub const fn is_forward(&self) -> bool {\n\n matches!(self, Self::Forward)\n\n }\n\n\n\n /// Returns `true` if the scan direction is [`Backward`].\n\n ///\n\n /// [`Backward`]: ScanDirection::Backward\n\n #[must_use]\n\n pub const fn is_backward(&self) -> bool {\n\n matches!(self, Self::Backward)\n\n }\n\n}\n", "file_path": "src/graph/nodes/scan.rs", "rank": 76, "score": 136690.5949803368 }, { "content": " /// The byte values being scanned for\n\n needle: InputPort,\n\n\n\n /// A pointer to a cell containing one of the requested needles,\n\n /// effectively `ptr ± offset` where `offset` is the offset to a cell\n\n /// with one of the requested needles and `ptr` is `self.ptr`.\n\n ///\n\n /// The relative offset can be either positive 
or negative (that is,\n\n /// the output pointer can be greater or less than the input pointer)\n\n /// no matter the scan direction because of the wrapping semantics of\n\n /// the program tape.\n\n ///\n\n /// Loops infinitely if no cells with the proper needle value can be found\n\n output_ptr: OutputPort,\n\n\n\n /// Input effect stream\n\n input_effect: InputPort,\n\n\n\n /// Output effect stream\n\n output_effect: OutputPort,\n", "file_path": "src/graph/nodes/scan.rs", "rank": 77, "score": 136688.5787304865 }, { "content": " }\n\n}\n\n\n\nimpl NodeExt for Scan {\n\n fn node(&self) -> NodeId {\n\n self.node\n\n }\n\n\n\n fn input_desc(&self) -> EdgeDescriptor {\n\n EdgeDescriptor::new(EdgeCount::one(), EdgeCount::exact(3))\n\n }\n\n\n\n fn all_input_ports(&self) -> InputPorts {\n\n tiny_vec![self.ptr, self.step, self.needle, self.input_effect]\n\n }\n\n\n\n fn all_input_port_kinds(&self) -> InputPortKinds {\n\n tiny_vec![[_; 4] =>\n\n (self.ptr, EdgeKind::Value),\n\n (self.step, EdgeKind::Value),\n", "file_path": "src/graph/nodes/scan.rs", "rank": 78, "score": 136687.97060593715 }, { "content": " }\n\n }\n\n\n\n fn output_desc(&self) -> EdgeDescriptor {\n\n EdgeDescriptor::new(EdgeCount::one(), EdgeCount::one())\n\n }\n\n\n\n fn all_output_ports(&self) -> OutputPorts {\n\n tiny_vec![self.output_ptr, self.output_effect]\n\n }\n\n\n\n fn all_output_port_kinds(&self) -> OutputPortKinds {\n\n tiny_vec![[_; 4] =>\n\n (self.output_ptr, EdgeKind::Value),\n\n (self.output_effect, EdgeKind::Effect),\n\n ]\n\n }\n\n\n\n fn update_output(&mut self, from: OutputPort, to: OutputPort) {\n\n if self.output_ptr == from {\n", "file_path": "src/graph/nodes/scan.rs", "rank": 79, "score": 136675.659620963 }, { "content": " (self.needle, EdgeKind::Value),\n\n (self.input_effect, EdgeKind::Effect),\n\n ]\n\n }\n\n\n\n fn update_input(&mut self, from: InputPort, to: InputPort) {\n\n if from == self.ptr {\n\n self.ptr = to;\n\n }\n\n\n\n if from == self.step {\n\n self.step = to;\n\n 
}\n\n\n\n if from == self.needle {\n\n self.needle = to;\n\n }\n\n\n\n if from == self.input_effect {\n\n self.input_effect = to;\n", "file_path": "src/graph/nodes/scan.rs", "rank": 80, "score": 136675.48953392237 }, { "content": "//! The theta node\n\n\n\nuse crate::{\n\n graph::{\n\n nodes::node_ext::{InputPortKinds, InputPorts, OutputPortKinds, OutputPorts},\n\n EdgeCount, EdgeDescriptor, EdgeKind, End, InputParam, InputPort, NodeExt, NodeId,\n\n OutputParam, OutputPort, Rvsdg, Start, Subgraph,\n\n },\n\n utils::{AssertNone, HashMap},\n\n};\n\nuse tinyvec::TinyVec;\n\n\n\n// TODO: Probably want reverse lookup maps as well as the current forward ones\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Theta {\n\n /// The theta's [`NodeId`]\n\n node: NodeId,\n\n\n\n /// Theta nodes don't have to have effects, the inner [`Subgraph`]\n\n /// will have them regardless (only from [`Start`] to [`End`] node though).\n", "file_path": "src/graph/nodes/theta.rs", "rank": 81, "score": 136645.22180320136 }, { "content": " tracing::trace!(\n\n node = ?parent_node,\n\n \"tried to replace output effect {:?} of Theta with {:?} but Theta doesn't have that port\",\n\n from, to,\n\n );\n\n }\n\n }\n\n}\n\n\n\n/// The effects of a theta node\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub(in crate::graph) struct ThetaEffects {\n\n /// The input effect's port on the theta node\n\n input: InputPort,\n\n /// The output effect's port on the theta node\n\n output: OutputPort,\n\n}\n\n\n\nimpl ThetaEffects {\n\n pub const fn new(input: InputPort, output: OutputPort) -> Self {\n", "file_path": "src/graph/nodes/theta.rs", "rank": 82, "score": 136639.81126906764 }, { "content": " /// [`End`]: crate::graph::End\n\n subgraph: Box<Subgraph>,\n\n}\n\n\n\nimpl Theta {\n\n /// Create a new theta node\n\n #[allow(clippy::too_many_arguments)]\n\n pub(in crate::graph) fn new(\n\n node: NodeId,\n\n effects: Option<ThetaEffects>,\n\n invariant_inputs: HashMap<InputPort, NodeId>,\n\n 
variant_inputs: HashMap<InputPort, NodeId>,\n\n outputs: HashMap<OutputPort, NodeId>,\n\n output_feedback: HashMap<OutputPort, InputPort>,\n\n condition: NodeId,\n\n subgraph: Box<Subgraph>,\n\n ) -> Self {\n\n if cfg!(debug_assertions) {\n\n assert_eq!(variant_inputs.len(), outputs.len());\n\n assert_eq!(outputs.len(), output_feedback.len());\n", "file_path": "src/graph/nodes/theta.rs", "rank": 83, "score": 136637.3205292428 }, { "content": " Self { input, output }\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub const fn output(&self) -> OutputPort {\n\n self.output\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct ThetaData {\n\n pub(in crate::graph) outputs: Box<[OutputPort]>,\n\n pub(in crate::graph) condition: OutputPort,\n\n pub(in crate::graph) effect: OutputPort,\n\n}\n\n\n\nimpl ThetaData {\n\n pub fn new<O>(outputs: O, condition: OutputPort, effect: OutputPort) -> Self\n\n where\n\n O: IntoIterator<Item = OutputPort>,\n\n {\n\n Self {\n\n outputs: outputs.into_iter().collect(),\n\n condition,\n\n effect,\n\n }\n\n }\n\n}\n", "file_path": "src/graph/nodes/theta.rs", "rank": 84, "score": 136634.75114642555 }, { "content": " has_child_thetas\n\n }\n\n}\n\n\n\n/// Utility functions\n\n// TODO: Function to inline the theta's body into the given graph\n\nimpl Theta {\n\n pub fn get_output_param(&self, port: OutputPort) -> Option<OutputParam> {\n\n self.outputs\n\n .get(&port)\n\n .map(|&node| self.subgraph.to_node(node))\n\n .copied()\n\n }\n\n\n\n /// Returns `true` if the theta's condition is always `false`\n\n pub fn is_infinite(&self) -> bool {\n\n let cond = self.condition();\n\n let condition_is_false = self\n\n .body()\n\n .input_source_node(cond.input())\n", "file_path": "src/graph/nodes/theta.rs", "rank": 85, "score": 136630.0412736898 }, { "content": " /// Just because a theta node isn't effectful doesn't mean it's useless,\n\n /// it can still have data dependencies\n\n ///\n\n /// [`Start`]: crate::graph::Start\n\n /// [`End`]: 
crate::graph::End\n\n effects: Option<ThetaEffects>,\n\n\n\n /// These are the inputs that go into the loop's body\n\n /// but do not change between iterations\n\n ///\n\n /// These point from an [`InputPort`] on the theta to the [`NodeId`]\n\n /// of an [`InputParam`] within the theta's body\n\n invariant_inputs: HashMap<InputPort, NodeId>,\n\n\n\n /// These inputs change upon each iteration, they're connected to `output_back_edges`\n\n ///\n\n /// These point from an [`InputPort`] on the theta to the [`NodeId`]\n\n /// of an [`InputParam`] within the theta's body\n\n variant_inputs: HashMap<InputPort, NodeId>,\n\n\n", "file_path": "src/graph/nodes/theta.rs", "rank": 86, "score": 136627.1646519455 }, { "content": " /// The node's outputs, these all must be included in `output_back_edges`. Any invariant\n\n /// data must go around the loop to reach dependents after it\n\n ///\n\n /// These point from an [`OutputPort`] on the theta to the [`NodeId`]\n\n /// of an [`OutputParam`] within the theta's body\n\n outputs: HashMap<OutputPort, NodeId>,\n\n\n\n /// These are the relationships between outputs and variant inputs\n\n ///\n\n /// These point from an [`OutputPort`] in `outputs` to an [`InputPort`] in `variant_inputs`\n\n output_feedback: HashMap<OutputPort, InputPort>,\n\n\n\n /// The theta's condition, it should be an expression that evaluates to a boolean\n\n ///\n\n /// Points to an [`OutputParam`] within the theta's body\n\n condition: NodeId,\n\n\n\n /// The theta's loop body, contains the [`Start`] and [`End`] [`NodeId`]s\n\n ///\n\n /// [`Start`]: crate::graph::Start\n", "file_path": "src/graph/nodes/theta.rs", "rank": 87, "score": 136625.9849243502 }, { "content": " pub fn output_params(&self) -> impl Iterator<Item = OutputParam> + '_ {\n\n self.outputs\n\n .iter()\n\n .map(|(_, &param)| self.subgraph.to_node(param))\n\n .copied()\n\n }\n\n\n\n /// Returns the node ids of outputs from the theta node, *not including effect outputs*\n\n pub fn 
output_param_ids(&self) -> impl Iterator<Item = NodeId> + '_ {\n\n self.outputs.values().copied()\n\n }\n\n\n\n /// Returns all variant inputs to the theta node along with the output\n\n /// that loops back into the given input\n\n pub fn variant_inputs_loopback(&self) -> impl Iterator<Item = (InputParam, OutputParam)> + '_ {\n\n self.output_feedback.iter().map(|(output, input)| {\n\n let (input, output) = (self.variant_inputs[input], self.outputs[output]);\n\n (\n\n *self.subgraph.to_node(input),\n\n *self.subgraph.to_node(output),\n", "file_path": "src/graph/nodes/theta.rs", "rank": 88, "score": 136621.31811640895 }, { "content": " )\n\n })\n\n }\n\n\n\n /// Returns `true` if the given [`OutputParam`] feeds back to the given variant [`InputParam`]\n\n ///\n\n /// Will return `false` if either of the given [`OutputParam`] or [`InputParam`]s don't exist\n\n /// within the current [`Theta`] or if the given [`InputParam`] isn't a *variant* input.\n\n pub(crate) fn output_feeds_back_to(\n\n &self,\n\n output: &OutputParam,\n\n variant_input: &InputParam,\n\n ) -> bool {\n\n self.outputs\n\n .iter()\n\n .find(|(_, &output_id)| output_id == output.node())\n\n .and_then(|(output_port, _)| self.output_feedback.get(output_port))\n\n .and_then(|input_port| self.variant_inputs.get(input_port))\n\n .map_or(false, |&input_node| input_node == variant_input.node())\n\n }\n", "file_path": "src/graph/nodes/theta.rs", "rank": 89, "score": 136619.47356333592 }, { "content": "\n\n /// Gets the [`InputPort`] of the parent [`Theta`] that feeds into the given variant [`InputParam`]\n\n pub fn variant_input_source(&self, variant_input: &InputParam) -> Option<InputPort> {\n\n self.variant_inputs\n\n .iter()\n\n .find(|(_, &input)| input == variant_input.node())\n\n .map(|(&input_port, _)| input_port)\n\n }\n\n\n\n pub(crate) fn has_child_thetas(&self) -> bool {\n\n let mut has_child_thetas = false;\n\n self.body().try_for_each_transitive_node(|_, node| {\n\n if node.is_theta() {\n\n 
has_child_thetas = true;\n\n false\n\n } else {\n\n true\n\n }\n\n });\n\n\n", "file_path": "src/graph/nodes/theta.rs", "rank": 90, "score": 136618.77244390166 }, { "content": " self.outputs.remove(&output).debug_unwrap();\n\n }\n\n\n\n /// Returns the number of all inputs (variant and invariant) to the theta node,\n\n /// *not including effect inputs*\n\n pub fn inputs_len(&self) -> usize {\n\n self.invariant_inputs_len() + self.variant_inputs_len()\n\n }\n\n\n\n /// Returns the input ports of all inputs (variant and invariant) to the theta node,\n\n /// *not including effect inputs*\n\n pub fn input_ports(&self) -> impl Iterator<Item = InputPort> + '_ {\n\n self.invariant_input_ports()\n\n .chain(self.variant_input_ports())\n\n }\n\n\n\n /// Returns the input ports and the associated input param of all inputs (variant and invariant)\n\n /// to the theta node, *not including effect inputs*\n\n pub fn input_pairs(&self) -> impl Iterator<Item = (InputPort, InputParam)> + '_ {\n\n self.invariant_input_pairs()\n", "file_path": "src/graph/nodes/theta.rs", "rank": 91, "score": 136618.65592759947 }, { "content": " tracing::error!(\"tried to set input effect on theta without effect edges\");\n\n }\n\n }\n\n\n\n pub fn set_output_effect(&mut self, output_effect: OutputPort) {\n\n if let Some(effects) = self.effects.as_mut() {\n\n effects.output = output_effect;\n\n } else {\n\n tracing::error!(\"tried to set output effect on theta without effect edges\");\n\n }\n\n }\n\n\n\n pub fn remove_invariant_input(&mut self, input: InputPort) {\n\n self.invariant_inputs.remove(&input);\n\n }\n\n\n\n pub fn add_invariant_input_raw(&mut self, input: InputPort, param: NodeId) {\n\n debug_assert!(self.body().contains_node(param));\n\n debug_assert!(self.body().get_node(param).is_input_param());\n\n\n", "file_path": "src/graph/nodes/theta.rs", "rank": 92, "score": 136618.21390916433 }, { "content": " subgraph,\n\n }\n\n }\n\n\n\n /// Get the [`NodeId`] of the theta node's condition\n\n 
///\n\n /// Should be an [`OutputParam`] in the theta's body\n\n pub const fn condition_id(&self) -> NodeId {\n\n self.condition\n\n }\n\n\n\n /// Get the [`OutputParam`] of the theta's condition from within its body\n\n pub fn condition(&self) -> OutputParam {\n\n *self.subgraph.to_node(self.condition)\n\n }\n\n\n\n /// Get the [`Start`] of the theta's body\n\n pub fn start_node(&self) -> Start {\n\n self.subgraph.start_node()\n\n }\n", "file_path": "src/graph/nodes/theta.rs", "rank": 93, "score": 136617.8459959413 }, { "content": "\n\n /// Get the [`End`] of the theta's body\n\n pub fn end_node(&self) -> End {\n\n self.subgraph.end_node()\n\n }\n\n\n\n /// Get the [`NodeId`] of the theta body's [`End`] node\n\n pub const fn end_node_id(&self) -> NodeId {\n\n self.subgraph.end\n\n }\n\n\n\n /// Get access to the theta node's body\n\n pub fn body(&self) -> &Rvsdg {\n\n &self.subgraph\n\n }\n\n\n\n /// Get mutable access to the theta node's body\n\n pub fn body_mut(&mut self) -> &mut Rvsdg {\n\n &mut self.subgraph\n\n }\n", "file_path": "src/graph/nodes/theta.rs", "rank": 94, "score": 136617.61348186442 }, { "content": "\n\n /// Get the input effect's port from the theta node if it's available\n\n pub fn input_effect(&self) -> Option<InputPort> {\n\n self.effects.map(|effects| effects.input)\n\n }\n\n\n\n /// Get a mutable reference to the input effect's port from\n\n /// the theta node if it's available\n\n pub fn input_effect_mut(&mut self) -> Option<&mut InputPort> {\n\n self.effects.as_mut().map(|effects| &mut effects.input)\n\n }\n\n\n\n /// Get the output effect's port from the theta node if available\n\n pub fn output_effect(&self) -> Option<OutputPort> {\n\n self.effects.map(|effects| effects.output)\n\n }\n\n\n\n /// Get a mutable reference to the output effect's port from\n\n /// the theta node if it's available\n\n pub fn output_effect_mut(&mut self) -> Option<&mut OutputPort> {\n", "file_path": "src/graph/nodes/theta.rs", "rank": 95, "score": 
136616.53523696284 }, { "content": " self.outputs.iter().map(|(&port, &param)| (port, param))\n\n }\n\n\n\n pub fn output_pair_ids_with_feedback(\n\n &self,\n\n ) -> impl Iterator<Item = (OutputPort, NodeId, InputPort)> + '_ {\n\n self.outputs\n\n .iter()\n\n .map(|(&port, &param)| (port, param, self.output_feedback[&port]))\n\n }\n\n\n\n pub fn replace_outputs(&mut self, outputs: HashMap<OutputPort, NodeId>) {\n\n self.outputs = outputs;\n\n }\n\n\n\n pub fn replace_output_feedback(&mut self, output_feedback: HashMap<OutputPort, InputPort>) {\n\n self.output_feedback = output_feedback;\n\n }\n\n\n\n /// Returns the outputs from the theta node, *not including effect outputs*\n", "file_path": "src/graph/nodes/theta.rs", "rank": 96, "score": 136616.35040865358 }, { "content": "\n\n /// Returns the node ids of each variant input and the input port that feeds into them\n\n pub fn variant_input_pair_ids(&self) -> impl Iterator<Item = (InputPort, NodeId)> + '_ {\n\n self.variant_inputs\n\n .iter()\n\n .map(|(&port, &param)| (port, param))\n\n }\n\n\n\n /// Returns the variant inputs to the theta node\n\n pub fn variant_input_params(&self) -> impl Iterator<Item = InputParam> + '_ {\n\n self.variant_inputs\n\n .iter()\n\n .map(|(_, &param)| self.subgraph.to_node(param))\n\n .copied()\n\n }\n\n\n\n /// Returns the node ids of variant inputs to the theta node\n\n pub fn variant_input_param_ids(&self) -> impl Iterator<Item = NodeId> + '_ {\n\n self.variant_inputs.values().copied()\n\n }\n", "file_path": "src/graph/nodes/theta.rs", "rank": 97, "score": 136615.6285229743 }, { "content": " /// Returns the node ids of each invariant input and the input port that feeds into them\n\n pub fn invariant_input_pair_ids(&self) -> impl Iterator<Item = (InputPort, NodeId)> + '_ {\n\n self.invariant_inputs\n\n .iter()\n\n .map(|(&port, &param)| (port, param))\n\n }\n\n\n\n /// Returns the invariant inputs to the theta node\n\n pub fn invariant_input_params(&self) -> impl Iterator<Item = 
InputParam> + '_ {\n\n self.invariant_inputs\n\n .iter()\n\n .map(|(_, &param)| self.subgraph.to_node(param))\n\n .copied()\n\n }\n\n\n\n /// Returns the node ids of invariant inputs to the theta node\n\n pub fn invariant_input_param_ids(&self) -> impl Iterator<Item = NodeId> + '_ {\n\n self.invariant_inputs.values().copied()\n\n }\n\n\n", "file_path": "src/graph/nodes/theta.rs", "rank": 98, "score": 136615.6285229743 }, { "content": "\n\n pub fn retain_invariant_inputs<F>(&mut self, mut retain: F)\n\n where\n\n F: FnMut(InputPort, NodeId) -> bool,\n\n {\n\n self.invariant_inputs\n\n .retain(|&port, &mut param| retain(port, param));\n\n }\n\n\n\n /// Removes a variant input, the output it's fed from and the feedback entry for them\n\n pub fn remove_variant_input(&mut self, input: InputPort) {\n\n self.variant_inputs.remove(&input).debug_unwrap();\n\n\n\n let output = self\n\n .output_feedback\n\n .iter()\n\n .find_map(|(&output, &port)| (port == input).then(|| output))\n\n .unwrap();\n\n\n\n self.output_feedback.remove(&output).debug_unwrap();\n", "file_path": "src/graph/nodes/theta.rs", "rank": 99, "score": 136615.60752106982 } ]
Rust
garnet/bin/setui/src/tests/media_buttons_agent_tests.rs
allansrc/fuchsia
a2c235b33fc4305044d496354a08775f30cdcf37
use crate::agent::media_buttons; use crate::agent::Invocation; use crate::agent::Lifespan; use crate::agent::{Context, Payload}; use crate::event::{self, Event}; use crate::input::{MediaButtons, VolumeGain}; use crate::message::base::{Audience, MessengerType}; use crate::message::MessageHubUtil; use crate::service; use crate::service_context::ServiceContext; use crate::tests::fakes::input_device_registry_service::InputDeviceRegistryService; use crate::tests::fakes::service_registry::ServiceRegistry; use fidl_fuchsia_ui_input::MediaButtonsEvent; use futures::lock::Mutex; use media_buttons::MediaButtonsAgent; use std::collections::HashSet; use std::sync::Arc; struct FakeServices { input_device_registry: Arc<Mutex<InputDeviceRegistryService>>, } async fn create_services() -> (Arc<Mutex<ServiceRegistry>>, FakeServices) { let service_registry = ServiceRegistry::create(); let input_device_registry_service_handle = Arc::new(Mutex::new(InputDeviceRegistryService::new())); service_registry.lock().await.register_service(input_device_registry_service_handle.clone()); (service_registry, FakeServices { input_device_registry: input_device_registry_service_handle }) } #[fuchsia_async::run_until_stalled(test)] async fn test_media_buttons_proxied() { let service_hub = service::MessageHub::create_hub(); let agent_receptor = service_hub .create(MessengerType::Unbound) .await .expect("Unable to create agent messenger") .1; let signature = agent_receptor.get_signature(); let (agent_messenger, _) = service_hub.create(MessengerType::Unbound).await.expect("Unable to create agent messenger"); let mut event_receptor = service::build_event_listener(&service_hub).await; let context = Context::new(agent_receptor, service_hub, HashSet::new(), HashSet::new(), None).await; MediaButtonsAgent::create(context).await; let (service_registry, fake_services) = create_services().await; let service_context = Arc::new(ServiceContext::new(Some(ServiceRegistry::serve(service_registry)), None)); let 
invocation = Invocation { lifespan: Lifespan::Service, service_context }; let mut reply_receptor = agent_messenger .message(Payload::Invocation(invocation).into(), Audience::Messenger(signature)) .send(); let mut completion_result = None; if let Ok((Payload::Complete(result), _)) = reply_receptor.next_of::<Payload>().await { completion_result = Some(result); } assert!( matches!(completion_result, Some(Ok(()))), "Did not receive a completion event from the invocation message" ); fake_services .input_device_registry .lock() .await .send_media_button_event(MediaButtonsEvent { volume: Some(1), mic_mute: Some(true), pause: None, camera_disable: None, ..MediaButtonsEvent::EMPTY }) .await; let mut mic_mute_received = false; let mut volume_received = false; while let Ok((event::Payload::Event(event), _)) = event_receptor.next_of::<event::Payload>().await { if let Event::MediaButtons(event) = event { match event { event::media_buttons::Event::OnButton(MediaButtons { mic_mute: Some(true), .. }) => { mic_mute_received = true; if volume_received { break; } } event::media_buttons::Event::OnVolume(VolumeGain::Up) => { volume_received = true; if mic_mute_received { break; } } _ => {} } } } assert!(mic_mute_received); assert!(volume_received); }
use crate::agent::media_buttons; use crate::agent::Invocation; use crate::agent::Lifespan; use crate::agent::{Context, Payload}; use crate::event::{self, Event}; use crate::input::{MediaButtons, VolumeGain}; use crate::message::base::{Audience, MessengerType}; use crate::message::MessageHubUtil; use crate::service; use crate::service_context::ServiceContext; use crate::tests::fakes::input_device_registry_service::InputDeviceRegistryService; use crate::tests::fakes::service_registry::ServiceRegistry; use fidl_fuchsia_ui_input::MediaButtonsEvent; use futures::lock::Mutex; use media_buttons::MediaButtonsAgent; use std::collections::HashSet; use std::sync::Arc; struct FakeServices { input_device_registry: Arc<Mutex<InputDeviceRegistryService>>, } async fn create_services() -> (Arc<Mutex<ServiceRegistry>>, FakeServices) { let service_registry = ServiceRegistry::create(); let input_device_registry_service_handle = Arc::new(Mutex::new(InputDeviceRegistryService::new())); service_registry.lock().await.register_service(input_device_registry_service_handle.clone()); (service_registry, FakeServices { input_device_registry: input_device_registry_service_handle }) } #[fuchsia_async::run_until_stalled(test)] async fn test_media_buttons_proxied() { let service_hub = service::MessageHub::create_hub(); let agent_receptor = service_hub .create(MessengerType::Unbound) .await .expect("Unable to create agent messenger") .1; let signature = agent_receptor.get_signature(); let (agent_messenger, _) = service_hub.create(MessengerType::Unbound).await.expect("Unable to create agent messenger"); let mut event_receptor = service::build_event_listener(&service_hub).await; let context = Context::new(agent_receptor, service_hub, HashSet::new(), HashSet::new(), None).await; MediaButtonsAgent::create(context).await; let (service_registry, fake_services) = create_services().await; let service_context = Arc::new(ServiceContext::new(Some(ServiceRegistry::serve(service_registry)), None)); let 
invocation = Invocation { lifespan: Lifespan::Service, service_context }; let mut reply_receptor = agent_messenger .message(Payload::Invocation(invocation).into(), Audience::Messenger(signature)) .send(); let mut completion_result = None;
assert!( matches!(completion_result, Some(Ok(()))), "Did not receive a completion event from the invocation message" ); fake_services .input_device_registry .lock() .await .send_media_button_event(MediaButtonsEvent { volume: Some(1), mic_mute: Some(true), pause: None, camera_disable: None, ..MediaButtonsEvent::EMPTY }) .await; let mut mic_mute_received = false; let mut volume_received = false; while let Ok((event::Payload::Event(event), _)) = event_receptor.next_of::<event::Payload>().await { if let Event::MediaButtons(event) = event { match event { event::media_buttons::Event::OnButton(MediaButtons { mic_mute: Some(true), .. }) => { mic_mute_received = true; if volume_received { break; } } event::media_buttons::Event::OnVolume(VolumeGain::Up) => { volume_received = true; if mic_mute_received { break; } } _ => {} } } } assert!(mic_mute_received); assert!(volume_received); }
if let Ok((Payload::Complete(result), _)) = reply_receptor.next_of::<Payload>().await { completion_result = Some(result); }
if_condition
[]
Rust
src/reconnectable_ws.rs
hermanodecastro/openlimits
af7b4d59c6a874c662bbc5ace9271bcb1956a8c5
use crate::errors::OpenLimitsError; use crate::exchange_ws::{CallbackHandle, ExchangeWs, OpenLimitsWs, Subscriptions}; use crate::model::websocket::{Subscription, WebSocketResponse}; use crate::shared::Result; use futures::stream::BoxStream; use std::sync::Arc; use std::thread::sleep; use tokio::sync::mpsc::{unbounded_channel, UnboundedSender}; use tokio::sync::Mutex; use tokio::time::Duration; pub type SubscriptionCallback<Response> = Arc<dyn Fn(&Result<WebSocketResponse<Response>>) + Sync + Send + 'static>; pub type SubscriptionCallbackRegistry<E> = ( Subscription, SubscriptionCallback<<E as ExchangeWs>::Response>, ); pub struct ReconnectableWebsocket<E: ExchangeWs> { websocket: Arc<Mutex<OpenLimitsWs<E>>>, tx: UnboundedSender<()>, subscriptions: Arc<Mutex<Vec<SubscriptionCallbackRegistry<E>>>>, } impl<E: ExchangeWs + 'static> ReconnectableWebsocket<E> { pub async fn instantiate(params: E::InitParams, reattempt_interval: Duration) -> Result<Self> { let websocket = E::new(params.clone()).await?; let websocket = OpenLimitsWs { websocket }; let websocket = Arc::new(Mutex::new(websocket)); let subscriptions: Arc<Mutex<Vec<SubscriptionCallbackRegistry<E>>>> = Arc::new(Mutex::new(Default::default())); let (tx, mut rx) = unbounded_channel(); { let websocket = Arc::downgrade(&websocket); let subscriptions = Arc::downgrade(&subscriptions); let tx = tx.clone(); tokio::spawn(async move { while rx.recv().await.is_some() { 'reconnection: loop { if let (Some(websocket), Some(subscriptions)) = (websocket.upgrade(), subscriptions.upgrade()) { if let Ok(new_websocket) = E::new(params.clone()).await { let new_websocket = OpenLimitsWs { websocket: new_websocket, }; let mut websocket = websocket.lock().await; *websocket = new_websocket; let subscriptions = { subscriptions.lock().await.clone() }; let subscriptions = subscriptions.iter().map(|(subscription, callback)| { let callback = callback.clone(); let tx = tx.clone(); websocket.subscribe(subscription.clone(), move |message| { if 
let Err(OpenLimitsError::SocketError()) = message.as_ref() { tx.send(()).ok(); } callback(message) }) }); if futures_util::future::join_all(subscriptions) .await .iter() .all(|subscription| subscription.is_ok()) { break 'reconnection; } } sleep(reattempt_interval); } } } }); } Ok(Self { websocket, tx, subscriptions, }) } pub async fn create_stream_specific( &self, subscriptions: Subscriptions<E::Subscription>, ) -> Result<BoxStream<'static, Result<E::Response>>> { self.websocket .lock() .await .create_stream_specific(subscriptions) .await } pub async fn subscribe< F: Fn(&Result<WebSocketResponse<E::Response>>) + Sync + Send + Clone + 'static, >( &self, subscription: Subscription, callback: F, ) -> Result<CallbackHandle> { let tx = self.tx.clone(); self.subscriptions .lock() .await .push((subscription.clone(), Arc::new(callback.clone()))); self.websocket .lock() .await .subscribe(subscription, move |message| { if let Err(OpenLimitsError::SocketError()) = message.as_ref() { tx.send(()).ok(); } callback(message); }) .await } pub async fn create_stream<S: Into<E::Subscription> + Clone + Send + Sync>( &self, subscriptions: &[S], ) -> Result<BoxStream<'static, Result<WebSocketResponse<E::Response>>>> { self.websocket .lock() .await .create_stream(subscriptions) .await } pub async fn disconnect(&self) { self.websocket.lock().await.disconnect().await; } }
use crate::errors::OpenLimitsError; use crate::exchange_ws::{CallbackHandle, ExchangeWs, OpenLimitsWs, Subscriptions}; use crate::model::websocket::{Subscription, WebSocketResponse}; use crate::shared::Result; use futures::stream::BoxStream; use std::sync::Arc; use std::thread::sleep; use tokio::sync::mpsc::{unbounded_channel, UnboundedSender}; use tokio::sync::Mutex; use tokio::time::Duration; pub type SubscriptionCallback<Response> = Arc<dyn Fn(&Result<WebSocketResponse<Response>>) + Sync + Send + 'static>; pub type SubscriptionCallbackRegistry<E> = ( Subscription, SubscriptionCallback<<E as ExchangeWs>::Response>, ); pub struct ReconnectableWebsocket<E: ExchangeWs> { websocket: Arc<Mutex<OpenLimitsWs<E>>>, tx: UnboundedSender<()>, subscriptions: Arc<Mutex<Vec<SubscriptionCallbackRegistry<E>>>>, } impl<E: ExchangeWs + 'static> ReconnectableWebsocket<E> { pub async fn instantiate(params: E::InitParams, reattempt_interval: Duration) -> Result<Self> { let websocket = E::new(params.clone()).await?; let websocket = OpenLimitsWs { websocket }; let websocket = Arc::new(Mutex::new(websocket)); let subscriptions: Arc<Mutex<Vec<SubscriptionCallbackRegistry<E>>>> = Arc::new(Mutex::new(Default::default())); let (tx, mut rx) = unbounded_channel(); { let websocket = Arc::downgrade(&websocket); let subscriptions = Arc::downgrade(&subscriptions); let tx = tx.clone(); tokio::spawn(async move { while rx.recv().await.is_some() { 'reconnection: loop { if let (Some(websocket), Some(subscriptions)) = (websocket.upgrade(), subscriptions.upgrade()) { if let Ok(new_websocket) = E::new(params.clone()).await { let new_websocket = OpenLimitsWs { websocket: new_websocket, }; let mut websocket = websocket.lock().await; *websocket = new_websocket; let subscriptions = { subscriptions.lock().await.clone() }; let subscriptions = subscriptions.iter().map(|(subscription, callback)| { let callback = callback.clone(); let tx = tx.clone(); websocket.subscribe(subscription.clone(), move |message| { if 
let Err(OpenLimitsError::SocketError()) = message.as_ref() { tx.send(()).ok(); }
pub async fn create_stream_specific( &self, subscriptions: Subscriptions<E::Subscription>, ) -> Result<BoxStream<'static, Result<E::Response>>> { self.websocket .lock() .await .create_stream_specific(subscriptions) .await } pub async fn subscribe< F: Fn(&Result<WebSocketResponse<E::Response>>) + Sync + Send + Clone + 'static, >( &self, subscription: Subscription, callback: F, ) -> Result<CallbackHandle> { let tx = self.tx.clone(); self.subscriptions .lock() .await .push((subscription.clone(), Arc::new(callback.clone()))); self.websocket .lock() .await .subscribe(subscription, move |message| { if let Err(OpenLimitsError::SocketError()) = message.as_ref() { tx.send(()).ok(); } callback(message); }) .await } pub async fn create_stream<S: Into<E::Subscription> + Clone + Send + Sync>( &self, subscriptions: &[S], ) -> Result<BoxStream<'static, Result<WebSocketResponse<E::Response>>>> { self.websocket .lock() .await .create_stream(subscriptions) .await } pub async fn disconnect(&self) { self.websocket.lock().await.disconnect().await; } }
callback(message) }) }); if futures_util::future::join_all(subscriptions) .await .iter() .all(|subscription| subscription.is_ok()) { break 'reconnection; } } sleep(reattempt_interval); } } } }); } Ok(Self { websocket, tx, subscriptions, }) }
function_block-function_prefix_line
[ { "content": "#[async_trait]\n\npub trait ExchangeWs: Send + Sync + Sized {\n\n type InitParams: Clone + Send + Sync + 'static;\n\n type Subscription: From<Subscription> + Send + Sync + Sized + Clone;\n\n type Response: TryInto<WebSocketResponse<Self::Response>, Error = OpenLimitsError>\n\n + Send\n\n + Sync\n\n + Clone\n\n + Sized\n\n + Debug\n\n + 'static;\n\n\n\n async fn new(params: Self::InitParams) -> Result<Self>;\n\n\n\n async fn disconnect(&self);\n\n\n\n async fn create_stream_specific(\n\n &self,\n\n subscriptions: Subscriptions<Self::Subscription>,\n\n ) -> Result<BoxStream<'static, Result<Self::Response>>>;\n\n\n", "file_path": "src/exchange_ws.rs", "rank": 0, "score": 119419.3980410245 }, { "content": "fn parse_message(ws_message: Message) -> Result<CoinbaseWebsocketMessage> {\n\n let msg = match ws_message {\n\n Message::Text(m) => m,\n\n _ => return Err(OpenLimitsError::SocketError()),\n\n };\n\n Ok(serde_json::from_str(&msg)?)\n\n}\n\n\n\n#[async_trait]\n\nimpl ExchangeWs for CoinbaseWebsocket {\n\n type InitParams = CoinbaseParameters;\n\n type Subscription = CoinbaseSubscription;\n\n type Response = CoinbaseWebsocketMessage;\n\n\n\n async fn new(parameters: Self::InitParams) -> Result<Self> {\n\n Ok(CoinbaseWebsocket::new(parameters))\n\n }\n\n\n\n async fn disconnect(&self) {\n\n if let Ok(mut senders) = self.disconnection_senders.lock() {\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 1, "score": 115496.55660945199 }, { "content": "fn parse_message(ws_message: Message) -> Result<BinanceWebsocketMessage> {\n\n let msg = match ws_message {\n\n Message::Text(m) => m,\n\n Message::Binary(b) => return Ok(BinanceWebsocketMessage::Binary(b)),\n\n Message::Pong(..) => return Ok(BinanceWebsocketMessage::Pong),\n\n Message::Ping(..) => return Ok(BinanceWebsocketMessage::Ping),\n\n Message::Close(..) 
=> return Ok(BinanceWebsocketMessage::Close),\n\n };\n\n\n\n serde_json::from_str(&msg).map_err(OpenLimitsError::JsonError)\n\n}\n", "file_path": "src/binance/client/websocket.rs", "rank": 2, "score": 115496.55660945199 }, { "content": "#[derive(Deserialize)]\n\nstruct BinanceWebsocketStream {\n\n #[serde(rename = \"stream\")]\n\n pub name: String,\n\n pub data: Value,\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for BinanceWebsocketMessage {\n\n fn deserialize<D>(deserializer: D) -> core::result::Result<Self, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n let stream: BinanceWebsocketStream = BinanceWebsocketStream::deserialize(deserializer)?;\n\n\n\n if stream.name.ends_with(\"@aggTrade\") {\n\n Ok(BinanceWebsocketMessage::AggregateTrade(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n\n ))\n\n } else if stream.name.contains(\"@trade\") {\n\n Ok(BinanceWebsocketMessage::Trade(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n", "file_path": "src/binance/client/websocket.rs", "rank": 3, "score": 90397.1879028519 }, { "content": "#[async_trait]\n\npub trait ExchangeInfoRetrieval: Sync {\n\n async fn get_pair(&self, name: &str) -> Result<MarketPairHandle>;\n\n async fn retrieve_pairs(&self) -> Result<Vec<MarketPair>>;\n\n async fn refresh_market_info(&self) -> Result<Vec<MarketPairHandle>>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct MarketPair {\n\n pub base: String,\n\n pub quote: String,\n\n pub symbol: String,\n\n pub base_increment: Decimal,\n\n pub quote_increment: Decimal,\n\n pub min_base_trade_size: Option<Decimal>,\n\n pub min_quote_trade_size: Option<Decimal>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MarketPairHandle {\n\n pub inner: Arc<RwLock<MarketPair>>,\n", "file_path": "src/exchange_info.rs", "rank": 4, "score": 87427.25960978286 }, { "content": "pub fn timestamp_to_naive_datetime(timestamp: u64) -> chrono::naive::NaiveDateTime {\n\n let seconds = (timestamp / 1000) as i64;\n\n let nanos = 
((timestamp % 1000) * 1_000_000) as u32;\n\n\n\n chrono::NaiveDateTime::from_timestamp(seconds, nanos)\n\n}\n\n\n", "file_path": "src/shared.rs", "rank": 5, "score": 67634.16532944998 }, { "content": "pub fn timestamp_to_utc_datetime(timestamp: u64) -> chrono::DateTime<chrono::Utc> {\n\n let d = timestamp_to_naive_datetime(timestamp);\n\n chrono::DateTime::<chrono::Utc>::from_utc(d, chrono::Utc)\n\n}\n", "file_path": "src/shared.rs", "rank": 6, "score": 66198.15495627497 }, { "content": "type WSStream = WebSocketStream<MaybeTlsStream<TcpStream>>;\n\n\n\npub struct CoinbaseWebsocket {\n\n pub subscriptions: HashMap<CoinbaseSubscription, SplitStream<WSStream>>,\n\n pub parameters: CoinbaseParameters,\n\n disconnection_senders: Mutex<Vec<UnboundedSender<()>>>,\n\n}\n\n\n\nimpl CoinbaseWebsocket {\n\n pub fn new(parameters: CoinbaseParameters) -> Self {\n\n Self {\n\n subscriptions: Default::default(),\n\n parameters,\n\n disconnection_senders: Default::default(),\n\n }\n\n }\n\n\n\n pub async fn subscribe_(&mut self, subscription: CoinbaseSubscription) -> Result<()> {\n\n let (channels, product_ids) = match &subscription {\n\n CoinbaseSubscription::Level2(product_id) => (\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 7, "score": 59985.75342014043 }, { "content": "pub fn get_pair<'a>(name: &str, exchange_info: &'a ExchangeInfo) -> Result<MarketPairHandle> {\n\n exchange_info.get_pair(name)\n\n}\n\n\n", "file_path": "src/exchange_info.rs", "rank": 8, "score": 59519.84419099382 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\nfn main() {}", "file_path": "build.rs", "rank": 9, "score": 57066.61250315179 }, { "content": "#[cfg(target_os = \"windows\")]\n\nfn main() {\n\n use std::process::Command;\n\n use std::path::Path;\n\n\n\n let rustup_output = Command::new(\"rustup\")\n\n .arg(\"which\")\n\n .arg(\"rustc\")\n\n .output()\n\n .expect(\"Couldn't get rustup output.\");\n\n let rustc_path = String::from_utf8(rustup_output.stdout).expect(\"Couldn't 
get toolchain path\");\n\n let toolchain_path = Path::new(&rustc_path)\n\n .parent().unwrap()\n\n .parent().unwrap();\n\n\n\n let toolchain_triple = toolchain_path\n\n .file_name()\n\n .map(|name| name.to_string_lossy().to_string())\n\n .map(|name| name.replace(\"stable-\", \"\"))\n\n .expect(\"Couldn't get toolchain triple.\");\n\n let architecture = if let Some(_) = toolchain_triple.find(\"x86_64\") {\n", "file_path": "build.rs", "rank": 10, "score": 57066.61250315179 }, { "content": "// chrono::Duration does not have a serde serialize/deserialize option\n\nstruct TimeInForceVisitor;\n\n\n\nimpl<'de> Visitor<'de> for TimeInForceVisitor {\n\n type Value = TimeInForce;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"an string, either GTC, IOC, FOK, GTT,duration\")\n\n }\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n if v.starts_with(\"GTT,\") {\n\n match v[4..].parse::<u64>() {\n\n Ok(v) => Ok(TimeInForce::GoodTillTime(Duration::milliseconds(v as i64))),\n\n _ => Err(E::custom(format!(\"Invalid GTG: {}\", v))),\n\n }\n\n } else {\n\n match v {\n\n \"GTC\" => Ok(TimeInForce::GoodTillCancelled),\n", "file_path": "src/model/mod.rs", "rank": 11, "score": 51574.02577249582 }, { "content": "fn try_split_paginator(\n\n paginator: Option<Paginator>,\n\n) -> crate::shared::Result<(\n\n Option<String>,\n\n Option<i64>,\n\n Option<nash_protocol::types::DateTimeRange>,\n\n)> {\n\n Ok(match paginator {\n\n Some(paginator) => (\n\n paginator.before,\n\n match paginator.limit {\n\n Some(v) => Some(i64::try_from(v).map_err(|_| {\n\n OpenLimitsError::InvalidParameter(\n\n \"Couldn't convert paginator limit to i64\".to_string(),\n\n )\n\n })?),\n\n None => None,\n\n },\n\n if paginator.start_time.is_some() && paginator.end_time.is_some() {\n\n Some(nash_protocol::types::DateTimeRange {\n", "file_path": "src/nash/mod.rs", "rank": 12, "score": 50582.08388160035 }, { "content": 
"#[async_trait]\n\npub trait ExchangeAccount {\n\n async fn limit_buy(&self, req: &OpenLimitOrderRequest) -> Result<Order>;\n\n async fn limit_sell(&self, req: &OpenLimitOrderRequest) -> Result<Order>;\n\n async fn market_buy(&self, req: &OpenMarketOrderRequest) -> Result<Order>;\n\n async fn market_sell(&self, req: &OpenMarketOrderRequest) -> Result<Order>;\n\n async fn cancel_order(&self, req: &CancelOrderRequest) -> Result<OrderCanceled>;\n\n async fn cancel_all_orders(&self, req: &CancelAllOrdersRequest) -> Result<Vec<OrderCanceled>>;\n\n async fn get_all_open_orders(&self) -> Result<Vec<Order>>;\n\n async fn get_order_history(&self, req: &GetOrderHistoryRequest) -> Result<Vec<Order>>;\n\n async fn get_trade_history(&self, req: &TradeHistoryRequest) -> Result<Vec<Trade>>;\n\n async fn get_account_balances(&self, paginator: Option<Paginator>) -> Result<Vec<Balance>>;\n\n async fn get_order(&self, req: &GetOrderRequest) -> Result<Order>;\n\n}\n", "file_path": "src/exchange.rs", "rank": 13, "score": 49346.80818312607 }, { "content": "#[async_trait]\n\npub trait ExchangeMarketData {\n\n async fn order_book(&self, req: &OrderBookRequest) -> Result<OrderBookResponse>;\n\n async fn get_price_ticker(&self, req: &GetPriceTickerRequest) -> Result<Ticker>;\n\n async fn get_historic_rates(&self, req: &GetHistoricRatesRequest) -> Result<Vec<Candle>>;\n\n async fn get_historic_trades(&self, req: &GetHistoricTradesRequest) -> Result<Vec<Trade>>;\n\n}\n\n\n", "file_path": "src/exchange.rs", "rank": 14, "score": 47939.49257124301 }, { "content": "pub trait BookLevel {\n\n fn level() -> u8;\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct BookRecordL1 {\n\n #[serde(with = \"string_to_decimal\")]\n\n pub price: Decimal,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub size: Decimal,\n\n pub num_orders: usize,\n\n}\n\n\n\nimpl BookLevel for BookRecordL1 {\n\n fn level() -> u8 {\n\n 1\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n", 
"file_path": "src/coinbase/model/mod.rs", "rank": 15, "score": 46641.8614312603 }, { "content": "type HmacSha256 = Hmac<Sha256>;\n\n\n\nstatic RECV_WINDOW: usize = 7000;\n\n\n\n#[derive(Clone)]\n\npub struct Transport {\n\n credential: Option<(String, String)>,\n\n client: reqwest::Client,\n\n pub recv_window: usize,\n\n base_url: String,\n\n}\n\n\n\nimpl Transport {\n\n pub fn new(sandbox: bool) -> Result<Self> {\n\n let default_headers = Transport::default_headers(None);\n\n let client = reqwest::Client::builder()\n\n .default_headers(default_headers)\n\n .build()?;\n\n\n\n Ok(Transport {\n", "file_path": "src/binance/transport.rs", "rank": 16, "score": 45140.135832319414 }, { "content": "type HmacSha256 = Hmac<Sha256>;\n\n\n\n#[derive(Clone)]\n\npub struct Transport {\n\n api_secret: Option<String>,\n\n client: reqwest::Client,\n\n base_url: String,\n\n}\n\n\n\nimpl Transport {\n\n pub fn new(sandbox: bool) -> Result<Self> {\n\n let default_headers = Transport::default_headers();\n\n\n\n let client = reqwest::Client::builder()\n\n .default_headers(default_headers)\n\n .build()?;\n\n\n\n Ok(Transport {\n\n client,\n\n api_secret: None,\n", "file_path": "src/coinbase/transport.rs", "rank": 17, "score": 45140.135832319414 }, { "content": "fn to_decimal(v: &Value) -> Decimal {\n\n v.as_str()\n\n .expect(\"Couldn't get JSON Value as str.\")\n\n .parse()\n\n .expect(\"Couldn't parse str as Decimal.\")\n\n}\n", "file_path": "src/binance/client/market.rs", "rank": 18, "score": 42900.46836895768 }, { "content": "fn to_i64(v: &Value) -> i64 {\n\n v.as_i64().expect(\"Couldn't get JSON Value as i64.\")\n\n}\n\n\n", "file_path": "src/binance/client/market.rs", "rank": 19, "score": 42900.46836895768 }, { "content": "use nash_native_client::Environment;\n\nuse openlimits::shared::Result;\n\nuse openlimits::{model::websocket::Subscription, nash::NashWebsocket};\n\nuse tokio::time::Duration;\n\n\n\nuse openlimits::binance::{BinanceParameters, BinanceWebsocket};\n\nuse 
openlimits::coinbase::client::websocket::CoinbaseWebsocket;\n\nuse openlimits::coinbase::CoinbaseParameters;\n\nuse openlimits::exchange_ws::ExchangeWs;\n\nuse openlimits::nash::NashParameters;\n\nuse openlimits::reconnectable_ws::ReconnectableWebsocket;\n\nuse std::sync::mpsc::sync_channel;\n\nuse std::sync::{Arc, Mutex};\n\n\n\nasync fn test_subscription_callback<E: ExchangeWs + 'static>(\n\n websocket: ReconnectableWebsocket<E>,\n\n sub: Subscription,\n\n) {\n\n let disconnections = Arc::new(Mutex::new(0 as u32));\n\n let (tx, rx) = sync_channel(0);\n", "file_path": "tests/any_exchange/reconnection.rs", "rank": 22, "score": 36578.048481956124 }, { "content": " let websocket = Arc::new(websocket);\n\n let weak_websocket = Arc::downgrade(&websocket);\n\n websocket\n\n .subscribe(sub, move |message| match message.as_ref() {\n\n Ok(_message) => {\n\n if let Ok(disconnections) = disconnections.lock().map(|value| *value) {\n\n if disconnections >= 2 {\n\n tx.send(()).expect(\"Couldn't send sync.\");\n\n }\n\n }\n\n let websocket = weak_websocket.upgrade().expect(\"Couldn't get websocket.\");\n\n tokio::spawn(async move { websocket.disconnect().await });\n\n }\n\n Err(_error) => {\n\n *disconnections\n\n .lock()\n\n .expect(\"Couldn't lock disconnections.\") += 1;\n\n }\n\n })\n\n .await\n", "file_path": "tests/any_exchange/reconnection.rs", "rank": 27, "score": 36568.2754281955 }, { "content": "async fn binance() {\n\n let client = init_binance().await;\n\n let sub = Subscription::OrderBookUpdates(\"bnbbtc\".to_string());\n\n test_subscription_callback(client.expect(\"Couldn't create client.\"), sub).await;\n\n}\n\n\n\nasync fn init_coinbase() -> Result<ReconnectableWebsocket<CoinbaseWebsocket>> {\n\n ReconnectableWebsocket::instantiate(\n\n CoinbaseParameters {\n\n credentials: None,\n\n sandbox: true,\n\n },\n\n Duration::from_secs_f32(1.0),\n\n )\n\n .await\n\n}\n\n\n\nasync fn init_nash() -> Result<ReconnectableWebsocket<NashWebsocket>> {\n\n 
ReconnectableWebsocket::instantiate(\n\n NashParameters {\n", "file_path": "tests/any_exchange/reconnection.rs", "rank": 28, "score": 36565.9769105866 }, { "content": " timeout: Duration::from_secs_f32(2.0),\n\n client_id: 123,\n\n credentials: None,\n\n affiliate_code: None,\n\n environment: Environment::Production,\n\n sign_states_loop_interval: None,\n\n },\n\n Duration::from_secs_f32(1.0),\n\n )\n\n .await\n\n}\n\n\n\nasync fn init_binance() -> Result<ReconnectableWebsocket<BinanceWebsocket>> {\n\n ReconnectableWebsocket::instantiate(\n\n BinanceParameters {\n\n sandbox: false,\n\n credentials: None,\n\n },\n\n Duration::from_secs_f32(1.0),\n\n )\n\n .await\n\n}\n", "file_path": "tests/any_exchange/reconnection.rs", "rank": 30, "score": 36562.01003529361 }, { "content": " .expect(\"Couldn't subscribe\");\n\n rx.recv_timeout(Duration::from_secs_f32(10.0))\n\n .expect(\"Couldn't receive sync.\");\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn coinbase() {\n\n let client = init_coinbase().await;\n\n let sub = Subscription::OrderBookUpdates(\"BTC-USD\".to_string());\n\n test_subscription_callback(client.expect(\"Couldn't create client.\"), sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn nash() {\n\n let client = init_nash().await;\n\n let sub = Subscription::OrderBookUpdates(\"btc_usdc\".to_string());\n\n test_subscription_callback(client.expect(\"Couldn't create client.\"), sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n", "file_path": "tests/any_exchange/reconnection.rs", "rank": 31, "score": 36560.652400375184 }, { "content": " m.as_ref().expect(\"Couldn't get response.\");\n\n tx.send(()).expect(\"Couldn't send sync message.\");\n\n })\n\n .await\n\n .expect(\"Couldn't subscribe.\");\n\n\n\n rx.recv().expect(\"Couldn't receive sync message.\");\n\n}\n\n\n\nasync fn test_account_subscription_callback(\n\n websocket: 
OpenLimitsWs<NashWebsocket>,\n\n sub: Subscription,\n\n cancel_orders: bool,\n\n) {\n\n let (tx, rx) = sync_channel(0);\n\n\n\n websocket\n\n .subscribe(sub, move |m| {\n\n m.as_ref().expect(\"Couldn't get response.\");\n\n tx.send(()).expect(\"Couldn't send sync message.\");\n", "file_path": "tests/nash/websocket.rs", "rank": 32, "score": 35680.92798291303 }, { "content": " credentials: Some(NashCredentials {\n\n secret: env::var(\"NASH_API_SECRET\").expect(\"Couldn't get environment variable.\"),\n\n session: env::var(\"NASH_API_KEY\").expect(\"Couldn't get environment variable.\"),\n\n }),\n\n environment: Environment::Sandbox,\n\n client_id: 1,\n\n timeout: NativeDuration::new(10, 0),\n\n sign_states_loop_interval: None,\n\n };\n\n\n\n OpenLimits::instantiate(parameters)\n\n .await\n\n .expect(\"Failed to create Client\")\n\n}\n\n\n\nasync fn test_subscription_callback(websocket: OpenLimitsWs<NashWebsocket>, sub: Subscription) {\n\n let (tx, rx) = sync_channel(0);\n\n\n\n websocket\n\n .subscribe(sub, move |m| {\n", "file_path": "tests/nash/websocket.rs", "rank": 33, "score": 35671.38835745391 }, { "content": "use crate::openlimits::exchange::ExchangeAccount;\n\nuse dotenv::dotenv;\n\nuse nash_native_client::Environment;\n\nuse openlimits::exchange::OpenLimits;\n\nuse openlimits::exchange_ws::ExchangeWs;\n\nuse openlimits::model::websocket::AccountOrders;\n\nuse openlimits::model::{CancelAllOrdersRequest, OpenLimitOrderRequest, TimeInForce};\n\nuse openlimits::nash::{Nash, NashCredentials, NashParameters};\n\nuse openlimits::{exchange_ws::OpenLimitsWs, model::websocket::Subscription, nash::NashWebsocket};\n\nuse rust_decimal::Decimal;\n\nuse std::str::FromStr;\n\nuse std::time::Duration as NativeDuration;\n\nuse std::{env, sync::mpsc::sync_channel};\n\nuse tokio::time::Duration;\n\n\n\nasync fn init_exchange() -> Nash {\n\n dotenv().ok();\n\n\n\n let parameters = NashParameters {\n\n affiliate_code: None,\n", "file_path": "tests/nash/websocket.rs", "rank": 34, 
"score": 35662.99230224671 }, { "content": " market_pair: Some(\"eth_btc\".to_string()),\n\n };\n\n\n\n let resp = exchange\n\n .cancel_all_orders(&req)\n\n .await\n\n .expect(\"Couldn't cancel all orders.\");\n\n println!(\"{:?}\", resp);\n\n }\n\n\n\n rx.recv().expect(\"Couldn't receive sync message.\");\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn account_orders() {\n\n let client = init().await;\n\n let sub = Subscription::AccountOrders(AccountOrders {\n\n market: Some(\"eth_btc\".to_string()),\n\n order_type: None,\n\n status: None,\n", "file_path": "tests/nash/websocket.rs", "rank": 35, "score": 35659.98086671665 }, { "content": "#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn orderbook() {\n\n let client = init().await;\n\n let sub = Subscription::OrderBookUpdates(\"btc_usdc\".to_string());\n\n test_subscription_callback(client, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn trades() {\n\n let client = init().await;\n\n let sub = Subscription::Trades(\"btc_usdc\".to_string());\n\n test_subscription_callback(client, sub).await;\n\n}\n\n\n\nasync fn init() -> OpenLimitsWs<NashWebsocket> {\n\n dotenv().ok();\n\n\n\n let websocket = NashWebsocket::new(NashParameters {\n\n credentials: Some(NashCredentials {\n\n secret: env::var(\"NASH_API_SECRET\").expect(\"Couldn't get environment variable.\"),\n", "file_path": "tests/nash/websocket.rs", "rank": 36, "score": 35658.65980268255 }, { "content": "use super::{OrderBookResponse, Trade};\n\nuse crate::model::{OrderStatus, OrderType, Side};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::ops::Range;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct AccountOrders {\n\n pub market: Option<String>,\n\n pub order_type: Option<Vec<OrderType>>,\n\n pub buy_or_sell: Option<Side>,\n\n pub range: Option<Range<u64>>,\n\n pub status: 
Option<Vec<OrderStatus>>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum Subscription {\n\n Ticker(String), // symbol\n\n OrderBookUpdates(String), // symbol\n\n Trades(String), // symbol\n\n AccountTrades(String), // symbol\n", "file_path": "src/model/websocket.rs", "rank": 37, "score": 35658.174555141115 }, { "content": "// FIXME: These test cases aren't implemented.\n\n// The purpose of this module is to be sure we have the same functionalities across all the\n\n// supported exchanges.\n\n\n\nuse dotenv::dotenv;\n\nuse nash_native_client::Environment;\n\nuse openlimits::any_exchange::{AnyExchange, AnyWsExchange};\n\nuse openlimits::binance::{Binance, BinanceCredentials, BinanceParameters};\n\nuse openlimits::coinbase::client::websocket::CoinbaseWebsocket;\n\nuse openlimits::coinbase::{Coinbase, CoinbaseCredentials, CoinbaseParameters};\n\nuse openlimits::exchange::OpenLimits;\n\nuse openlimits::exchange_ws::OpenLimitsWs;\n\nuse openlimits::nash::{Nash, NashCredentials, NashParameters};\n\nuse openlimits::shared::Result;\n\nuse std::env;\n\nuse tokio::time::Duration;\n\n\n\n#[tokio::test]\n\nasync fn account_test() {\n\n let _exchange = init().await;\n", "file_path": "tests/any_exchange/websocket.rs", "rank": 38, "score": 35656.08067819529 }, { "content": " buy_or_sell: None,\n\n range: None,\n\n });\n\n test_account_subscription_callback(client, sub, true).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn account_trades() {\n\n let client = init().await;\n\n let sub = Subscription::AccountTrades(\"eth_btc\".to_string());\n\n test_account_subscription_callback(client, sub, false).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn account_balance() {\n\n let client = init().await;\n\n let sub = Subscription::AccountBalance(\"eth\".to_string());\n\n test_account_subscription_callback(client, sub, false).await;\n\n}\n\n\n", "file_path": 
"tests/nash/websocket.rs", "rank": 39, "score": 35655.41915505281 }, { "content": "}\n\n\n\n#[tokio::test]\n\nasync fn ws_test() {\n\n let _websocket = init_ws().await;\n\n}\n\n\n\nasync fn _nash() -> Result<Nash> {\n\n let parameters = NashParameters {\n\n affiliate_code: None,\n\n credentials: Some(NashCredentials {\n\n secret: env::var(\"NASH_API_SECRET\").expect(\"Couldn't get environment variable.\"),\n\n session: env::var(\"NASH_API_KEY\").expect(\"Couldn't get environment variable.\"),\n\n }),\n\n environment: Environment::Sandbox,\n\n client_id: 1,\n\n timeout: Duration::from_secs_f32(10.0),\n\n sign_states_loop_interval: None,\n\n };\n\n OpenLimits::instantiate(parameters).await\n", "file_path": "tests/any_exchange/websocket.rs", "rank": 40, "score": 35654.57568267983 }, { "content": " session: env::var(\"NASH_API_KEY\").expect(\"Couldn't get environment variable.\"),\n\n }),\n\n affiliate_code: None,\n\n client_id: 1234,\n\n environment: Environment::Sandbox,\n\n timeout: Duration::from_secs(10),\n\n sign_states_loop_interval: None,\n\n })\n\n .await\n\n .expect(\"Couldn't connect.\");\n\n\n\n OpenLimitsWs { websocket }\n\n}\n", "file_path": "tests/nash/websocket.rs", "rank": 41, "score": 35653.08586869406 }, { "content": " }),\n\n };\n\n OpenLimits::instantiate(parameters).await\n\n}\n\n\n\nasync fn init() -> Result<AnyExchange> {\n\n dotenv().ok();\n\n coinbase().await.map(|exchange| exchange.into())\n\n}\n\n\n\nasync fn coinbase_websocket() -> OpenLimitsWs<CoinbaseWebsocket> {\n\n dotenv().ok();\n\n\n\n let websocket = CoinbaseWebsocket::new(CoinbaseParameters {\n\n sandbox: false,\n\n credentials: Some(CoinbaseCredentials {\n\n api_secret: env::var(\"COINBASE_API_SECRET\").unwrap(),\n\n api_key: env::var(\"COINBASE_API_KEY\").unwrap(),\n\n passphrase: env::var(\"COINBASE_PASSPHRASE\").unwrap(),\n\n }),\n\n });\n\n OpenLimitsWs { websocket }\n\n}\n\n\n\nasync fn init_ws() -> AnyWsExchange {\n\n coinbase_websocket().await.into()\n\n}\n", "file_path": 
"tests/any_exchange/websocket.rs", "rank": 42, "score": 35651.94287182948 }, { "content": " AccountBalance(String), // symbol\n\n AccountOrders(AccountOrders),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum WebSocketResponse<T> {\n\n Generic(OpenLimitsWebSocketMessage),\n\n Raw(T),\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub enum OpenLimitsWebSocketMessage {\n\n Ping,\n\n OrderBook(OrderBookResponse),\n\n OrderBookDiff(OrderBookResponse),\n\n Trades(Vec<Trade>),\n\n}\n", "file_path": "src/model/websocket.rs", "rank": 43, "score": 35650.83967345624 }, { "content": "}\n\n\n\nasync fn _binance() -> Result<Binance> {\n\n let parameters = BinanceParameters {\n\n credentials: Some(BinanceCredentials {\n\n api_key: env::var(\"BINANCE_API_KEY\").expect(\"Couldn't get environment variable.\"),\n\n api_secret: env::var(\"BINANCE_API_SECRET\").expect(\"Couldn't get environment variable.\"),\n\n }),\n\n sandbox: true,\n\n };\n\n OpenLimits::instantiate(parameters).await\n\n}\n\n\n\nasync fn coinbase() -> Result<Coinbase> {\n\n let parameters = CoinbaseParameters {\n\n sandbox: true,\n\n credentials: Some(CoinbaseCredentials {\n\n api_key: env::var(\"COINBASE_API_KEY\").unwrap(),\n\n api_secret: env::var(\"COINBASE_API_SECRET\").unwrap(),\n\n passphrase: env::var(\"COINBASE_PASSPHRASE\").unwrap(),\n", "file_path": "tests/any_exchange/websocket.rs", "rank": 44, "score": 35646.43922103795 }, { "content": " })\n\n .await\n\n .expect(\"Couldn't subscribe.\");\n\n\n\n let exchange = init_exchange().await;\n\n let req = OpenLimitOrderRequest {\n\n time_in_force: TimeInForce::GoodTillCancelled,\n\n price: Decimal::from_str(\"0.01\").expect(\"Couldn't parse string.\"),\n\n size: Decimal::from_str(\"0.1\").expect(\"Couldn't parse string.\"),\n\n market_pair: String::from(\"eth_btc\"),\n\n post_only: false,\n\n };\n\n\n\n exchange\n\n .limit_buy(&req)\n\n .await\n\n .expect(\"Couldn't limit sell.\");\n\n\n\n if cancel_orders {\n\n let req = CancelAllOrdersRequest {\n", 
"file_path": "tests/nash/websocket.rs", "rank": 45, "score": 35643.16369010782 }, { "content": "use std::sync::mpsc::sync_channel;\n\n\n\nuse openlimits::{\n\n binance::{client::websocket::BinanceWebsocket, BinanceParameters},\n\n exchange_ws::{ExchangeWs, OpenLimitsWs},\n\n model::websocket::Subscription,\n\n};\n\n\n\nasync fn test_subscription_callback(websocket: OpenLimitsWs<BinanceWebsocket>, sub: Subscription) {\n\n let (tx, rx) = sync_channel(0);\n\n\n\n websocket\n\n .subscribe(sub, move |m| {\n\n m.as_ref().expect(\"Couldn't get response.\");\n\n tx.send(()).expect(\"Couldn't send sync message.\");\n\n })\n\n .await\n\n .expect(\"Couldn't subscribe.\");\n\n\n\n rx.recv().expect(\"Couldn't receive sync message.\");\n", "file_path": "tests/binance/ws_callbacks.rs", "rank": 46, "score": 35004.89908277085 }, { "content": "}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn orderbook() {\n\n let ws = init().await;\n\n let sub = Subscription::OrderBookUpdates(\"bnbbtc\".to_string());\n\n test_subscription_callback(ws, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn trades() {\n\n let ws = init().await;\n\n let sub = Subscription::Trades(\"btcusdt\".to_string());\n\n test_subscription_callback(ws, sub).await;\n\n}\n\n\n\nasync fn init() -> OpenLimitsWs<BinanceWebsocket> {\n\n OpenLimitsWs {\n\n websocket: BinanceWebsocket::new(BinanceParameters::prod())\n\n .await\n\n .expect(\"Failed to create Client\"),\n\n }\n\n}\n", "file_path": "tests/binance/ws_callbacks.rs", "rank": 47, "score": 34981.54214039709 }, { "content": "#[async_trait]\n\npub trait Exchange: ExchangeInfoRetrieval + ExchangeAccount + ExchangeMarketData + Sized {\n\n type InitParams;\n\n type InnerClient;\n\n async fn new(params: Self::InitParams) -> Result<Self>;\n\n fn inner_client(&self) -> Option<&Self::InnerClient>;\n\n}\n\n\n", "file_path": "src/exchange.rs", "rank": 48, "score": 34709.1008143585 }, { "content": 
"use openlimits::{\n\n binance::{\n\n client::websocket::BinanceWebsocket, model::websocket::BinanceSubscription,\n\n BinanceParameters,\n\n },\n\n exchange_ws::ExchangeWs,\n\n};\n\nuse std::sync::mpsc::sync_channel;\n\n\n\nasync fn test_subscription_callback(websocket: BinanceWebsocket, sub: BinanceSubscription) {\n\n let (tx, rx) = sync_channel(0);\n\n\n\n websocket\n\n .subscribe(sub, move |m| {\n\n m.as_ref().expect(\"Couldn't get response.\");\n\n tx.send(()).expect(\"Couldn't send sync message.\");\n\n })\n\n .await\n\n .expect(\"Couldn't subscribe.\");\n\n\n", "file_path": "tests/apis/binance/websocket.rs", "rank": 49, "score": 34039.684895664184 }, { "content": "use openlimits::coinbase::{\n\n client::websocket::CoinbaseWebsocket, model::websocket::CoinbaseSubscription,\n\n CoinbaseParameters,\n\n};\n\nuse openlimits::exchange_ws::ExchangeWs;\n\nuse openlimits::model::websocket::{OpenLimitsWebSocketMessage, WebSocketResponse};\n\nuse std::sync::mpsc::sync_channel;\n\nuse std::time::Duration;\n\n\n\nasync fn test_subscription_callback(\n\n websocket: CoinbaseWebsocket,\n\n sub: CoinbaseSubscription,\n\n expected_messages: Vec<OpenLimitsWebSocketMessage>,\n\n) {\n\n let (tx, rx) = sync_channel(0);\n\n\n\n let mut received_messages: Vec<bool> = expected_messages.iter().map(|_| false).collect();\n\n\n\n websocket\n\n .subscribe(sub, move |message| {\n", "file_path": "tests/apis/coinbase/websocket.rs", "rank": 50, "score": 34039.45575880161 }, { "content": " rx.recv_timeout(Duration::from_secs(3))\n\n .expect(\"Couldn't receive sync message.\");\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn order_book() {\n\n let websocket = init().await;\n\n let sub = CoinbaseSubscription::Level2(\"BTC-USD\".to_string());\n\n let expected = vec![\n\n OpenLimitsWebSocketMessage::OrderBook(Default::default()),\n\n OpenLimitsWebSocketMessage::OrderBookDiff(Default::default()),\n\n ];\n\n test_subscription_callback(websocket, sub, 
expected).await;\n\n}\n\n\n\nasync fn init() -> CoinbaseWebsocket {\n\n CoinbaseWebsocket::new(CoinbaseParameters {\n\n sandbox: true,\n\n credentials: None,\n\n })\n\n}\n", "file_path": "tests/apis/coinbase/websocket.rs", "rank": 51, "score": 34025.94700567293 }, { "content": " rx.recv().expect(\"Couldn't receive sync message.\");\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn aggregate_trade() {\n\n let websocket = init().await;\n\n let sub = BinanceSubscription::AggregateTrade(\"bnbbtc\".to_string());\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn candlestick() {\n\n let websocket = init().await;\n\n let sub = BinanceSubscription::Candlestick(\"bnbbtc\".to_string(), \"1m\".to_string());\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn depth() {\n\n let websocket = init().await;\n", "file_path": "tests/apis/binance/websocket.rs", "rank": 52, "score": 34024.7457998738 }, { "content": "use crate::{\n\n binance::{\n\n model::websocket::{BinanceSubscription, BinanceWebsocketMessage},\n\n BinanceParameters,\n\n },\n\n errors::OpenLimitsError,\n\n exchange_ws::ExchangeWs,\n\n exchange_ws::Subscriptions,\n\n model::websocket::OpenLimitsWebSocketMessage,\n\n model::websocket::Subscription,\n\n model::websocket::WebSocketResponse,\n\n shared::Result,\n\n};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::{stream::BoxStream, SinkExt, StreamExt};\n\nuse serde::{de, Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse std::sync::Mutex;\n\nuse std::{convert::TryFrom, fmt::Display};\n\nuse tokio::sync::mpsc::{unbounded_channel, UnboundedSender};\n\nuse tokio_tungstenite::{connect_async, tungstenite::protocol::Message};\n\n\n\nconst WS_URL_PROD: &str = \"wss://stream.binance.com:9443/stream\";\n\nconst WS_URL_SANDBOX: &str = 
\"wss://testnet.binance.vision/stream\";\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(untagged)]\n", "file_path": "src/binance/client/websocket.rs", "rank": 53, "score": 34023.09582255776 }, { "content": "\n\n pub async fn connect(&self, subscribe: Subscribe) -> Result<SplitStream<WSStream>> {\n\n let ws_url = if self.parameters.sandbox {\n\n WS_URL_SANDBOX\n\n } else {\n\n WS_URL_PROD\n\n };\n\n let url = url::Url::parse(ws_url).expect(\"Couldn't parse url.\");\n\n let (ws_stream, _) = connect_async(&url).await?;\n\n let (mut sink, stream) = ws_stream.split();\n\n let subscribe = serde_json::to_string(&subscribe)?;\n\n\n\n sink.send(Message::Text(subscribe)).await?;\n\n Ok(stream)\n\n }\n\n}\n\n\n\nimpl Stream for CoinbaseWebsocket {\n\n type Item = Result<CoinbaseWebsocketMessage>;\n\n\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 54, "score": 34021.48078759435 }, { "content": "use async_trait::async_trait;\n\nuse std::{collections::HashMap, pin::Pin, task::Poll};\n\n\n\nuse futures::{\n\n stream::{SplitStream, Stream},\n\n SinkExt, StreamExt,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\nuse tokio::net::TcpStream;\n\n\n\nuse tokio_tungstenite::tungstenite::Message;\n\nuse tokio_tungstenite::{connect_async, MaybeTlsStream, WebSocketStream};\n\n\n\nuse crate::{\n\n coinbase::model::websocket::{\n\n Channel, CoinbaseSubscription, CoinbaseWebsocketMessage, Subscribe, SubscribeCmd,\n\n },\n\n errors::OpenLimitsError,\n\n shared::Result,\n\n};\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 55, "score": 34019.2466445153 }, { "content": " })\n\n }\n\n\n\n async fn disconnect(&self) {\n\n if let Ok(mut senders) = self.disconnection_senders.lock() {\n\n for sender in senders.iter() {\n\n sender.send(()).ok();\n\n }\n\n senders.clear();\n\n }\n\n }\n\n\n\n async fn create_stream_specific(\n\n &self,\n\n subscriptions: Subscriptions<Self::Subscription>,\n\n ) -> Result<BoxStream<'static, Result<Self::Response>>> {\n\n let 
streams = subscriptions\n\n .into_iter()\n\n .map(|bs| bs.to_string())\n\n .collect::<Vec<String>>()\n", "file_path": "src/binance/client/websocket.rs", "rank": 56, "score": 34018.72896775577 }, { "content": " let sub = BinanceSubscription::Depth(\"bnbbtc\".to_string(), Some(1));\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn mini_ticker() {\n\n let websocket = init().await;\n\n let sub = BinanceSubscription::MiniTicker(\"bnbbtc\".to_string());\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn mini_ticker_all() {\n\n let websocket = init().await;\n\n let sub = BinanceSubscription::MiniTickerAll;\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn order_book() {\n", "file_path": "tests/apis/binance/websocket.rs", "rank": 57, "score": 34018.15476889218 }, { "content": " if let Ok(message) = message.as_ref() {\n\n if let WebSocketResponse::Generic(message) = message {\n\n let expected_iter = expected_messages.iter().map(|expected| {\n\n std::mem::discriminant(expected) == std::mem::discriminant(&message)\n\n });\n\n for (already_received, currently_received) in\n\n received_messages.iter_mut().zip(expected_iter)\n\n {\n\n if !*already_received {\n\n *already_received = currently_received;\n\n }\n\n }\n\n if received_messages.iter().all(|received| *received) {\n\n tx.send(()).expect(\"Couldn't send sync message.\");\n\n }\n\n }\n\n }\n\n })\n\n .await\n\n .expect(\"Couldn't subscribe.\");\n", "file_path": "tests/apis/coinbase/websocket.rs", "rank": 58, "score": 34018.018603024844 }, { "content": " let websocket = init().await;\n\n let sub = BinanceSubscription::OrderBook(\"bnbbtc\".to_string(), 10);\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = 
\"multi_thread\", worker_threads = 2)]\n\nasync fn ticker() {\n\n let websocket = init().await;\n\n let sub = BinanceSubscription::Ticker(\"bnbbtc\".to_string());\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n\nasync fn ticker_all() {\n\n let websocket = init().await;\n\n let sub = BinanceSubscription::TickerAll;\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 2)]\n", "file_path": "tests/apis/binance/websocket.rs", "rank": 59, "score": 34017.9609656571 }, { "content": "}\n\n\n\nimpl From<InputMessage> for CoinbaseWebsocketMessage {\n\n fn from(msg: InputMessage) -> Self {\n\n match msg {\n\n InputMessage::Subscriptions { channels } => {\n\n CoinbaseWebsocketMessage::Subscriptions { channels }\n\n }\n\n InputMessage::Heartbeat {\n\n sequence,\n\n last_trade_id,\n\n product_id,\n\n time,\n\n } => CoinbaseWebsocketMessage::Heartbeat {\n\n sequence,\n\n last_trade_id,\n\n product_id,\n\n time,\n\n },\n\n InputMessage::Ticker(ticker) => CoinbaseWebsocketMessage::Ticker(ticker),\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 60, "score": 34017.47847149938 }, { "content": "async fn trade() {\n\n let websocket = init().await;\n\n let sub = BinanceSubscription::Trade(\"bnbbtc\".to_string());\n\n test_subscription_callback(websocket, sub).await;\n\n}\n\n\n\nasync fn init() -> BinanceWebsocket {\n\n BinanceWebsocket::new(BinanceParameters::sandbox())\n\n .await\n\n .expect(\"Failed to create Client\")\n\n}\n", "file_path": "tests/apis/binance/websocket.rs", "rank": 61, "score": 34017.41036611352 }, { "content": " CoinbaseSubscription::Level2(product_id) => (\n\n vec![Channel::Name(ChannelType::Level2)],\n\n vec![product_id.clone()],\n\n ),\n\n CoinbaseSubscription::Heartbeat(product_id) => (\n\n vec![Channel::Name(ChannelType::Heartbeat)],\n\n vec![product_id.clone()],\n\n ),\n\n _ => unimplemented!(),\n\n 
};\n\n let subscribe = Subscribe {\n\n _type: SubscribeCmd::Subscribe,\n\n auth: None,\n\n channels,\n\n product_ids,\n\n };\n\n let subscribe = serde_json::to_string(&subscribe)?;\n\n let (mut sink, stream) = ws_stream.split();\n\n let (disconnection_sender, mut disconnection_receiver) = unbounded_channel();\n\n sink.send(Message::Text(subscribe)).await?;\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 62, "score": 34017.16909982844 }, { "content": "\n\nuse crate::coinbase::model::websocket::ChannelType;\n\nuse crate::coinbase::CoinbaseParameters;\n\nuse crate::exchange_ws::{ExchangeWs, Subscriptions};\n\nuse futures::stream::BoxStream;\n\nuse std::sync::Mutex;\n\nuse tokio::sync::mpsc::{unbounded_channel, UnboundedSender};\n\n\n\nconst WS_URL_PROD: &str = \"wss://ws-feed.pro.coinbase.com\";\n\nconst WS_URL_SANDBOX: &str = \"wss://ws-feed-public.sandbox.pro.coinbase.com\";\n\n\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\n#[serde(untagged)]\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 63, "score": 34017.06042642117 }, { "content": " ))\n\n } else if stream.name.ends_with(\"@depth\") {\n\n Ok(BinanceWebsocketMessage::Depth(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n\n ))\n\n } else if stream.name.contains(\"@depth\") {\n\n Ok(BinanceWebsocketMessage::OrderBook(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n\n ))\n\n } else {\n\n panic!(\"Not supported Subscription\");\n\n }\n\n }\n\n}\n\n\n\nimpl Display for BinanceSubscription {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n BinanceSubscription::AggregateTrade(ref symbol) => write!(f, \"{}@aggTrade\", symbol),\n\n BinanceSubscription::Candlestick(ref symbol, ref interval) => {\n", "file_path": "src/binance/client/websocket.rs", "rank": 64, "score": 34017.005420769885 }, { "content": " fn from(subscription: Subscription) -> Self {\n\n match subscription {\n\n 
Subscription::OrderBookUpdates(symbol) => BinanceSubscription::Depth(symbol, None),\n\n Subscription::Trades(symbol) => BinanceSubscription::Trade(symbol),\n\n _ => unimplemented!(),\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<BinanceWebsocketMessage> for WebSocketResponse<BinanceWebsocketMessage> {\n\n type Error = OpenLimitsError;\n\n\n\n fn try_from(value: BinanceWebsocketMessage) -> Result<Self> {\n\n match value {\n\n BinanceWebsocketMessage::Depth(orderbook) => Ok(WebSocketResponse::Generic(\n\n OpenLimitsWebSocketMessage::OrderBook(orderbook.into()),\n\n )),\n\n BinanceWebsocketMessage::Trade(trade) => Ok(WebSocketResponse::Generic(\n\n OpenLimitsWebSocketMessage::Trades(trade.into()),\n\n )),\n\n BinanceWebsocketMessage::Ping => {\n\n Ok(WebSocketResponse::Generic(OpenLimitsWebSocketMessage::Ping))\n\n }\n\n BinanceWebsocketMessage::Close => Err(OpenLimitsError::SocketError()),\n\n _ => Ok(WebSocketResponse::Raw(value)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/binance/client/websocket.rs", "rank": 65, "score": 34016.89482633964 }, { "content": " pub private: bool,\n\n pub user_id: Option<String>,\n\n #[serde(default)]\n\n pub profile_id: Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub enum StopType {\n\n Entry,\n\n Exit,\n\n}\n\n\n\nimpl From<Subscription> for CoinbaseSubscription {\n\n fn from(subscription: Subscription) -> Self {\n\n match subscription {\n\n Subscription::OrderBookUpdates(symbol) => CoinbaseSubscription::Level2(symbol),\n\n _ => unimplemented!(),\n\n }\n\n }\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 66, "score": 34016.391421075285 }, { "content": "use crate::coinbase::model::OrderSide;\n\nuse crate::errors::OpenLimitsError;\n\nuse crate::model::websocket::{OpenLimitsWebSocketMessage, Subscription, WebSocketResponse};\n\nuse crate::model::{AskBid, OrderBookResponse};\n\nuse crate::shared::Result;\n\nuse crate::shared::{string_to_decimal, 
string_to_opt_decimal};\n\nuse rust_decimal::prelude::Decimal;\n\nuse serde::{Deserialize, Deserializer, Serialize};\n\nuse std::convert::{TryFrom, TryInto};\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum CoinbaseSubscription {\n\n Heartbeat(String),\n\n Status,\n\n // Ticker(String),\n\n Level2(String),\n\n // User,\n\n // Matches,\n\n // FullChannel\n\n}\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 67, "score": 34015.97811804122 }, { "content": " tokio::spawn(async move {\n\n if disconnection_receiver.recv().await.is_some() {\n\n sink.close().await.ok();\n\n }\n\n });\n\n\n\n if let Ok(mut senders) = self.disconnection_senders.lock() {\n\n senders.push(disconnection_sender);\n\n }\n\n let s = stream.map(|message| parse_message(message?));\n\n\n\n Ok(s.boxed())\n\n }\n\n}\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 68, "score": 34015.533868012484 }, { "content": "\n\nimpl TryFrom<CoinbaseWebsocketMessage> for WebSocketResponse<CoinbaseWebsocketMessage> {\n\n type Error = OpenLimitsError;\n\n\n\n fn try_from(value: CoinbaseWebsocketMessage) -> Result<Self> {\n\n match value {\n\n CoinbaseWebsocketMessage::Level2(level2) => {\n\n Ok(WebSocketResponse::Generic(level2.try_into()?))\n\n }\n\n _ => Ok(WebSocketResponse::Raw(value)),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub enum Level2 {\n\n Snapshot {\n\n product_id: String,\n\n bids: Vec<Level2SnapshotRecord>,\n\n asks: Vec<Level2SnapshotRecord>,\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 69, "score": 34015.00187563144 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub enum CoinbaseWebsocketMessage {\n\n Subscriptions {\n\n channels: Vec<Channel>,\n\n },\n\n Heartbeat {\n\n sequence: usize,\n\n last_trade_id: usize,\n\n product_id: String,\n\n time: String,\n\n },\n\n Ticker(Ticker),\n\n Level2(Level2),\n\n Match(Match),\n\n Full(Full),\n\n Error {\n\n message: String,\n\n },\n\n}\n", "file_path": 
"src/coinbase/model/websocket.rs", "rank": 70, "score": 34014.58668012692 }, { "content": " for sender in senders.iter() {\n\n sender.send(()).ok();\n\n }\n\n senders.clear();\n\n }\n\n }\n\n\n\n async fn create_stream_specific(\n\n &self,\n\n subscription: Subscriptions<Self::Subscription>,\n\n ) -> Result<BoxStream<'static, Result<Self::Response>>> {\n\n let ws_url = if self.parameters.sandbox {\n\n WS_URL_SANDBOX\n\n } else {\n\n WS_URL_PROD\n\n };\n\n let endpoint = url::Url::parse(ws_url).expect(\"Couldn't parse url.\");\n\n let (ws_stream, _) = connect_async(endpoint).await?;\n\n\n\n let (channels, product_ids) = match &subscription.as_slice()[0] {\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 71, "score": 34014.452588387285 }, { "content": " fn poll_next(\n\n mut self: std::pin::Pin<&mut Self>,\n\n cx: &mut std::task::Context<'_>,\n\n ) -> Poll<Option<Self::Item>> {\n\n for (_sub, stream) in &mut self.subscriptions.iter_mut() {\n\n if let Poll::Ready(Some(message)) = Pin::new(stream).poll_next(cx) {\n\n let message = parse_message(message?);\n\n return Poll::Ready(Some(message));\n\n }\n\n }\n\n\n\n std::task::Poll::Pending\n\n }\n\n}\n\n\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 72, "score": 34013.913545598625 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse crate::shared::string_to_decimal;\n\n\n\nuse super::{\n\n AskBid, Kline, OrderBook, OrderExecType, OrderRejectReason, OrderStatus, OrderType, Side,\n\n TimeInForce,\n\n};\n\n\n\nuse rust_decimal::prelude::Decimal;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub enum BinanceSubscription {\n\n UserData(String), // listen key\n\n AggregateTrade(String), // symbol\n\n Trade(String), // symbol\n\n Candlestick(String, String), // (symbol, interval)\n\n MiniTicker(String), // symbol\n\n MiniTickerAll,\n\n Ticker(String), // symbol\n", "file_path": "src/binance/model/websocket.rs", "rank": 73, "score": 34013.48717151605 }, { "content": " 
InputMessage::Open(open) => CoinbaseWebsocketMessage::Full(Full::Open(open)),\n\n InputMessage::Done(done) => CoinbaseWebsocketMessage::Full(Full::Done(done)),\n\n InputMessage::Match(_match) => CoinbaseWebsocketMessage::Full(Full::Match(_match)),\n\n InputMessage::Change(change) => CoinbaseWebsocketMessage::Full(Full::Change(change)),\n\n InputMessage::Activate(activate) => {\n\n CoinbaseWebsocketMessage::Full(Full::Activate(activate))\n\n }\n\n InputMessage::Error { message } => CoinbaseWebsocketMessage::Error { message },\n\n }\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for CoinbaseWebsocketMessage {\n\n fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n Deserialize::deserialize(deserializer).map(|input_msg: InputMessage| input_msg.into())\n\n }\n\n}\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 74, "score": 34013.037297399154 }, { "content": " Close,\n\n Binary(Vec<u8>), // Unexpected, unparsed\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct TradeMessage {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n\n #[serde(rename = \"s\")]\n\n pub symbol: String,\n\n #[serde(rename = \"t\")]\n\n pub trade_id: u64,\n\n #[serde(rename = \"p\", with = \"string_to_decimal\")]\n\n pub price: Decimal,\n\n #[serde(rename = \"q\", with = \"string_to_decimal\")]\n\n pub qty: Decimal,\n\n #[serde(rename = \"b\")]\n", "file_path": "src/binance/model/websocket.rs", "rank": 75, "score": 34010.97963197999 }, { "content": " pub quote_volume: Decimal,\n\n #[serde(rename = \"O\")]\n\n pub open_time: u64,\n\n #[serde(rename = \"C\")]\n\n pub close_time: u64,\n\n #[serde(rename = \"F\")]\n\n pub first_trade_id: u64,\n\n #[serde(rename = \"L\")]\n\n pub last_trade_id: u64,\n\n #[serde(rename = \"n\")]\n\n pub num_trades: u64,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, 
Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct CandlestickMessage {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n", "file_path": "src/binance/model/websocket.rs", "rank": 76, "score": 34010.8316087225 }, { "content": " InputMessage::Snapshot {\n\n product_id,\n\n bids,\n\n asks,\n\n } => CoinbaseWebsocketMessage::Level2(Level2::Snapshot {\n\n product_id,\n\n bids,\n\n asks,\n\n }),\n\n InputMessage::L2update {\n\n product_id,\n\n changes,\n\n } => CoinbaseWebsocketMessage::Level2(Level2::L2update {\n\n product_id,\n\n changes,\n\n }),\n\n InputMessage::LastMatch(_match) => CoinbaseWebsocketMessage::Match(_match),\n\n InputMessage::Received(_match) => {\n\n CoinbaseWebsocketMessage::Full(Full::Received(_match))\n\n }\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 77, "score": 34010.705277768866 }, { "content": " update_id,\n\n last_update_id,\n\n };\n\n OpenLimitsWebSocketMessage::OrderBookDiff(order_book_response)\n\n }\n\n })\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Level2SnapshotRecord {\n\n #[serde(with = \"string_to_decimal\")]\n\n pub price: Decimal,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub size: Decimal,\n\n}\n\n\n\nimpl From<&Level2SnapshotRecord> for AskBid {\n\n fn from(record: &Level2SnapshotRecord) -> Self {\n\n let price = record.price;\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 78, "score": 34010.53481975771 }, { "content": " .join(\"/\");\n\n\n\n let ws_url = match self.parameters.sandbox {\n\n true => WS_URL_SANDBOX,\n\n false => WS_URL_PROD,\n\n };\n\n let endpoint = url::Url::parse(&format!(\"{}?streams={}\", ws_url, streams))\n\n .map_err(OpenLimitsError::UrlParserError)?;\n\n let (ws_stream, _) = connect_async(endpoint).await?;\n\n\n\n let (mut sink, stream) = ws_stream.split();\n\n let (disconnection_sender, mut disconnection_receiver) = unbounded_channel();\n\n tokio::spawn(async move {\n\n if 
disconnection_receiver.recv().await.is_some() {\n\n sink.close().await.ok();\n\n }\n\n });\n\n\n\n if let Ok(mut senders) = self.disconnection_senders.lock() {\n\n senders.push(disconnection_sender);\n", "file_path": "src/binance/client/websocket.rs", "rank": 79, "score": 34010.49404861168 }, { "content": " Level2,\n\n Matches,\n\n Full,\n\n User,\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\n#[serde(tag = \"type\")]\n\n#[serde(rename_all = \"snake_case\")]\n\npub(crate) enum InputMessage {\n\n Subscriptions {\n\n channels: Vec<Channel>,\n\n },\n\n Heartbeat {\n\n sequence: usize,\n\n last_trade_id: usize,\n\n product_id: String,\n\n time: String,\n\n },\n\n Ticker(Ticker),\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 80, "score": 34010.23276333194 }, { "content": " TickerAll,\n\n OrderBook(String, i64), // (symbol, depth)\n\n Depth(String, Option<u16>), // (symbol, interval)\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub enum BinanceWebsocketMessage {\n\n UserOrderUpdate(UserOrderUpdate),\n\n UserAccountUpdate(AccountUpdate),\n\n AggregateTrade(AggregateTrade),\n\n Trade(TradeMessage),\n\n Candlestick(CandlestickMessage),\n\n MiniTicker(MiniTicker),\n\n MiniTickerAll(Vec<MiniTicker>),\n\n Ticker(Ticker),\n\n TickerAll(Vec<Ticker>),\n\n OrderBook(OrderBook),\n\n Depth(Depth),\n\n Ping,\n\n Pong,\n", "file_path": "src/binance/model/websocket.rs", "rank": 81, "score": 34009.892001461165 }, { "content": "pub struct UserOrderUpdate {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n\n #[serde(rename = \"s\")]\n\n pub symbol: String,\n\n #[serde(rename = \"c\")]\n\n pub new_client_order_id: String,\n\n #[serde(rename = \"S\")]\n\n pub side: Side,\n\n #[serde(rename = \"o\")]\n\n pub order_type: OrderType,\n\n #[serde(rename = \"f\")]\n\n pub time_in_force: TimeInForce,\n\n #[serde(rename = \"q\", with = \"string_to_decimal\")]\n\n pub qty: Decimal,\n\n #[serde(rename = \"p\", with = 
\"string_to_decimal\")]\n\n pub price: Decimal,\n\n #[serde(rename = \"P\", with = \"string_to_decimal\")]\n", "file_path": "src/binance/model/websocket.rs", "rank": 82, "score": 34009.86135053109 }, { "content": " vec![Channel::Name(ChannelType::Level2)],\n\n vec![product_id.clone()],\n\n ),\n\n CoinbaseSubscription::Heartbeat(product_id) => (\n\n vec![Channel::Name(ChannelType::Heartbeat)],\n\n vec![product_id.clone()],\n\n ),\n\n _ => unimplemented!(),\n\n };\n\n let subscribe = Subscribe {\n\n _type: SubscribeCmd::Subscribe,\n\n auth: None,\n\n channels,\n\n product_ids,\n\n };\n\n\n\n let stream = self.connect(subscribe).await?;\n\n self.subscriptions.insert(subscription, stream);\n\n Ok(())\n\n }\n", "file_path": "src/coinbase/client/websocket.rs", "rank": 83, "score": 34009.66385391634 }, { "content": " pub price: Option<Decimal>,\n\n pub side: super::OrderSide,\n\n pub user_id: Option<String>,\n\n #[serde(default)]\n\n pub profile_id: Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Activate {\n\n pub product_id: String,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub timestamp: Decimal,\n\n pub order_id: String,\n\n pub stop_type: StopType,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub size: Decimal,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub funds: Decimal,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub taker_fee_rate: Decimal,\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 84, "score": 34009.46424399006 }, { "content": " ))\n\n } else if stream.name.contains(\"@kline_\") {\n\n Ok(BinanceWebsocketMessage::Candlestick(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n\n ))\n\n } else if stream.name.contains(\"@ticker\") {\n\n Ok(BinanceWebsocketMessage::Ticker(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n\n ))\n\n } else if stream.name.eq(\"!ticker@arr\") {\n\n Ok(BinanceWebsocketMessage::TickerAll(\n\n 
serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n\n ))\n\n } else if stream.name.ends_with(\"@miniTicker\") {\n\n Ok(BinanceWebsocketMessage::MiniTicker(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n\n ))\n\n } else if stream.name.ends_with(\"!miniTicker@arr\") {\n\n Ok(BinanceWebsocketMessage::MiniTickerAll(\n\n serde_json::from_value(stream.data).map_err(de::Error::custom)?,\n", "file_path": "src/binance/client/websocket.rs", "rank": 85, "score": 34009.44827499353 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\npub enum SubscribeCmd {\n\n Subscribe,\n\n}\n\n\n\n#[derive(Serialize, Clone, Deserialize, Debug, PartialEq, Eq, Hash)]\n\n#[serde(untagged)]\n\npub enum Channel {\n\n Name(ChannelType),\n\n WithProduct {\n\n name: ChannelType,\n\n product_ids: Vec<String>,\n\n },\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub enum ChannelType {\n\n Heartbeat,\n\n Ticker,\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 86, "score": 34009.31483031311 }, { "content": " pub stop_price: Decimal,\n\n #[serde(rename = \"F\", with = \"string_to_decimal\")]\n\n pub iceberg_qty: Decimal,\n\n #[serde(skip_serializing)]\n\n pub g: i32,\n\n #[serde(skip_serializing, rename = \"C\")]\n\n pub c_ignore: Option<String>,\n\n #[serde(rename = \"x\")]\n\n pub execution_type: OrderExecType,\n\n #[serde(rename = \"X\")]\n\n pub order_status: OrderStatus,\n\n #[serde(rename = \"r\")]\n\n pub order_reject_reason: OrderRejectReason,\n\n #[serde(rename = \"i\")]\n\n pub order_id: u64,\n\n #[serde(rename = \"l\", with = \"string_to_decimal\")]\n\n pub qty_last_filled_trade: Decimal,\n\n #[serde(rename = \"z\", with = \"string_to_decimal\")]\n\n pub accumulated_qty_filled_trades: Decimal,\n\n #[serde(rename = \"L\", with = \"string_to_decimal\")]\n", "file_path": "src/binance/model/websocket.rs", "rank": 87, "score": 34008.95406686824 }, { "content": 
"\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Auth {\n\n pub signature: String,\n\n pub key: String,\n\n pub passphrase: String,\n\n pub timestamp: String,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Subscribe {\n\n #[serde(rename = \"type\")]\n\n pub _type: SubscribeCmd,\n\n pub product_ids: Vec<String>,\n\n pub channels: Vec<Channel>,\n\n #[serde(flatten)]\n\n pub auth: Option<Auth>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 88, "score": 34008.77893187938 }, { "content": " #[serde(rename = \"s\")]\n\n pub symbol: String,\n\n #[serde(rename = \"k\")]\n\n pub kline: Kline,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct AccountUpdate {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n\n #[serde(rename = \"m\")]\n\n pub maker_commision_rate: u64,\n\n #[serde(rename = \"t\")]\n\n pub taker_commision_rate: u64,\n\n #[serde(rename = \"b\")]\n\n pub buyer_commision_rate: u64,\n\n #[serde(rename = \"s\")]\n", "file_path": "src/binance/model/websocket.rs", "rank": 89, "score": 34008.403710471706 }, { "content": " pub buyer_order_id: u64,\n\n #[serde(rename = \"a\")]\n\n pub seller_order_id: u64,\n\n #[serde(rename = \"T\")]\n\n pub trade_order_time: u64,\n\n #[serde(rename = \"m\")]\n\n pub is_buyer_maker: bool,\n\n #[serde(skip_serializing, rename = \"M\")]\n\n pub m_ignore: bool,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct AggregateTrade {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n\n #[serde(rename = \"s\")]\n\n pub symbol: String,\n", "file_path": "src/binance/model/websocket.rs", "rank": 90, "score": 34008.28863090266 }, { "content": " pub cumulative_quote_asset_transacted_qty: 
Decimal,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Depth {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n\n #[serde(rename = \"s\")]\n\n pub symbol: String,\n\n #[serde(rename = \"U\")]\n\n pub first_update_id: u64,\n\n #[serde(rename = \"u\")]\n\n pub final_update_id: u64,\n\n #[serde(rename = \"b\")]\n\n pub bids: Vec<AskBid>,\n\n #[serde(rename = \"a\")]\n\n pub asks: Vec<AskBid>,\n", "file_path": "src/binance/model/websocket.rs", "rank": 91, "score": 34008.26233379846 }, { "content": "}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Ticker {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n\n #[serde(rename = \"s\")]\n\n pub symbol: String,\n\n #[serde(rename = \"p\", with = \"string_to_decimal\")]\n\n pub price_change: Decimal,\n\n #[serde(rename = \"P\", with = \"string_to_decimal\")]\n\n pub price_change_percent: Decimal,\n\n #[serde(rename = \"w\", with = \"string_to_decimal\")]\n\n pub average_price: Decimal,\n\n #[serde(rename = \"x\", with = \"string_to_decimal\")]\n\n pub prev_close: Decimal,\n\n #[serde(rename = \"c\", with = \"string_to_decimal\")]\n", "file_path": "src/binance/model/websocket.rs", "rank": 92, "score": 34008.12590061864 }, { "content": " write!(f, \"{}@kline_{}\", symbol, interval)\n\n }\n\n BinanceSubscription::Depth(ref symbol, interval) => match interval {\n\n None => write!(f, \"{}@depth\", symbol),\n\n Some(i) => write!(f, \"{}@depth@{}ms\", symbol, i),\n\n },\n\n BinanceSubscription::MiniTicker(symbol) => write!(f, \"{}@miniTicker\", symbol),\n\n BinanceSubscription::MiniTickerAll => write!(f, \"!miniTicker@arr\"),\n\n BinanceSubscription::OrderBook(ref symbol, depth) => {\n\n write!(f, \"{}@depth{}\", symbol, depth)\n\n }\n\n BinanceSubscription::Ticker(ref symbol) => 
write!(f, \"{}@ticker\", symbol),\n\n BinanceSubscription::TickerAll => write!(f, \"!ticker@arr\"),\n\n BinanceSubscription::Trade(ref symbol) => write!(f, \"{}@trade\", symbol),\n\n BinanceSubscription::UserData(ref key) => write!(f, \"{}\", key),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Subscription> for BinanceSubscription {\n", "file_path": "src/binance/client/websocket.rs", "rank": 93, "score": 34008.111954646614 }, { "content": " #[serde(rename = \"l\", with = \"string_to_decimal\")]\n\n pub locked: Decimal,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct MiniTicker {\n\n #[serde(rename = \"e\")]\n\n pub event_type: String,\n\n #[serde(rename = \"E\")]\n\n pub event_time: u64,\n\n #[serde(rename = \"s\")]\n\n pub symbol: String,\n\n #[serde(rename = \"c\", with = \"string_to_decimal\")]\n\n pub close: Decimal,\n\n #[serde(rename = \"o\", with = \"string_to_decimal\")]\n\n pub open: Decimal,\n\n #[serde(rename = \"l\", with = \"string_to_decimal\")]\n\n pub low: Decimal,\n\n #[serde(rename = \"h\", with = \"string_to_decimal\")]\n\n pub high: Decimal,\n\n #[serde(rename = \"v\", with = \"string_to_decimal\")]\n\n pub volume: Decimal,\n\n #[serde(rename = \"q\", with = \"string_to_decimal\")]\n\n pub quote_volume: Decimal,\n\n}\n", "file_path": "src/binance/model/websocket.rs", "rank": 94, "score": 34008.041164706694 }, { "content": " let qty = record.size;\n\n Self { price, qty }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Level2UpdateRecord {\n\n pub side: super::OrderSide,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub price: Decimal,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub size: Decimal,\n\n}\n\n\n\nimpl From<&Level2UpdateRecord> for AskBid {\n\n fn from(record: &Level2UpdateRecord) -> Self {\n\n let price = record.price;\n\n let qty = record.size;\n\n Self { price, qty }\n\n }\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 95, "score": 
34007.939476025975 }, { "content": " },\n\n Empty {\n\n sequence: usize,\n\n product_id: String,\n\n #[serde(with = \"string_to_opt_decimal\")]\n\n price: Option<Decimal>,\n\n },\n\n}\n\n\n\nimpl Ticker {\n\n pub fn price(&self) -> Decimal {\n\n match self {\n\n Ticker::Full { price, .. } => *price,\n\n Ticker::Empty { price, .. } => price.expect(\"Couldn't get price.\"),\n\n }\n\n }\n\n\n\n pub fn time(&self) -> Option<&String> {\n\n match self {\n\n Ticker::Full { time, .. } => Some(time),\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 96, "score": 34007.59879699431 }, { "content": " Ticker::Full { best_ask, .. } => Some(best_ask.expect(\"Couldn't get best ask.\")),\n\n Ticker::Empty { .. } => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub enum Full {\n\n Received(Received),\n\n Open(Open),\n\n Done(Done),\n\n Match(Match),\n\n Change(Change),\n\n Activate(Activate),\n\n}\n\n\n\nimpl Full {\n\n pub fn price(&self) -> Option<&Decimal> {\n\n match self {\n\n Full::Received(Received::Limit { price, .. 
}) => Some(price),\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 97, "score": 34007.36727483327 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\npub struct Match {\n\n pub trade_id: usize,\n\n pub sequence: usize,\n\n pub maker_order_id: String,\n\n pub taker_order_id: String,\n\n pub time: String,\n\n pub product_id: String,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub size: Decimal,\n\n #[serde(with = \"string_to_decimal\")]\n\n pub price: Decimal,\n\n pub side: super::OrderSide,\n\n pub taker_user_id: Option<String>,\n\n pub taker_profile_id: Option<String>,\n\n pub maker_user_id: Option<String>,\n\n pub maker_profile_id: Option<String>,\n\n pub user_id: Option<String>,\n\n #[serde(default)]\n\n pub profile_id: Option<String>,\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 98, "score": 34005.937378898816 }, { "content": " }\n\n }\n\n\n\n pub fn sequence(&self) -> Option<&usize> {\n\n match self {\n\n Full::Received(Received::Limit { sequence, .. }) => Some(sequence),\n\n Full::Received(Received::Market { sequence, .. }) => Some(sequence),\n\n Full::Open(Open { sequence, .. }) => Some(sequence),\n\n Full::Done(Done::Limit { sequence, .. }) => sequence.as_ref(),\n\n Full::Done(Done::Market { sequence, .. }) => Some(sequence),\n\n Full::Match(Match { sequence, .. }) => Some(sequence),\n\n Full::Change(Change { sequence, .. }) => Some(sequence),\n\n Full::Activate(Activate { .. }) => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\n#[serde(tag = \"order_type\")]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/coinbase/model/websocket.rs", "rank": 99, "score": 34005.79113737625 } ]
Rust
src/processes/poisson.rs
rasa200/markovian
8824ae58301e83f2b8f0278b3d321c2a4000331a
use num_traits::Float; use rand_distr::{Exp1, Exp}; use crate::{State, StateIterator}; use core::fmt::Debug; use num_traits::{sign::Unsigned, One, Zero}; use rand::Rng; use rand_distr::Distribution; use crate::errors::InvalidState; use core::mem; #[derive(Debug, Clone)] pub struct Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { state: T, exp: Exp<N>, rng: R, } impl<N, T, R> Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { #[inline] pub fn new(lambda: N, rng: R) -> Result<Self, rand_distr::ExpError> { Ok(Poisson { state: T::zero(), exp: Exp::new(lambda)?, rng, }) } } impl<N, T, R> State for Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { type Item = T; #[inline] fn state(&self) -> Option<&Self::Item> { Some(&self.state) } #[inline] fn state_mut(&mut self) -> Option<&mut Self::Item> { Some(&mut self.state) } #[inline] fn set_state( &mut self, mut new_state: Self::Item, ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> { mem::swap(&mut self.state, &mut new_state); Ok(Some(new_state)) } } impl<N, T, R> Iterator for Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { type Item = (N, T); #[inline] fn next(&mut self) -> Option<Self::Item> { let period = self.exp.sample(&mut self.rng); self.set_state(self.state.clone() + T::one()).unwrap(); self.state().cloned().map(|state| (period, state)) } } impl<N, T, R> StateIterator for Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { #[inline] fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> { self.state().cloned().map(|state| (N::zero(), state)) } } impl<N, T, R> Distribution<(N, T)> for 
Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { #[inline] fn sample<R2>(&self, rng: &mut R2) -> (N, T) where R2: Rng + ?Sized, { (self.exp.sample(rng), self.state.clone() + T::one()) } } #[cfg(test)] mod tests { use super::*; #[test] fn value_stability() { let rng = crate::tests::rng(3); let lambda = 1.; let expected = vec![(0.529274135874436, 1), (0.5369108748992898, 2), (0.3618522192460201, 3), (0.5717432176122981, 4)]; let mc = Poisson::new(lambda, rng).unwrap(); let sample: Vec<(f64, u64)> = mc.take(4).collect(); assert_eq!(sample, expected); } }
use num_traits::Float; use rand_distr::{Exp1, Exp}; use crate::{State, StateIterator}; use core::fmt::Debug; use num_traits::{sign::Unsigned, One, Zero}; use rand::Rng; use rand_distr::Distribution; use crate::errors::InvalidState; use core::mem; #[derive(Debug, Clone)] pub struct Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { state: T, exp: Exp<N>, rng: R, } impl<N, T, R> Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { #[inline] pub fn new(lambda: N, rng: R) -> Result<Self, rand_distr::ExpError> { Ok(Poisson { state: T::zero(), exp: Exp::new(lambda)?, rng, }) } } impl<N, T, R> State for Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { type Item = T; #[inline] fn state(&self) -> Option<&Self::Item> { Some(&self.state) } #[inline] fn state_mut(&mut self) -> Option<&mut Self::Item> { Some(&mut self.state) } #[inline]
} impl<N, T, R> Iterator for Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { type Item = (N, T); #[inline] fn next(&mut self) -> Option<Self::Item> { let period = self.exp.sample(&mut self.rng); self.set_state(self.state.clone() + T::one()).unwrap(); self.state().cloned().map(|state| (period, state)) } } impl<N, T, R> StateIterator for Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { #[inline] fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> { self.state().cloned().map(|state| (N::zero(), state)) } } impl<N, T, R> Distribution<(N, T)> for Poisson<N, T, R> where N: Float, Exp1: Distribution<N>, T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned, R: Rng, { #[inline] fn sample<R2>(&self, rng: &mut R2) -> (N, T) where R2: Rng + ?Sized, { (self.exp.sample(rng), self.state.clone() + T::one()) } } #[cfg(test)] mod tests { use super::*; #[test] fn value_stability() { let rng = crate::tests::rng(3); let lambda = 1.; let expected = vec![(0.529274135874436, 1), (0.5369108748992898, 2), (0.3618522192460201, 3), (0.5717432176122981, 4)]; let mc = Poisson::new(lambda, rng).unwrap(); let sample: Vec<(f64, u64)> = mc.take(4).collect(); assert_eq!(sample, expected); } }
fn set_state( &mut self, mut new_state: Self::Item, ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> { mem::swap(&mut self.state, &mut new_state); Ok(Some(new_state)) }
function_block-full_function
[ { "content": "pub trait State {\n\n type Item: core::fmt::Debug;\n\n\n\n #[inline]\n\n fn state(&self) -> Option<&Self::Item> {\n\n None\n\n }\n\n\n\n #[inline]\n\n fn state_mut(&mut self) -> Option<&mut Self::Item> {\n\n None\n\n }\n\n\n\n /// Changes the `state` of the struct.\n\n /// \n\n /// If changing is succesful, this method should return the old state as an `Option<Self::Item>`.\n\n /// \n\n /// # Remarks\n\n ///\n\n /// You might want to use [core::mem::swap](https://doc.rust-lang.org/core/mem/fn.swap.html).\n\n #[inline]\n\n fn set_state(\n\n &mut self,\n\n new_state: Self::Item,\n\n ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> {\n\n Err(InvalidState::new(new_state))\n\n }\n\n}\n", "file_path": "src/traits/state.rs", "rank": 0, "score": 82345.80420010671 }, { "content": "/// Iterator with an internal state. \n\n/// \n\n/// Internals states can be thought as the \"initial\" element of an iterator, see [trajectory].\n\n/// \n\n/// # Example\n\n/// \n\n/// Usual case: A struct that uses the same type for [Iterator] and [State] traits.\n\n/// ```\n\n/// # struct MyStateIterator;\n\n/// # impl Iterator for MyStateIterator {\n\n/// # type Item = f64;\n\n/// # fn next(&mut self) -> Option<Self::Item> {\n\n/// # None\n\n/// # }\n\n/// # }\n\n/// # impl markovian::State for MyStateIterator {\n\n/// # type Item = f64;\n\n/// # }\n\n/// # use markovian::State;\n\n/// impl markovian::StateIterator for MyStateIterator {\n\n/// #[inline]\n\n/// fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> {\n\n/// self.state().cloned()\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// [trajectory]: trait.StateIterator.html#method.trajectory\n\n/// [Iterator]: https://doc.rust-lang.org/nightly/core/iter/trait.Iterator.html\n\n/// [State]: trait.State.html\n\npub trait StateIterator: Iterator + State + Sized {\n\n /// # Remarks\n\n /// \n\n /// You should use ``#[inline]`` when implementing this method.\n\n fn state_as_item(&self) -> Option<<Self as 
std::iter::Iterator>::Item>;\n\n\n\n /// Returns a new iterator whose first element is the state (seen as an item of the Iterator) \n\n /// and then follows with the elements of the iterator. \n\n #[inline]\n\n fn trajectory(self) -> Chain<std::option::IntoIter<<Self as std::iter::Iterator>::Item>, Self> {\n\n self.state_as_item().into_iter().chain(self)\n\n }\n\n}\n", "file_path": "src/traits/state_iterator.rs", "rank": 1, "score": 71161.21718393208 }, { "content": "fn main() {\n\n // Monte Carlo\n\n let (times, values): (Vec<_>, Vec<_>) = Poisson::<f64, usize, _>::new(LAMBDA, thread_rng())\n\n .unwrap()\n\n .trajectory()\n\n .take(STEPS)\n\n .unzip();\n\n // Plotting\n\n (times.iter().cumsum::<f64>(), values).preexplore()\n\n .set_title(format!(\"Poisson process, lambda = {}\", LAMBDA))\n\n .set_xlabel(\"time\")\n\n .set_ylabel(\"state\")\n\n .set_style(\"_|\")\n\n .plot(\"poisson\")\n\n .unwrap();\n\n}", "file_path": "examples/poisson/main.rs", "rank": 2, "score": 35100.09885708801 }, { "content": "fn main() {\n\n // Monte Carlo\n\n let monte_carlo_approx: Vec<f64> = (0..TIME).map(|time| {\n\n // Simulate a sample from Branchng ´processes at a fixed time\n\n let simulations = sample_population(BIRTH, DEATH, time, SAMPLES);\n\n // Compute extinction probability for this time\n\n extinction_prob(simulations)\n\n }).collect();\n\n // Plotting\n\n monte_carlo_approx.preexplore()\n\n .set_title(\"Approximate extinction probability\")\n\n .set_xlabel(\"time\")\n\n .set_ylabel(\"extinction_probability\")\n\n .plot(\"extinction\")\n\n .unwrap();\n\n}\n\n\n", "file_path": "examples/branching/main.rs", "rank": 3, "score": 35100.09885708801 }, { "content": "fn main() {\n\n // Finite state-space Markov Chain\n\n\n\n let init_state: i32 = 0;\n\n\n\n let transition = |state: &i32| Raw::new(vec![(0.5, state + 1), (0.5, state - 1)]);\n\n\n\n let mut mc = MarkovChain::new(init_state, &transition, thread_rng());\n\n\n\n println!(\"{:?}\", mc.state());\n\n println!(\"{:?}\", 
mc.next());\n\n println!(\"{:?}\", mc.next());\n\n println!(\"{:?}\", mc.next());\n\n\n\n // Infinite state-space Markov Chain\n\n\n\n let init_state: i32 = 0;\n\n let transition2 = |state: &i32| {\n\n let v: Vec<(f64, i32)> = (1..)\n\n .map(move |s| ((2.0_f32).powf(-s as f32), state + s as i32))\n", "file_path": "examples/markov_chain/main.rs", "rank": 4, "score": 33836.28387820474 }, { "content": "/// Samples population starting from 1 until time iterations.\n\nfn sample_population(\n\n birth_prob: f64,\n\n death_prob: f64,\n\n iterations: usize,\n\n samples: usize,\n\n) -> Vec<u32> {\n\n (0..samples)\n\n .collect::<Vec<usize>>()\n\n .par_iter()\n\n .map(|_| {\n\n let init_state: u32 = 1;\n\n let density = raw_dist![\n\n (death_prob, 0),\n\n (birth_prob, 2),\n\n (1.0 - birth_prob - death_prob, 1)\n\n ];\n\n let mut branching_process = Branching::new(init_state, density, thread_rng());\n\n\n\n branching_process.nth(iterations).unwrap()\n\n })\n\n .collect()\n\n}\n", "file_path": "examples/branching/main.rs", "rank": 5, "score": 33836.28387820474 }, { "content": "fn main() {\n\n\n\n let state_index = 0;\n\n let transition_matrix = ndarray::array![\n\n [1, 2, 3],\n\n [1, 0, 0],\n\n [0, 1, 1],\n\n ];\n\n let state_space = vec!['a', 'b', 'c'];\n\n let rng = thread_rng();\n\n\n\n let mut mc = FiniteMarkovChain::from((state_index, transition_matrix, state_space, rng));\n\n\n\n println!(\"{:?}\", mc.state());\n\n println!(\"{:?}\", mc.next());\n\n println!(\"{:?}\", mc.next());\n\n println!(\"{:?}\", mc.next());\n\n}", "file_path": "examples/finite_markov_chain/main.rs", "rank": 6, "score": 32689.90554482297 }, { "content": "/// Abstraction over transition matrix.\n\n///\n\n/// # Remarks\n\n/// \n\n/// Output parameter `O` allows sampling more than only the next state,\n\n/// for example, continuous markov chains are able to sample both a new state\n\n/// and a time step.\n\npub trait Transition<T, O> {\n\n fn sample_from<R>(&self, state: &T, rng: &mut R) -> O\n\n 
where\n\n R: Rng + ?Sized;\n\n}\n\n\n\nimpl<T, O, F, D> Transition<T, O> for F\n\nwhere\n\n F: Fn(&T) -> D,\n\n D: Distribution<O>,\n\n{\n\n #[inline]\n\n fn sample_from<R>(&self, state: &T, rng: &mut R) -> O\n\n where\n\n R: Rng + ?Sized,\n\n {\n\n self(state).sample(rng)\n\n }\n\n}\n\n\n", "file_path": "src/traits/transition.rs", "rank": 7, "score": 32457.378215965866 }, { "content": "fn bench_brownian(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Brownian Motion\");\n\n for i in [100, 1000, 10000].iter() {\n\n group.bench_with_input(BenchmarkId::new(\"Markovian\", i), i, \n\n |b, i| b.iter(|| brownian_motion(*i)));\n\n group.bench_with_input(BenchmarkId::new(\"Direct\", i), i, \n\n |b, i| b.iter(|| direct_brownian_motion(*i)));\n\n }\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, bench_brownian);\n\ncriterion_main!(benches);", "file_path": "benches/brownian_motion.rs", "rank": 8, "score": 29041.276339446358 }, { "content": "/// }\n\n/// impl markovian::State for ArithmeticSequence {\n\n/// type Item = f64;\n\n/// #[inline]\n\n/// fn state(&self) -> Option<&Self::Item> {\n\n/// Some(&self.state)\n\n/// }\n\n/// \n\n/// #[inline]\n\n/// fn state_mut(&mut self) -> Option<&mut Self::Item> {\n\n/// Some(&mut self.state)\n\n/// }\n\n/// #[inline]\n\n/// fn set_state(\n\n/// &mut self,\n\n/// mut new_state: Self::Item,\n\n/// ) -> Result<Option<Self::Item>, markovian::errors::InvalidState<Self::Item>> {\n\n/// std::mem::swap(&mut self.state, &mut new_state); // Change places\n\n/// Ok(Some(new_state)) // Return the previous state value\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// [Arithmetic sequence]: https://en.wikipedia.org/wiki/Arithmetic_progression\n\n\n", "file_path": "src/traits/state.rs", "rank": 9, "score": 27480.17904023246 }, { "content": "use crate::errors::InvalidState;\n\n\n\n/// Public state.\n\n/// \n\n/// `State` should be implemented when it is absolutely clear what a `state` for your your struct means.\n\n/// \n\n/// # 
Examples\n\n/// \n\n/// [Arithmetic sequence].\n\n/// ```\n\n/// struct ArithmeticSequence {\n\n/// state: f64,\n\n/// step: f64,\n\n/// }\n\n/// impl Iterator for ArithmeticSequence {\n\n/// type Item = f64;\n\n/// fn next(&mut self) -> Option<Self::Item> {\n\n/// self.state += self.step;\n\n/// Some(self.state)\n\n/// }\n", "file_path": "src/traits/state.rs", "rank": 10, "score": 27477.64426357613 }, { "content": "// To bench\n\nfn brownian_motion(steps: usize) -> Vec<f64> {\n\n\tlet state = 0.;\n\n\tlet transition = |_: &f64| StandardNormal;\n\n\tlet rng = thread_rng();\n\n\n\n\tlet mc = markovian::MarkovChain::new(state, transition, rng);\n\n\n\n\tmc.take(steps).collect()\n\n}\n\n\n", "file_path": "benches/brownian_motion.rs", "rank": 11, "score": 26640.133444500578 }, { "content": "/// # }\n\n/// # use markovian::State;\n\n/// impl markovian::StateIterator for MyStateIterator {\n\n/// #[inline]\n\n/// fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> {\n\n/// self.state().cloned()\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// [trajectory]: trait.StateIterator.html#method.trajectory\n\n/// [Iterator]: https://doc.rust-lang.org/nightly/core/iter/trait.Iterator.html\n\n/// [State]: trait.State.html\n", "file_path": "src/traits/state_iterator.rs", "rank": 12, "score": 26074.44957546347 }, { "content": "use crate::State;\n\nuse core::iter::Chain;\n\n\n\n/// Iterator with an internal state. 
\n\n/// \n\n/// Internals states can be thought as the \"initial\" element of an iterator, see [trajectory].\n\n/// \n\n/// # Example\n\n/// \n\n/// Usual case: A struct that uses the same type for [Iterator] and [State] traits.\n\n/// ```\n\n/// # struct MyStateIterator;\n\n/// # impl Iterator for MyStateIterator {\n\n/// # type Item = f64;\n\n/// # fn next(&mut self) -> Option<Self::Item> {\n\n/// # None\n\n/// # }\n\n/// # }\n\n/// # impl markovian::State for MyStateIterator {\n\n/// # type Item = f64;\n", "file_path": "src/traits/state_iterator.rs", "rank": 13, "score": 26071.602762644663 }, { "content": "// To compare with\n\nfn direct_brownian_motion(steps: usize) -> Vec<f64> {\n\n\tlet mut rng = thread_rng();\n\n\t(0..steps).map(|_| rng.sample(StandardNormal)).collect()\n\n}\n\n\n\n\n", "file_path": "benches/brownian_motion.rs", "rank": 14, "score": 25831.096018243916 }, { "content": "/// Counts the fraction of the trayectories that are not extinct.\n\n/// Formally, it gives\n\n/// \\PP_1( X_{max_iterations} = 0),\n\n/// approximated by samples number of simuations.\n\nfn extinction_prob(population_samples: Vec<u32>) -> f64 {\n\n let samples = population_samples.len();\n\n population_samples\n\n .into_iter()\n\n .filter(|&x| x == 0_u32)\n\n .count() as f64\n\n / samples as f64\n\n}\n\n\n", "file_path": "examples/branching/main.rs", "rank": 15, "score": 25831.096018243916 }, { "content": " state,\n\n base_distribution,\n\n rng,\n\n }\n\n }\n\n}\n\n\n\nimpl<T, D, R> State for Branching<T, D, R>\n\nwhere\n\n T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned,\n\n D: Distribution<T>,\n\n R: Rng,\n\n{\n\n type Item = T;\n\n\n\n #[inline]\n\n fn state(&self) -> Option<&Self::Item> {\n\n Some(&self.state)\n\n }\n\n\n", "file_path": "src/processes/branching.rs", "rank": 21, "score": 27.289024351185947 }, { "content": " #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let mut count = T::one();\n\n let mut acc = T::zero();\n\n while count <= 
self.state {\n\n acc = acc + self.base_distribution.sample(&mut self.rng);\n\n count = count + T::one();\n\n }\n\n self.state = acc.clone();\n\n Some(acc)\n\n }\n\n}\n\n\n\nimpl<T, D, R> StateIterator for Branching<T, D, R>\n\nwhere\n\n T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned,\n\n D: Distribution<T>,\n\n R: Rng,\n\n{\n\n #[inline]\n", "file_path": "src/processes/branching.rs", "rank": 22, "score": 25.8351614490955 }, { "content": " fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> {\n\n self.state().cloned()\n\n }\n\n}\n\n\n\nimpl<T, D, R> Distribution<T> for Branching<T, D, R>\n\nwhere\n\n T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned,\n\n D: Distribution<T>,\n\n R: Rng,\n\n{\n\n /// Sample a possible next state. \n\n #[inline]\n\n fn sample<R2>(&self, rng: &mut R2) -> T \n\n where\n\n R2: Rng + ?Sized,\n\n { \n\n let mut count = T::one();\n\n let mut acc = T::zero();\n\n while count < self.state {\n", "file_path": "src/processes/branching.rs", "rank": 24, "score": 25.696356216124137 }, { "content": " #[inline]\n\n fn state_mut(&mut self) -> Option<&mut Self::Item> {\n\n Some(&mut self.state)\n\n }\n\n\n\n #[inline]\n\n fn set_state(\n\n &mut self,\n\n mut new_state: Self::Item,\n\n ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> {\n\n mem::swap(&mut self.state, &mut new_state);\n\n Ok(Some(new_state))\n\n }\n\n}\n\n\n\nimpl<T, D, R> Iterator for Branching<T, D, R>\n\nwhere\n\n T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned,\n\n D: Distribution<T>,\n\n R: Rng,\n", "file_path": "src/processes/branching.rs", "rank": 25, "score": 25.485749663719147 }, { "content": "{\n\n type Item = (N, T);\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let (period, state) = self.transition.sample_from(&self.state, &mut self.rng);\n\n self.state = state;\n\n self.state().cloned().map(|state| (period, state))\n\n }\n\n}\n\n\n\nimpl<N, T, F, R> StateIterator for 
TimedMarkovChain<N, T, F, R>\n\nwhere\n\n T: Debug + Clone,\n\n F: Transition<T, (N, T)>,\n\n R: Rng,\n\n N: From<f64>,\n\n{\n\n #[inline]\n\n fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> {\n", "file_path": "src/timed_markov_chain.rs", "rank": 26, "score": 24.57538602481686 }, { "content": " #[inline]\n\n fn state_mut(&mut self) -> Option<&mut Self::Item> {\n\n Some(&mut self.state)\n\n }\n\n\n\n #[inline]\n\n fn set_state(\n\n &mut self,\n\n mut new_state: Self::Item,\n\n ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> {\n\n mem::swap(&mut self.state, &mut new_state);\n\n Ok(Some(new_state))\n\n }\n\n}\n\n\n\nimpl<N, T, F, R> Iterator for TimedMarkovChain<N, T, F, R>\n\nwhere\n\n T: Debug + Clone,\n\n F: Transition<T, (N, T)>,\n\n R: Rng,\n", "file_path": "src/timed_markov_chain.rs", "rank": 27, "score": 23.24001220753382 }, { "content": " self.state_index = self.sample_index();\n\n let period = self.sample_clock();\n\n self.state().cloned().map(|x| (period, x))\n\n }\n\n}\n\n\n\nimpl<T, W, R> StateIterator for ContFiniteMarkovChain<T, W, R>\n\nwhere\n\n W: Float + AliasableWeight,\n\n Exp1: Distribution<W>,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng,\n\n{\n\n #[inline]\n\n fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> {\n\n self.state().cloned().map(|x| (W::zero(), x))\n\n }\n\n}\n\n\n\nimpl<T, W, R> Distribution<(W, T)> for ContFiniteMarkovChain<T, W, R>\n", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 28, "score": 23.177991550927583 }, { "content": " TimedMarkovChain {\n\n state,\n\n transition,\n\n rng,\n\n phantom: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<N, T, F, R> State for TimedMarkovChain<N, T, F, R>\n\nwhere\n\n T: Debug + Clone,\n\n{\n\n type Item = T;\n\n\n\n #[inline]\n\n fn state(&self) -> Option<&Self::Item> {\n\n Some(&self.state)\n\n }\n\n\n", "file_path": "src/timed_markov_chain.rs", "rank": 29, "score": 23.092853387692447 }, { "content": " R: Rng,\n\n F: 
Transition<T, T>,\n\n{\n\n #[inline]\n\n pub fn new(state: T, transition: F, rng: R) -> Self {\n\n MarkovChain {\n\n state,\n\n transition,\n\n rng,\n\n }\n\n }\n\n}\n\n\n\nimpl<T, F, R> State for MarkovChain<T, F, R>\n\nwhere\n\n T: Debug + Clone,\n\n{\n\n type Item = T;\n\n\n\n #[inline]\n", "file_path": "src/markov_chain.rs", "rank": 30, "score": 22.77008418876804 }, { "content": " T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n type Item = T;\n\n\n\n #[inline]\n\n fn state(&self) -> Option<&Self::Item> {\n\n Some(&self.state_space[self.state_index])\n\n }\n\n\n\n #[inline]\n\n fn state_mut(&mut self) -> Option<&mut Self::Item> {\n\n Some(&mut self.state_space[self.state_index])\n\n }\n\n\n\n #[inline]\n\n fn set_state(\n\n &mut self,\n\n new_state: Self::Item,\n\n ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> {\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 31, "score": 22.558119908564514 }, { "content": " Some(mut state_index) => {\n\n mem::swap(&mut self.state_index, &mut state_index);\n\n Ok(Some(self.state_space[state_index].clone()))\n\n }\n\n None => Err(InvalidState::new(new_state)),\n\n }\n\n }\n\n}\n\n\n\nimpl<T, W, R> Iterator for ContFiniteMarkovChain<T, W, R>\n\nwhere\n\n W: Float + AliasableWeight,\n\n Exp1: Distribution<W>,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng,\n\n{\n\n type Item = (W, T);\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 32, "score": 21.66176547323422 }, { "content": "where\n\n T: Debug + Clone,\n\n F: Transition<T, T>,\n\n R: Rng,\n\n{\n\n type Item = T;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.state = self.transition.sample_from(&self.state, &mut self.rng);\n\n self.state().cloned()\n\n }\n\n}\n\n\n\nimpl<T, F, R> StateIterator for MarkovChain<T, F, R>\n\nwhere\n\n T: Debug + Clone,\n\n F: Transition<T, T>,\n\n R: Rng,\n\n{\n", "file_path": 
"src/markov_chain.rs", "rank": 33, "score": 21.583798039176965 }, { "content": "/// constitutes the second generation and the process repeats. \n\n/// The overall process is therefore characterized by the number of \n\n/// offsprings an individual has. \n\n/// The resulting process is a Markov Chain in NN.\n\n#[derive(Debug, Clone)]\n\npub struct Branching<T, D, R> \n\nwhere\n\n T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned,\n\n D: Distribution<T>,\n\n R: Rng,\n\n{\n\n state: T,\n\n base_distribution: D,\n\n rng: R,\n\n}\n\n\n\nimpl<T, D, R> Branching<T, D, R>\n\nwhere\n\n T: Debug + PartialEq + Clone + One + Zero + PartialOrd + Unsigned,\n\n D: Distribution<T>,\n", "file_path": "src/processes/branching.rs", "rank": 34, "score": 21.5718930116394 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n self.state_index = self.sample_index();\n\n self.state().cloned()\n\n }\n\n}\n\n\n\nimpl<T, W, R> StateIterator for FiniteMarkovChain<T, W, R>\n\nwhere\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n #[inline]\n\n fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> {\n\n self.state().cloned()\n\n }\n\n}\n\n\n\nimpl<T, W, R> Distribution<T> for FiniteMarkovChain<T, W, R>\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 35, "score": 21.357302661505024 }, { "content": " #[inline]\n\n fn state_as_item(&self) -> Option<<Self as std::iter::Iterator>::Item> {\n\n self.state().cloned()\n\n }\n\n}\n\n\n\nimpl<T, F, R> Distribution<T> for MarkovChain<T, F, R>\n\nwhere\n\n T: Debug + Clone,\n\n F: Transition<T, T>,\n\n R: Rng,\n\n{\n\n /// Sample a possible next state. 
\n\n #[inline]\n\n fn sample<R2>(&self, rng: &mut R2) -> T\n\n where\n\n R2: Rng + ?Sized,\n\n { \n\n self.transition.sample_from(&self.state, rng)\n\n }\n", "file_path": "src/markov_chain.rs", "rank": 36, "score": 20.640257439865145 }, { "content": "/// If your transition function `transition` could reuse of structs that implement\n\n/// the `Distribution<T>` trait in order to sample the next state, then, \n\n/// for the best performance possible, create your own struct that implements\n\n/// the `Transition<T, (N, T)>` trait.\n\n#[derive(Debug, Clone)]\n\npub struct TimedMarkovChain<N, T, F, R> {\n\n state: T,\n\n transition: F,\n\n rng: R,\n\n phantom: PhantomData<N>,\n\n}\n\n\n\nimpl<N, T, F, R> TimedMarkovChain<N, T, F, R>\n\nwhere\n\n R: Rng,\n\n F: Transition<T, (N, T)>,\n\n N: From<f64>,\n\n{\n\n #[inline]\n\n pub fn new(state: T, transition: F, rng: R) -> Self {\n", "file_path": "src/timed_markov_chain.rs", "rank": 37, "score": 20.43940426102349 }, { "content": " }\n\n }\n\n\n\n #[inline]\n\n fn sample_index(&mut self) -> usize {\n\n self.transition_matrix[self.state_index].sample(&mut self.rng)\n\n }\n\n\n\n #[inline]\n\n fn sample_clock(&mut self) -> W {\n\n let rate = self.transiton_clock[self.state_index];\n\n Exp::new(rate).unwrap().sample(&mut self.rng)\n\n }\n\n}\n\n\n\nimpl<T, W, R> State for ContFiniteMarkovChain<T, W, R>\n\nwhere\n\n W: Float + AliasableWeight,\n\n Exp1: Distribution<W>,\n\n T: Debug + PartialEq + Clone,\n", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 38, "score": 19.551115613628234 }, { "content": "where\n\n W: Float + AliasableWeight,\n\n Exp1: Distribution<W>,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng,\n\n{\n\n /// Sample a possible next state. 
\n\n #[inline]\n\n fn sample<R2>(&self, rng: &mut R2) -> (W, T) \n\n where\n\n R2: Rng + ?Sized,\n\n { \n\n let new_index = self.transition_matrix[self.state_index].sample(rng);\n\n let rate = self.transiton_clock[self.state_index];\n\n let step = Exp::new(rate).unwrap().sample(rng);\n\n\n\n (step, self.state_space[new_index].clone())\n\n }\n\n}", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 39, "score": 19.45656340789994 }, { "content": " self.state().cloned().map(|state| (N::from(0.0), state))\n\n }\n\n}\n\n\n\nimpl<N, T, F, R> Distribution<(N, T)> for TimedMarkovChain<N, T, F, R>\n\nwhere\n\n T: Debug + Clone,\n\n F: Transition<T, (N, T)>,\n\n R: Rng,\n\n N: From<f64>,\n\n{\n\n /// Sample a possible next state. \n\n #[inline]\n\n fn sample<R2>(&self, rng: &mut R2) -> (N, T)\n\n where\n\n R2: Rng + ?Sized,\n\n { \n\n self.transition.sample_from(&self.state, rng)\n\n }\n\n}\n", "file_path": "src/timed_markov_chain.rs", "rank": 40, "score": 19.014096510317447 }, { "content": "// #[derive(Debug, Clone)]\n\npub struct ContFiniteMarkovChain<T, W, R>\n\nwhere\n\n W: Float + AliasableWeight,\n\n Exp1: Distribution<W>,\n\n R: Rng,\n\n{\n\n state_index: usize,\n\n transition_matrix: Vec<WeightedAliasIndex<W>>,\n\n transiton_clock: Vec<W>,\n\n state_space: Vec<T>,\n\n rng: R,\n\n}\n\n\n\nimpl<T, W, R> ContFiniteMarkovChain<T, W, R>\n\nwhere\n\n W: Float + AliasableWeight,\n\n Exp1: Distribution<W>,\n\n R: Rng,\n\n{\n", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 41, "score": 18.698795139970084 }, { "content": "///\n\n/// [Distribution implementation]: struct.Raw.html#impl-Distribution<T>\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Raw<I> {\n\n iter: I,\n\n}\n\n\n\nimpl<I> Raw<I> {\n\n #[inline]\n\n pub fn new(iter: I) -> Self {\n\n Raw { iter }\n\n }\n\n}\n\n\n\nimpl<P, T, I> Distribution<T> for Raw<I>\n\nwhere\n\n P: Zero + One + PartialOrd + Debug + Copy, \n\n f64: From<P>,\n\n I: IntoIterator<Item = (P, T)> + Clone,\n\n{\n", 
"file_path": "src/distributions/raw.rs", "rank": 42, "score": 18.685689947426813 }, { "content": " #[inline]\n\n fn sample<R>(&self, rng: &mut R) -> T\n\n where\n\n R: Rng + ?Sized,\n\n {\n\n let cum_goal: f64 = rng.gen(); // NOT CORRECT\n\n\n\n let mut acc: f64 = 0.0;\n\n let one = f64::from(P::one());\n\n\n\n for (prob, state) in self.iter.clone() {\n\n assert!(P::zero() <= prob, \"Probabilities can not be negative. Tried to use {:?}\", prob);\n\n assert!(one >= acc, \"Probabilities can not be more than one. Tried to use {:?}\", acc);\n\n \tacc += f64::from(prob);\n\n if acc >= cum_goal {\n\n return state;\n\n }\n\n }\n\n panic!(\"Sampling was not possible: probabilities did not cover all posiibilities. Check the type of your probabilities and all possibilities by rng.gen() there.\")\n\n }\n", "file_path": "src/distributions/raw.rs", "rank": 44, "score": 18.249262226785945 }, { "content": "// Traits\n\nuse crate::{State, StateIterator};\n\nuse core::fmt::Debug;\n\nuse rand::Rng;\n\nuse rand_distr::{weighted_alias::{WeightedAliasIndex, AliasableWeight}, Distribution};\n\nuse num_traits::float::Float;\n\n\n\n// Structs\n\nuse crate::errors::InvalidState;\n\nuse rand_distr::{Exp1, Exp};\n\n\n\n// Functions\n\nuse core::mem;\n\n\n\n/// Finite state Markov Chain in continuous time. 
\n\n/// \n\n/// # Costs\n\n/// \n\n/// Construction cost: O(n), n: size of the state space.\n\n/// Sample cost: O(1).\n", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 45, "score": 17.85983782682678 }, { "content": " match self.state_space.iter().position(|s| *s == new_state) {\n\n Some(mut state_index) => {\n\n mem::swap(&mut self.state_index, &mut state_index);\n\n Ok(Some(self.state_space[state_index].clone()))\n\n }\n\n None => Err(InvalidState::new(new_state)),\n\n }\n\n }\n\n}\n\n\n\nimpl<T, W, R> Iterator for FiniteMarkovChain<T, W, R>\n\nwhere\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n type Item = T;\n\n\n\n #[inline]\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 46, "score": 17.425327983967616 }, { "content": "\n\nuse core::marker::PhantomData;\n\nuse rand::Rng;\n\nuse rand_distr::Distribution;\n\n\n\n/// Concrete struct for the function of a `Distribution. 
\n\n/// \n\n/// # Examples\n\n/// \n\n/// The squared of a exponential.\n\n/// ```\n\n/// # use rand_distr::Exp1;\n\n/// # use markovian::distributions::Unary;\n\n/// let exp_squared = Unary::new(|x: f64| x.powi(2_i32), Exp1);\n\n/// ```\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Unary<S, T, F, D> \n\nwhere\n\n F: Fn(S) -> T,\n\n D: Distribution<S>,\n", "file_path": "src/distributions/unary.rs", "rank": 47, "score": 17.40771066704862 }, { "content": "impl<T, W, R> Into<(DiGraph<T, W>, petgraph::graph::NodeIndex)> for FiniteMarkovChain<T, W, R>\n\nwhere\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n /// Performs the conversion.\n\n ///\n\n /// # Examples\n\n ///\n\n /// An absorbing Markov Chain with one transient state and one absorbing state.\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// # use petgraph::graph::DiGraph;\n\n /// let mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n /// let (graph, node) = mc.into();\n\n /// assert_eq!(graph[node], 0);\n\n /// assert_eq!(graph.neighbors(node).count(), 2);\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 48, "score": 17.34793470474759 }, { "content": " R: Rng,\n\n{\n\n type Item = T;\n\n\n\n #[inline]\n\n fn state(&self) -> Option<&Self::Item> {\n\n Some(&self.state_space[self.state_index])\n\n }\n\n\n\n #[inline]\n\n fn state_mut(&mut self) -> Option<&mut Self::Item> {\n\n Some(&mut self.state_space[self.state_index])\n\n }\n\n\n\n #[inline]\n\n fn set_state(\n\n &mut self,\n\n new_state: Self::Item,\n\n ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> {\n\n match self.state_space.iter().position(|s| *s == new_state) {\n", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 49, "score": 17.318552934125087 }, { "content": " R: Rng,\n\n{\n\n /// Creates a new Branching process. 
\n\n /// \n\n /// # Examples\n\n /// \n\n /// Construction using density p(0) = 0.3, p(1) = 0.4, p(2) = 0.3. \n\n /// ```\n\n /// # #![allow(unused_mut)]\n\n /// # use markovian::prelude::*;\n\n /// # use rand::prelude::*;\n\n /// let init_state: u32 = 1;\n\n /// let density = raw_dist![(0.3, 0), (0.4, 1), (0.3, 2)];\n\n /// let rng = thread_rng();\n\n /// let mut branching_process = Branching::new(init_state, density, rng);\n\n /// ``` \n\n ///\n\n #[inline]\n\n pub fn new(state: T, base_distribution: D, rng: R) -> Self {\n\n Branching {\n", "file_path": "src/processes/branching.rs", "rank": 50, "score": 16.913070867266477 }, { "content": "use thiserror::Error;\n\n\n\n#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd, Error)]\n\n#[error(\"the state {state:?} is not a valid assignation\")]\n\npub struct InvalidState<T: std::fmt::Debug> {\n\n state: T,\n\n}\n\n\n\nimpl<T: std::fmt::Debug> InvalidState<T> {\n\n\t#[inline]\n\n pub fn new(state: T) -> Self {\n\n InvalidState { state }\n\n }\n\n}\n", "file_path": "src/errors.rs", "rank": 51, "score": 16.834955059980857 }, { "content": "where\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n /// Sample a possible next state. 
\n\n #[inline]\n\n fn sample<R2>(&self, rng: &mut R2) -> T\n\n where\n\n R2: Rng + ?Sized,\n\n { \n\n let new_index = self.transition_matrix_variables[self.state_index].sample(rng);\n\n\n\n self.state_space[new_index].clone()\n\n }\n\n}\n\n\n\nimpl<W, R> From<(usize, Vec<Vec<W>>, R)> for FiniteMarkovChain<usize, W, R>\n\nwhere\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 52, "score": 16.60596768179914 }, { "content": "// Traits\n\nuse crate::{State, StateIterator};\n\nuse core::fmt::Debug;\n\nuse num_traits::{sign::Unsigned, One, Zero};\n\nuse rand::Rng;\n\nuse rand_distr::Distribution;\n\n\n\n// Structs\n\nuse crate::errors::InvalidState;\n\n\n\n// Functions\n\nuse core::mem;\n\n\n\n/// Branching process in the natural numbers NN = {0, 1, 2, ...}.\n\n/// \n\n/// A Branching process is characterized by a density p over NN. It can be \n\n/// thought of the size of a population. \n\n/// In this population, each individual is identical to the rest and they are \n\n/// independent of each other. Moreover, at each time step, \n\n/// individuals have descendents and die. 
Their descendants \n", "file_path": "src/processes/branching.rs", "rank": 53, "score": 16.49695247888869 }, { "content": "pub struct FiniteMarkovChain<T, W, R>\n\nwhere\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n state_index: usize,\n\n transition_matrix: Vec<Vec<W>>,\n\n transition_matrix_variables: Vec<WeightedAliasIndex<W>>,\n\n state_space: Vec<T>,\n\n rng: R,\n\n}\n\n\n\nimpl<T, W, R> FiniteMarkovChain<T, W, R>\n\nwhere\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 54, "score": 16.362995318213446 }, { "content": "///\n\n/// # Examples\n\n///\n\n/// Random walk in the integers.\n\n/// ```\n\n/// # use markovian::{MarkovChain, prelude::*};\n\n/// # use rand::prelude::*;\n\n/// let init_state: i32 = 0;\n\n/// let transition = |state: &i32| Raw::new(vec![(0.5, state + 1), (0.5, state - 1)]);\n\n/// MarkovChain::new(init_state, &transition, thread_rng());\n\n/// ```\n\n#[derive(Debug, Clone)]\n\npub struct MarkovChain<T, F, R> {\n\n state: T,\n\n transition: F,\n\n rng: R,\n\n}\n\n\n\nimpl<T, F, R> MarkovChain<T, F, R>\n\nwhere\n", "file_path": "src/markov_chain.rs", "rank": 55, "score": 16.314238615311154 }, { "content": " W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n\t/// Performs the conversion.\n\n\t///\n\n /// # Panics\n\n ///\n\n /// This method panics if: \n\n /// - Any vector of `transition_matrix` has more than u32::MAX columns.\n\n /// - For any entry w of any vector of `transition_matrix` v: \n\n /// w < 0 or w > max where max = W::MAX / v.len().\n\n /// - For any vector of `transition_matrix` the sum of weights is zero.\n\n fn from((state_index, transition_matrix, rng): (usize, Vec<Vec<W>>, R)) -> Self {\n\n let state_space: Vec<usize> = 
(0..transition_matrix.len()).collect();\n\n FiniteMarkovChain::new(state_index, transition_matrix, state_space, rng)\n\n }\n\n}\n\n\n\nimpl<T, W, R> From<(usize, ndarray::Array2<W>, Vec<T>, R)> for FiniteMarkovChain<T, W, R>\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 56, "score": 15.955913506941426 }, { "content": "// Traits\n\nuse rand::Rng;\n\nuse rand_distr::Distribution;\n\n\n\n// Structs\n\nuse core::marker::PhantomData;\n\n\n\n/// Concrete struct for the function of two Distributions`. \n\n/// \n\n/// # Examples\n\n/// \n\n/// A Beta(2, 1) as the sum of two exponentials(1).\n\n/// ```\n\n/// # use rand_distr::Exp1;\n\n/// # use markovian::distributions::Binary;\n\n/// let beta = Binary::new(|x: f64, y: f64| x + y, Exp1, Exp1);\n\n/// ```\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Binary<S1, S2, T, F, D1, D2> \n\nwhere\n", "file_path": "src/distributions/binary.rs", "rank": 57, "score": 15.911994370664381 }, { "content": "where\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n T: Debug + PartialEq + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n\t/// Performs the conversion.\n\n\t///\n\n /// # Panics\n\n ///\n\n /// This method panics if: \n\n /// - (In debug mode only) The dimensions of `state_space` and `transition_matrix` do not match.\n\n /// - `transition_matrix` has more than u32::MAX columns.\n\n /// - For any entry of `transition_matrix` w: \n\n /// w < 0 or w > max where max = W::MAX / transition_matrix.ncols().\n\n /// - For any row of `transition_matrix` the sum of weights is zero.\n\n\tfn from((state_index, transition_matrix, state_space, rng): (usize, ndarray::Array2<W>, Vec<T>, R)) -> Self {\n\n let transition_matrix: Vec<Vec<W>> = transition_matrix.genrows()\n\n .into_iter()\n\n .map(|weights| {\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 58, "score": 15.756250344793155 }, { "content": " fn state(&self) -> Option<&Self::Item> {\n\n Some(&self.state)\n\n }\n\n\n\n 
#[inline]\n\n fn state_mut(&mut self) -> Option<&mut Self::Item> {\n\n Some(&mut self.state)\n\n }\n\n\n\n #[inline]\n\n fn set_state(\n\n &mut self,\n\n mut new_state: Self::Item,\n\n ) -> Result<Option<Self::Item>, InvalidState<Self::Item>> {\n\n mem::swap(&mut self.state, &mut new_state);\n\n Ok(Some(new_state))\n\n }\n\n}\n\n\n\nimpl<T, F, R> Iterator for MarkovChain<T, F, R>\n", "file_path": "src/markov_chain.rs", "rank": 59, "score": 15.724426462401727 }, { "content": "//! \n\n//! ## Continuous space\n\n//!\n\n//! Randomize the transition: return a random element together with a probability one\n\n//!\n\n//! ### Examples\n\n//! \n\n//! A random walk on the real line with variable step size. \n\n//! ```\n\n//! # use rand_distr::Exp;\n\n//! # use rand::prelude::*;\n\n//! # use markovian::prelude::*;\n\n//! let init_state: f64 = 0.0;\n\n//! struct MyTransition;\n\n//! impl markovian::Transition<f64, f64> for MyTransition {\n\n//! fn sample_from<R: ?Sized>(&self, state: &f64, rng: &mut R) -> f64\n\n//! where\n\n//! R: Rng\n\n//! {\n\n//! 
let step = Exp::new(2.0).unwrap().sample(rng);\n", "file_path": "src/lib.rs", "rank": 61, "score": 15.217197605821251 }, { "content": " /// w < 0 or w > max where max = W::MAX / transition_matrix.ncols().\n\n /// - For any row of `transition_matrix` the sum of weights is zero.\n\n ///\n\n /// # Example\n\n ///\n\n /// An absorbing Markov Chain with one transient state and one absorbing state.\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::FiniteMarkovChain;\n\n /// # use markovian::State;\n\n /// let mut mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n /// assert_eq!(mc.state(), Some(&0));\n\n /// println!(\"At time {}, the state is {}\", 1_000, mc.nth(1_000).unwrap()); // Most likely 1\n\n /// ``` \n\n fn from((state_index, transition_matrix, rng): (usize, ndarray::Array2<W>, R)) -> Self {\n\n let state_space: Vec<usize> = (0..transition_matrix.nrows()).collect();\n\n FiniteMarkovChain::from((state_index, transition_matrix, state_space, rng))\n\n }\n\n}\n\n\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 62, "score": 14.559517985639353 }, { "content": " ///\n\n /// There is one absorbing state: state `b`, which has index `1`.\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// let mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()))\n\n /// .set_state_space(vec!['a', 'b']);\n\n /// assert_eq!(mc.absorbing_states_indexes(), vec![1]);\n\n /// ```\n\n #[inline]\n\n pub fn absorbing_states_indexes(&self) -> Vec<usize> {\n\n let transition_matrix = &self.transition_matrix;\n\n \t(0..self.state_space.len())\n\n .filter(|&i| {\n\n let quantities_check = transition_matrix[i].iter()\n\n .enumerate()\n\n .map(|(j, w)| {\n\n if j == i {\n\n w > &W::ZERO\n\n } else {\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 63, "score": 14.330856277317668 }, { "content": " weights.to_vec()\n\n })\n\n 
.collect();\n\n FiniteMarkovChain::new(state_index, transition_matrix, state_space, rng)\n\n }\n\n}\n\n\n\nimpl<W, R> From<(usize, ndarray::Array2<W>, R)> for FiniteMarkovChain<usize, W, R>\n\nwhere\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n\n R: Rng + Debug + Clone,\n\n{\n\n\t/// Performs the conversion.\n\n\t///\n\n /// # Panics\n\n ///\n\n /// This method panics if: \n\n /// - `transition_matrix` has more than u32::MAX columns.\n\n /// - For any entry of `transition_matrix` w: \n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 64, "score": 13.59787424625264 }, { "content": "}\n\n\n\nimpl<S1, S2, T, F, D1, D2> Distribution<T> for Binary<S1, S2, T, F, D1, D2>\n\nwhere\n\n F: Fn(S1, S2) -> T,\n\n D1: Distribution<S1>,\n\n D2: Distribution<S2>,\n\n{\n\n #[inline]\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> T {\n\n (self.func)(self.distr_1.sample(rng), self.distr_2.sample(rng))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\tuse rand_distr::Exp1;\n\n\n\n #[test]\n", "file_path": "src/distributions/binary.rs", "rank": 65, "score": 13.59430199185643 }, { "content": " /// assert!(mc.may_absorb());\n\n /// ``` \n\n #[inline]\n\n pub fn may_absorb(&self) -> bool {\n\n let set: std::collections::HashSet<_> = self.absorbing_states_indexes().into_iter().collect();\n\n let (graph, node) = self.clone().into();\n\n let mut bfs = petgraph::visit::Bfs::new(&graph, node);\n\n while let Some(other_node) = bfs.next(&graph) {\n\n if set.contains(&other_node.index()) {\n\n return true\n\n } \n\n }\n\n false\n\n }\n\n}\n\n\n\nimpl<T, W, R> State for FiniteMarkovChain<T, W, R>\n\nwhere\n\n W: AliasableWeight + Debug + Clone,\n\n Uniform<W>: Debug + Clone,\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 66, "score": 13.535616709619688 }, { "content": " /// even if they are not recheable from the current state.\n\n ///\n\n /// # Panics\n\n ///\n\n /// In debug mode, if `new_state_space` is 
not as long as the current state space. \n\n ///\n\n /// # Examples\n\n ///\n\n /// Changing from numbers to letters.\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// let mc = FiniteMarkovChain::from((1, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n /// assert_eq!(mc.state(), Some(&1));\n\n /// let mc = mc.set_state_space(vec!['a', 'b']);\n\n /// assert_eq!(mc.state(), Some(&'b'));\n\n /// ```\n\n #[inline]\n\n pub fn set_state_space<U>(self, new_state_space: Vec<U>) -> FiniteMarkovChain<U, W, R> \n\n where\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 67, "score": 13.488384392443491 }, { "content": " #[inline]\n\n pub fn new(\n\n state_index: usize,\n\n transition_weights: Vec<Vec<W>>,\n\n state_space: Vec<T>,\n\n rng: R,\n\n ) -> Self {\n\n let transition_matrix: Vec<WeightedAliasIndex<W>> = transition_weights.clone()\n\n .into_iter()\n\n .map(|weights| WeightedAliasIndex::new(weights).unwrap())\n\n .collect();\n\n let transiton_clock: Vec<W> = transition_weights.into_iter()\n\n .map(|weights| weights.into_iter().sum::<W>())\n\n .collect();\n\n ContFiniteMarkovChain {\n\n state_index,\n\n transition_matrix,\n\n transiton_clock,\n\n state_space,\n\n rng,\n", "file_path": "src/continuous_finite_markov_chain.rs", "rank": 68, "score": 13.109169676751044 }, { "content": " D: Distribution<S>,\n\n{\n\n #[inline]\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> T {\n\n (self.func)(self.distr.sample(rng))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\tuse rand_distr::Exp1;\n\n\n\n #[test]\n\n fn use_cases() {\n\n let mut rng = crate::tests::rng(1);\n\n let expected = 0.5851203106605716 - 1.;\n\n let sample: f64 = Unary::new(|x: f64| x - 1., Exp1).sample(&mut rng);\n\n\n\n assert_eq!(sample, expected);\n\n\n\n let expected = 0.15721404552459717;\n\n let sample: f64 = Unary::new(|x:f64| x + 1., Unary::new(|x: f32| (x - 1.) 
as f64, Exp1)).sample(&mut rng);\n\n assert_eq!(sample, expected);\n\n }\n\n}\n", "file_path": "src/distributions/unary.rs", "rank": 70, "score": 12.798672466158056 }, { "content": "//! let init_state: u32 = 1;\n\n//! let base_distribution = raw_dist![(0.3, 0), (0.4, 1), (0.3, 2)];\n\n//! let rng = thread_rng();\n\n//! let mut branching_process = Branching::new(init_state, base_distribution, rng);\n\n//! ``` \n\n//! ## Continuous time\n\n//! \n\n//! Construction of a random walk in the integers, with expponential time for each transition.\n\n//! ```\n\n//! # #![allow(unused_mut)]\n\n//! # use rand::prelude::*;\n\n//! # use rand_distr::{Exp, Uniform};\n\n//! # use markovian::prelude::*;\n\n//! let init_state: i32 = 0;\n\n//! struct MyTransition;\n\n//! impl markovian::Transition<i32, (f64, i32)> for MyTransition {\n\n//! fn sample_from<R: ?Sized>(&self, state: &i32, rng: &mut R) -> (f64, i32)\n\n//! where\n\n//! R: Rng\n\n//! {\n", "file_path": "src/lib.rs", "rank": 71, "score": 12.705060775144755 }, { "content": "///\n\n/// # Warning\n\n///\n\n/// The user should make sure that the indexes resulting from random transitions \n\n/// correspond to a state in the state space. In other words, new indexes\n\n/// should always be less than the length of the state space. \n\n///\n\n/// # Examples\n\n///\n\n/// The easiest way is construct a finite Markov Chain is from a transition matrix. \n\n/// This has been abstracted by using `from`. 
For example,\n\n/// an absorbing Markov Chain with one transient state and one absorbing state.\n\n/// ```\n\n/// # use ndarray::array;\n\n/// # use markovian::{FiniteMarkovChain, State};\n\n/// let mut mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n/// assert_eq!(mc.state(), Some(&0));\n\n/// println!(\"At time {}, the state is {}\", 1_000, mc.nth(1_000).unwrap()); // Most likely 1\n\n/// ```\n\n#[derive(Debug, Clone)]\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 72, "score": 12.576474927706412 }, { "content": " /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// let mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()))\n\n /// .set_state_space(vec!['a', 'b']);\n\n /// assert_eq!(mc.absorbing_states(), vec![&'b']);\n\n /// ```\n\n #[inline]\n\n pub fn absorbing_states(&self) -> Vec<&T> {\n\n \tself.absorbing_states_indexes()\n\n \t\t.iter()\n\n \t\t.map(|&i| &self.state_space()[i])\n\n \t\t.collect()\n\n }\n\n\n\n /// Returns the indexes indexes of all absorbing state, if any.\n\n ///\n\n /// An absorbing state is a state such that, if the process starts there, \n\n /// it will allways be there, i.e. the probability of moving to itself is one.\n\n ///\n\n /// # Examples\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 73, "score": 12.57260470476633 }, { "content": " /// The state space is the collection of all values the chain might ever take,\n\n /// even if they are not recheable from the current state.\n\n ///\n\n /// # Examples\n\n ///\n\n /// The state space can be more than one state, \n\n /// even if the Markov Chain is already absorbed. 
\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// let mc = FiniteMarkovChain::from((1, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n /// assert_eq!(mc.state_space(), &vec![0, 1]);\n\n /// ```\n\n #[inline]\n\n pub fn state_space(&self) -> &Vec<T> {\n\n &self.state_space\n\n } \n\n\n\n /// Returns the size of the state space.\n\n ///\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 74, "score": 12.49429872765829 }, { "content": " rng: R,\n\n ) -> Self {\n\n let transition_matrix_variables = transition_matrix.clone().into_iter()\n\n \t.map(|v| WeightedAliasIndex::new(v).unwrap())\n\n \t.collect();\n\n\n\n FiniteMarkovChain::new_raw(\n\n state_index,\n\n transition_matrix,\n\n transition_matrix_variables,\n\n state_space,\n\n rng\n\n )\n\n }\n\n\n\n #[inline]\n\n fn new_raw(\n\n state_index: usize,\n\n transition_matrix: Vec<Vec<W>>,\n\n transition_matrix_variables: Vec<WeightedAliasIndex<W>>, \n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 75, "score": 12.306995357438103 }, { "content": " \tU: Debug + PartialEq + Clone,\n\n {\n\n FiniteMarkovChain::new_raw( \n\n\t\t self.state_index,\n\n\t\t self.transition_matrix,\n\n\t\t self.transition_matrix_variables,\n\n\t\t new_state_space,\n\n\t\t self.rng,\n\n )\n\n }\n\n\n\n /// Returns all absorbing state, if any.\n\n ///\n\n /// An absorbing state is a state such that, if the process starts there, \n\n /// it will allways be there, i.e. 
the probability of moving to itself is one.\n\n ///\n\n /// # Examples\n\n ///\n\n /// There is one absorbing state: state `b`.\n\n /// ```\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 76, "score": 12.153202441410158 }, { "content": "### Good\n\n\n\nStruct P<T>: and from methods for different T: Copy + Clone + Debug + PartialOrd + Num\n\n\n\n- Pros\n\n\n\n - More general than closed01\n\n - Checked initialization in debug mode (use assert_debug!)\n\n- Implement any trait we want, e.g. [num_traits](https://docs.rs/num-traits/0.2.12/num_traits/index.html) and some [core::ops](https://doc.rust-lang.org/nightly/core/ops/index.html).\n\n - Accept unums or posits, e.g. [softposit](https://crates.io/crates/softposit) (best implementation of the best rivel to floating point!)\n\n - [Visualization](https://cse512-19s.github.io/FP-Well-Rounded/)\n\n - [Paper](http://www.johngustafson.net/pdfs/BeatingFloatingPoint.pdf)\n\n\n\n#### Rejected\n\n\n\n- [closed01](https://crates.io/crates/closed01): a new type of float with extra methods. \n\n - Pros:\n\n - Add\n\n - WrappingAdd\n\n - SaturingAdd\n\n - Cons:\n\n - Still only floats\n\n \n", "file_path": "TODO.md", "rank": 77, "score": 11.775125360920233 }, { "content": "// Traits\n\nuse rand_distr::Distribution;\n\nuse crate::traits::{State, StateIterator, Transition};\n\nuse core::fmt::Debug;\n\nuse rand::Rng;\n\n\n\n// Structs\n\nuse crate::errors::InvalidState;\n\nuse core::marker::PhantomData;\n\n\n\n// Functions\n\nuse core::mem;\n\n\n\n/// Markov Chain in continuous time, with arbitrary space.\n\n///\n\n/// Rust allows for more than only exponential time in the transitions, so \n\n/// does this crate. 
\n\n/// \n\n/// # Remarks\n\n/// \n", "file_path": "src/timed_markov_chain.rs", "rank": 78, "score": 11.752318339214323 }, { "content": "// Traits\n\nuse crate::{State, StateIterator};\n\nuse core::fmt::Debug;\n\nuse rand::Rng;\n\nuse rand_distr::{weighted_alias::{WeightedAliasIndex, AliasableWeight}, Uniform, Distribution};\n\n\n\n// Structs\n\nuse crate::errors::InvalidState;\n\nuse petgraph::graph::DiGraph;\n\n\n\n// Functions\n\nuse core::mem;\n\n\n\n/// Finite state Markov Chain in discrete time. \n\n/// \n\n/// # Costs\n\n/// \n\n/// **Construction**: O(n^2), where n is the size of the state space.\n\n/// \n\n/// **Sample**: O(1).\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 79, "score": 11.713243377663002 }, { "content": "//! (1.0 - 1.0 / (path_stadistic.abs() + 1) as f64, right), \n\n//! (1.0 / (path_stadistic.abs() + 1) as f64, left)\n\n//! ]\n\n//! }\n\n//! };\n\n//! let rng = thread_rng();\n\n//! let mut mc = markovian::MarkovChain::new(init_state, transition, rng);\n\n//! \n\n//! // state has history\n\n//! mc.next();\n\n//! assert_eq!(mc.state().unwrap().len(), 2);\n\n//! ```\n\n//! 
\n\npub use self::continuous_finite_markov_chain::ContFiniteMarkovChain;\n\npub use self::finite_markov_chain::FiniteMarkovChain;\n\npub use self::markov_chain::MarkovChain;\n\npub use self::timed_markov_chain::TimedMarkovChain;\n\npub use self::traits::{State, StateIterator, Transition};\n\n\n\n/// Generating random trajectories from stochactic processes\n", "file_path": "src/lib.rs", "rank": 80, "score": 11.30647119918135 }, { "content": "// Traits\n\nuse rand_distr::Distribution;\n\nuse crate::traits::{State, StateIterator, Transition};\n\nuse core::fmt::Debug;\n\nuse rand::Rng;\n\n\n\n// Structs\n\nuse crate::errors::InvalidState;\n\n\n\n// Functions\n\nuse core::mem;\n\n\n\n/// Markov Chain in discrete time, with arbitrary space.\n\n///\n\n/// # Remarks\n\n/// \n\n/// If your transition function `transition` could reuse of structs that implement\n\n/// the `Distribution<T>` trait in order to sample the next state, then, \n\n/// for the best performance possible, create your own struct that implements\n\n/// the `Transition<T, T>` trait.\n", "file_path": "src/markov_chain.rs", "rank": 81, "score": 11.27169721570398 }, { "content": "//! let time = Exp::new(2.0).unwrap().sample(rng);\n\n//! let step = Uniform::from(0..=1).sample(rng) * 2 - 1;\n\n//! (time, state + step)\n\n//! }\n\n//! }\n\n//! let transition = MyTransition;\n\n//! let rng = thread_rng();\n\n//! let mut mc = markovian::TimedMarkovChain::new(init_state, transition, rng);\n\n//! ``` \n\n//!\n\n//! # Remarks\n\n//!\n\n//! All methods are `inline`, by design.\n\n//! \n\n//! Non-trivial ways to use the crate are described below, including time dependence, \n\n//! continuous space and non-markovian processes.\n\n//!\n\n//! ## Time dependence\n\n//!\n\n//! 
Include the time as part of the state of the process.\n", "file_path": "src/lib.rs", "rank": 82, "score": 10.927266439655487 }, { "content": "// Traits\n\nuse core::fmt::Debug;\n\nuse num_traits::{One, Zero};\n\nuse rand::Rng;\n\nuse rand_distr::Distribution;\n\n\n\n// use num_traits::Zero;\n\n\n\n/// Distribution over possibly infinte iterators. \n\n/// \n\n/// A random variable is represented by an iterator that yields values ``(P, T)``,\n\n/// where ``P`` represents the probability of the realization ``T``.\n\n/// See [Distribution implementation] for the trait constrains over ``P``.\n\n/// \n\n/// # Examples\n\n/// \n\n/// With help of the `raw_dist` macro, we construct a random variable that samples always a fixed value.\n\n/// ```\n\n/// # use markovian::prelude::*;\n\n/// # use rand::prelude::*;\n", "file_path": "src/distributions/raw.rs", "rank": 83, "score": 10.915609404524444 }, { "content": " ///\n\n /// Although the state the Markov Chain does not change, \n\n /// its random number generator does. 
That is why this method needs `&mut self`.\n\n ///\n\n /// # Examples\n\n ///\n\n /// From the current state, the next index has equal probability of being `0` or `1`.\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// let mut mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n /// println!(\"The next index could be {}\", mc.sample_index()); // 50% 0 and 50% 1.\n\n /// ```\n\n #[inline]\n\n pub fn sample_index(&mut self) -> usize {\n\n self.transition_matrix_variables[self.state_index].sample(&mut self.rng)\n\n }\n\n\n\n /// Returns the state space of the Markov Chain.\n\n ///\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 84, "score": 10.6287057713355 }, { "content": "# To do\n\n\n\nRe-factor to be only stochastic processes and not sub-stochastic ones.\n\n\n\n## Derive Macros\n\n\n\n- State\n\n - Needs to read where_clauses to write them again in the implementation of State trait\n\n - Needs to be given (input) the type of Item (...which is always `T`). \n\n - Nice error message!\n\n- StateIterator\n\n - ??\n\n\n\nOnce implemented, clean the code for all the structs. 
\n\n\n\n## FiniteMarkovChain\n\n\n\n- Construction\n\n - Checks of state_space!\n\n - What is the condition you want\n\n - state_space must be different elements\n\n - set_state_space\n\n - state_space must be different elements\n\n - From Vec<T>\n\n - From a sample, \n\n - Consider the initial state\n\n - Discover the state space\n\n - Count each transition\n\n - Construct the chain \n\n - Add to lib module documentation\n\n- Move from panicking to errors\n\n - Create errors\n\n\n\n## ContFiniteMarkovChain\n\n\n\n- The same\n\n- has_absorbing_state(&self) -> bool\n\n\n\n## Macros\n\n\n\n- Learn to do useful macros!\n\n\n\n## Organization\n\n\n\n- **Modules organization:** different algorithms for simulation\n\n - Exact\n\n - Fast\n\n - Sample speed\n\n - Accurate\n\n - epsilon-strong\n\n - Brownian motion\n\n\n\n## Jump processes\n\n\n\nIn a few cases, the marginal distributions of the increments have a simple form such as a **gamma distribution**, a **stable distribution**, or an **inverse Gaussian distribution** so that special methods for such distributions allow to generate discrete skeletons. \n\n\n\n- [x] Possion process\n\n- [ ] Levy process\n\n\n\n\n\n\n\n## Abstract probabilities\n\n\n\nNo longer use only f64 to represent probabilities\n\n\n\nRequirements:\n\n\n\n- Need to represent cummulative probabilities, to simulate raw_dist!\n\n\n\n### Options\n\n\n\n### Best\n\n\n\n`Unit` wrapper from `nalgebra`!\n\n\n\nhttps://docs.rs/nalgebra/0.26.2/nalgebra/base/struct.Unit.html\n\n\n\nIdea: `Distribution<[T; N]>` wrapper for vectors, with constant generics!\n\n\n\n- Implementation: prob-num crate!\n\n\n", "file_path": "TODO.md", "rank": 85, "score": 10.60432284534424 }, { "content": " /// The state space is the collection of all values the chain might ever take,\n\n /// even if they are not recheable from the current state.\n\n ///\n\n /// # Examples\n\n ///\n\n /// A Markov Chain with two states. 
\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::FiniteMarkovChain;\n\n /// let mc = FiniteMarkovChain::from((1, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n /// assert_eq!(mc.nstates(), 2);\n\n /// ```\n\n #[inline]\n\n pub fn nstates(&self) -> usize {\n\n self.state_space().len()\n\n } \n\n\n\n /// Changes the state space of the Markov Chain.\n\n ///\n\n /// The state space is the collection of all values the chain might ever take,\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 86, "score": 10.49427391955882 }, { "content": "pub use self::state::State;\n\npub use self::state_iterator::StateIterator;\n\npub use self::transition::Transition;\n\n\n\nmod state;\n\nmod state_iterator;\n\nmod transition;\n", "file_path": "src/traits.rs", "rank": 87, "score": 10.357390618001421 }, { "content": "{\n\n type Item = T;\n\n\n\n /// Changes the state of the Branching to a new state, chosen \n\n /// according to the distribution for offsprings, and returns the new state. \n\n /// \n\n /// # Examples\n\n /// \n\n /// ```\n\n /// # use rand::prelude::*;\n\n /// # use markovian::prelude::*;\n\n /// let init_state: u32 = 1;\n\n /// let density = raw_dist![(0.3, 0), (0.4, 1), (0.3, 2)];\n\n /// let rng = thread_rng();\n\n /// let mut branching_process = Branching::new(init_state, density, rng);\n\n ///\n\n /// // The next state is 0, 1 or 2. \n\n /// let new_state = branching_process.next();\n\n /// assert!( (new_state == Some(0)) || (new_state == Some(1)) || (new_state == Some(2)) );\n\n /// ```\n", "file_path": "src/processes/branching.rs", "rank": 88, "score": 10.227343491178656 }, { "content": "//!\n\n//! ### Examples\n\n//! \n\n//! A random walk on the integers that tends to move more to the right as time goes by. \n\n//! ```\n\n//! # #![allow(unused_mut)]\n\n//! # use rand::prelude::*;\n\n//! # use rand_distr::{Exp, Uniform};\n\n//! # use markovian::prelude::*;\n\n//! let init_state: (usize, i32) = (0, 0);\n\n//! 
let transition = |(time, state): &(usize, i32)| raw_dist![\n\n//! (0.6 - 1.0 / (time + 2) as f64, (time + 1, state + 1)),\n\n//! (0.4 + 1.0 / (time + 2) as f64, (time + 1, state - 1))\n\n//! ];\n\n//! let rng = thread_rng();\n\n//! let mut mc = markovian::MarkovChain::new(init_state, &transition, rng);\n\n//! \n\n//! // Take a sample of 10 elements \n\n//! mc.take(10).map(|(_, state)| state).collect::<Vec<i32>>();\n\n//! ```\n", "file_path": "src/lib.rs", "rank": 89, "score": 9.855801178412321 }, { "content": " /// from the current state.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Checking the possibility of achieving a state from different initial states.\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// let mut mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()))\n\n /// .set_state_space(vec!['x', 'y']);\n\n /// assert!(mc.may_achieve('x').unwrap());\n\n /// assert!(mc.may_achieve('y').unwrap());\n\n /// mc.set_state('y');\n\n /// assert!(!mc.may_achieve('x').unwrap());\n\n /// assert!(mc.may_achieve('y').unwrap());\n\n /// ```\n\n #[inline]\n\n pub fn may_achieve(&self, query: T) -> Result<bool, InvalidState<T>> {\n\n match self.state_space.iter().position(|s| *s == query) {\n\n Some(state_index) => {\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 90, "score": 9.748182746781236 }, { "content": "{\n\n /// Constructs a new `FiniteMarkovChain<T, W, R>`.\n\n /// \n\n /// # Panics\n\n /// \n\n /// # Panics\n\n ///\n\n /// This method panics if: \n\n /// - The`state_space` vector has repeated elements\n\n /// (defined by PartialEq).\n\n /// - The dimensions of `state_space` and `transition_matrix` do not match.\n\n /// - Any vector of `transition_matrix` has more than u32::MAX columns.\n\n /// - For any entry w of any vector of `transition_matrix` v: \n\n /// w < 0 or w > max where max = W::MAX / v.len().\n\n /// - For any vector of `transition_matrix` the sum of 
weights is zero.\n\n #[inline]\n\n pub fn new(\n\n state_index: usize,\n\n transition_matrix: Vec<Vec<W>>,\n\n state_space: Vec<T>,\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 91, "score": 9.678872268072116 }, { "content": "//! state + step\n\n//! }\n\n//! }\n\n//! let transition = MyTransition;\n\n//! let rng = thread_rng();\n\n//! let mut mc = markovian::MarkovChain::new(init_state, transition, rng);\n\n//! mc.next();\n\n//! \n\n//! // current_state is positive \n\n//! assert!(mc.state().unwrap() > &0.0);\n\n//! ```\n\n//! \n\n//! ## Non markovian\n\n//!\n\n//! Include history in the state. For example, instead of `i32`, use `Vec<i32>`. \n\n//!\n\n//! ### Examples\n\n//! \n\n//! A random walk on the integers that is atracted to zero in a non markovian\n\n//! fashion. \n", "file_path": "src/lib.rs", "rank": 92, "score": 9.516512692551528 }, { "content": " acc = acc + self.base_distribution.sample(rng);\n\n count = count + T::one();\n\n }\n\n acc\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::prelude::*;\n\n\n\n #[test]\n\n fn value_stability() {\n\n let expected = vec![2, 1, 2, 1, 1, 2, 4, 3, 2, 1, 1, 0];\n\n let init_state: u32 = 1;\n\n let density = raw_dist![(0.3, 0), (0.4, 1), (0.3, 2)];\n\n let rng = crate::tests::rng(1);\n\n let branching_process = Branching::new(init_state, density, rng);\n\n let sample: Vec<u32> = branching_process.take(12).collect();\n\n assert_eq!(sample, expected);\n\n }\n\n}", "file_path": "src/processes/branching.rs", "rank": 93, "score": 9.25210375592958 }, { "content": "//! Simulation of (sub-)stochastic processes.\n\n//!\n\n//! # Goal\n\n//!\n\n//! Serve as an extension of the [rand crate](https://crates.io/crates/rand) for sub-stochastic processes.\n\n//! \n\n//! # Examples\n\n//!\n\n//! ## Finite Markov Chains\n\n//!\n\n//! An absorbing Markov Chain with one transient state and one absorbing state.\n\n//! ```\n\n//! # use ndarray::array;\n\n//! 
# use markovian::State;\n\n//! let mut mc = markovian::FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n\n//! assert_eq!(mc.state(), Some(&0));\n\n//! assert_eq!(mc.state_space(), &vec![0, 1]);\n\n//! println!(\"At time {}, the state is {}\", 1_000, mc.nth(1_000).unwrap()); // Most likely 1\n\n//! ``` \n\n//!\n", "file_path": "src/lib.rs", "rank": 94, "score": 9.241545738504666 }, { "content": " state_space: Vec<T>,\n\n rng: R,\n\n ) -> Self {\n\n let state_space_len_true: usize = state_space.iter()\n\n .map(|x| state_space.iter().filter(|&y| x == y).count())\n\n .sum();\n\n assert_eq!(state_space_len_true, state_space.len());\n\n assert_eq!(transition_matrix.len(), state_space.len());\n\n FiniteMarkovChain {\n\n state_index,\n\n transition_matrix,\n\n transition_matrix_variables,\n\n state_space,\n\n rng,\n\n }\n\n }\n\n\n\n /// Samples a possible index for the next state.\n\n ///\n\n /// # Remarks\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 95, "score": 8.953311159089026 }, { "content": "/// let value = 0;\n\n/// let dis = raw_dist![(1, value)];\n\n///\n\n/// assert_eq!(value, dis.sample(&mut thread_rng()));\n\n/// ```\n\n/// \n\n/// # Panics\n\n/// \n\n/// Panics if probabilities: \n\n/// - Are strictly less than zero. \n\n/// - Sum up strictly more than one.\n\n/// \n\n/// # Costs\n\n/// \n\n/// Sample cost: O(iterator length).\n\n/// Construction cost: O(1).\n\n/// \n\n/// # Remarks\n\n/// \n\n/// This struct is meant to be used when one needs to sample once from an infinte iterator.\n", "file_path": "src/distributions/raw.rs", "rank": 96, "score": 8.90743876716201 }, { "content": "//! ## Discrete time\n\n//! \n\n//! Construction of a random walk in the integers.\n\n//! ```\n\n//! # #![allow(unused_mut)]\n\n//! # use markovian::prelude::*;\n\n//! # use rand::prelude::*;\n\n//! let init_state: i32 = 0;\n\n//! let transition = |state: &i32| raw_dist![(0.5, state + 1), (0.5, state - 1)];\n\n//! 
let rng = thread_rng();\n\n//! let mut mc = markovian::MarkovChain::new(init_state, transition, rng);\n\n//! ``` \n\n//! \n\n//! ## Branching process\n\n//!\n\n//! Construction using density p(0) = 0.3, p(1) = 0.4, p(2) = 0.3. \n\n//! ```\n\n//! # #![allow(unused_mut)]\n\n//! # use markovian::prelude::*;\n\n//! # use rand::prelude::*;\n", "file_path": "src/lib.rs", "rank": 97, "score": 8.719189377812281 }, { "content": " Ok(self.may_achieve_index(state_index))\n\n },\n\n None => Err(InvalidState::new(query)),\n\n }\n\n }\n\n\n\n /// Returns `true` if the Markov Chain contains a recheable absorbing state, \n\n /// from the current state.\n\n ///\n\n /// An absorbing state is a state such that, if the process starts there, \n\n /// it will allways be there, i.e. the probability of moving to itself is one.\n\n /// A reacheable state is a state that can be reached with positive probability.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Checking the possibility of achieving a state from different initial states.\n\n /// ```\n\n /// # use ndarray::array;\n\n /// # use markovian::{FiniteMarkovChain, State};\n\n /// let mut mc = FiniteMarkovChain::from((0, array![[0.5, 0.5], [0.0, 1.0]], rand::thread_rng()));\n", "file_path": "src/finite_markov_chain/fast_sample.rs", "rank": 98, "score": 8.678184054697157 }, { "content": "{\n\n func: F,\n\n distr: D,\n\n phantom: PhantomData<(S, T)>\n\n}\n\n\n\nimpl<S, T, F, D> Unary<S, T, F, D>\n\nwhere\n\n F: Fn(S) -> T,\n\n D: Distribution<S>,\n\n{\n\n #[inline]\n\n pub fn new(func: F, distr: D) -> Self {\n\n Unary { func, distr, phantom: PhantomData }\n\n }\n\n}\n\n\n\nimpl<S, T, F, D> Distribution<T> for Unary<S, T, F, D>\n\nwhere\n\n F: Fn(S) -> T,\n", "file_path": "src/distributions/unary.rs", "rank": 99, "score": 8.491335771571404 } ]
Rust
src/libinput.rs
harshadgavali/gnome-x11-gesture-daemon
3c5a56a5ca9cf151bcee05bb8bbc8af3309b5f12
use std::{ fs::{File, OpenOptions}, os::unix::prelude::{AsRawFd, FromRawFd, IntoRawFd, OpenOptionsExt, RawFd}, path::Path, sync::mpsc, }; use input::{ event::{ gesture::{GestureHoldEvent, GesturePinchEvent, GestureSwipeEvent}, Event, GestureEvent, }, ffi::{ libinput_event_gesture_get_angle_delta, libinput_event_gesture_get_cancelled, libinput_event_gesture_get_dx_unaccelerated, libinput_event_gesture_get_dy_unaccelerated, libinput_event_gesture_get_finger_count, libinput_event_gesture_get_scale, libinput_event_gesture_get_time, }, Libinput, LibinputInterface, }; use libc::{poll, pollfd}; use serde::{Deserialize, Serialize}; use zvariant::derive::Type; #[derive(Debug, Deserialize, Serialize, Type)] pub struct CustomSwipeEvent { pub stage: String, pub fingers: i32, pub dx: f64, pub dy: f64, pub time: u32, } #[derive(Debug, Deserialize, Serialize, Type)] pub struct CustomHoldEvent { pub stage: String, pub fingers: i32, pub time: u32, pub is_cancelled: bool, } #[derive(Debug, Deserialize, Serialize, Type)] pub struct CustomPinchEvent { pub stage: String, pub fingers: i32, pub angle_delta: f64, pub scale: f64, pub time: u32, } pub enum CustomGestureEvent { Swipe(CustomSwipeEvent), Hold(CustomHoldEvent), Pinch(CustomPinchEvent), } struct Interface; impl LibinputInterface for Interface { #[allow(clippy::bad_bit_mask)] fn open_restricted(&mut self, path: &Path, flags: i32) -> Result<RawFd, i32> { OpenOptions::new() .custom_flags(flags) .read((flags & libc::O_RDONLY != 0) | (flags & libc::O_RDWR != 0)) .write((flags & libc::O_WRONLY != 0) | (flags & libc::O_RDWR != 0)) .open(path) .map(|file| file.into_raw_fd()) .map_err(|err| err.raw_os_error().unwrap()) } fn close_restricted(&mut self, fd: RawFd) { unsafe { File::from_raw_fd(fd); } } } pub fn handle_swipe(swipe: GestureSwipeEvent, transmitter: &mpsc::Sender<CustomGestureEvent>) { let stage = match &swipe { GestureSwipeEvent::Begin(_) => "Begin", GestureSwipeEvent::Update(_) => "Update", GestureSwipeEvent::End(_) => "End", 
_ => panic!("Unkown gesture event {:?}", swipe), }; let (fingers, dx, dy, time) = unsafe { let raw_gesture_event = input::AsRaw::as_raw_mut(&swipe); ( libinput_event_gesture_get_finger_count(raw_gesture_event), libinput_event_gesture_get_dx_unaccelerated(raw_gesture_event), libinput_event_gesture_get_dy_unaccelerated(raw_gesture_event), libinput_event_gesture_get_time(raw_gesture_event), ) }; let swipe = CustomSwipeEvent { stage: stage.into(), fingers, dx, dy, time, }; transmitter.send(CustomGestureEvent::Swipe(swipe)).unwrap(); } pub fn handle_hold(hold: GestureHoldEvent, transmitter: &mpsc::Sender<CustomGestureEvent>) { let stage = match &hold { GestureHoldEvent::Begin(_) => "Begin", GestureHoldEvent::End(_) => "End", _ => panic!("Unkown gesture event {:?}", hold), }; let (fingers, time, is_cancelled) = unsafe { let raw_gesture_event = input::AsRaw::as_raw_mut(&hold); ( libinput_event_gesture_get_finger_count(raw_gesture_event), libinput_event_gesture_get_time(raw_gesture_event), matches!(hold, GestureHoldEvent::End(_)) && libinput_event_gesture_get_cancelled(raw_gesture_event) != 0, ) }; if fingers < 3 { return; } let hold = CustomHoldEvent { stage: stage.into(), fingers, time, is_cancelled, }; transmitter.send(CustomGestureEvent::Hold(hold)).unwrap(); } fn handle_pinch(pinch: GesturePinchEvent, transmitter: &mpsc::Sender<CustomGestureEvent>) { let stage = match &pinch { GesturePinchEvent::Begin(_) => "Begin", GesturePinchEvent::Update(_) => "Update", GesturePinchEvent::End(_) => "End", _ => panic!("Unkown gesture event {:?}", pinch), }; let (fingers, angle_delta, scale, time) = unsafe { let raw_gesture_event = input::AsRaw::as_raw_mut(&pinch); ( libinput_event_gesture_get_finger_count(raw_gesture_event), libinput_event_gesture_get_angle_delta(raw_gesture_event), libinput_event_gesture_get_scale(raw_gesture_event), libinput_event_gesture_get_time(raw_gesture_event), ) }; if fingers < 3 { return; } let pinch = CustomPinchEvent { stage: stage.into(), fingers, 
angle_delta, scale, time, }; transmitter.send(CustomGestureEvent::Pinch(pinch)).unwrap(); } pub fn libinput_listener(transmitter: mpsc::Sender<CustomGestureEvent>) { let mut input = Libinput::new_with_udev(Interface); input.udev_assign_seat("seat0").unwrap(); const POLLIN: i16 = 1; let mut poll_fds = pollfd { fd: input.as_raw_fd(), events: POLLIN, revents: 0, }; loop { unsafe { poll(&mut poll_fds, 1, -1); } input.dispatch().unwrap(); loop { let event = input.next(); if event.is_none() { break; } if let Event::Gesture(gesture_event) = event.unwrap() { match gesture_event { GestureEvent::Hold(hold) => handle_hold(hold, &transmitter), GestureEvent::Swipe(swipe) => handle_swipe(swipe, &transmitter), GestureEvent::Pinch(pinch) => handle_pinch(pinch, &transmitter), _ => {} } } } } }
use std::{ fs::{File, OpenOptions}, os::unix::prelude::{AsRawFd, FromRawFd, IntoRawFd, OpenOptionsExt, RawFd}, path::Path, sync::mpsc, }; use input::{ event::{ gesture::{GestureHoldEvent, GesturePinchEvent, GestureSwipeEvent}, Event, GestureEvent, }, ffi::{ libinput_event_gesture_get_angle_delta, libinput_event_gesture_get_cancelled, libinput_event_gesture_get_dx_unaccelerated, libinput_event_gesture_get_dy_unaccelerated, libinput_event_gesture_get_finger_count, libinput_event_gesture_get_scale, libinput_event_gesture_get_time, }, Libinput, LibinputInterface, }; use libc::{poll, pollfd}; use serde::{Deserialize, Serialize}; use zvariant::derive::Type; #[derive(Debug, Deserialize, Serialize, Type)] pub struct CustomSwipeEvent { pub stage: String, pub fingers: i32, pub dx: f64, pub dy: f64, pub time: u32, } #[derive(Debug, Deserialize, Serialize, Type)] pub struct CustomHoldEvent { pub stage: String, pub fingers: i32, pub time: u32, pub is_cancelled: bool, } #[derive(Debug, Deserialize, Serialize, Type)] pub struct CustomPinchEvent { pub stage: String, pub fingers: i32, pub angle_delta: f64, pub scale: f64, pub time: u32, } pub enum CustomGestureEvent { Swipe(CustomSwipeEvent), Hold(CustomHoldEvent), Pinch(CustomPinchEvent), } struct Interface; impl LibinputInterface for Interface { #[allow(clippy::bad_bit_mask)]
fn close_restricted(&mut self, fd: RawFd) { unsafe { File::from_raw_fd(fd); } } } pub fn handle_swipe(swipe: GestureSwipeEvent, transmitter: &mpsc::Sender<CustomGestureEvent>) { let stage = match &swipe { GestureSwipeEvent::Begin(_) => "Begin", GestureSwipeEvent::Update(_) => "Update", GestureSwipeEvent::End(_) => "End", _ => panic!("Unkown gesture event {:?}", swipe), }; let (fingers, dx, dy, time) = unsafe { let raw_gesture_event = input::AsRaw::as_raw_mut(&swipe); ( libinput_event_gesture_get_finger_count(raw_gesture_event), libinput_event_gesture_get_dx_unaccelerated(raw_gesture_event), libinput_event_gesture_get_dy_unaccelerated(raw_gesture_event), libinput_event_gesture_get_time(raw_gesture_event), ) }; let swipe = CustomSwipeEvent { stage: stage.into(), fingers, dx, dy, time, }; transmitter.send(CustomGestureEvent::Swipe(swipe)).unwrap(); } pub fn handle_hold(hold: GestureHoldEvent, transmitter: &mpsc::Sender<CustomGestureEvent>) { let stage = match &hold { GestureHoldEvent::Begin(_) => "Begin", GestureHoldEvent::End(_) => "End", _ => panic!("Unkown gesture event {:?}", hold), }; let (fingers, time, is_cancelled) = unsafe { let raw_gesture_event = input::AsRaw::as_raw_mut(&hold); ( libinput_event_gesture_get_finger_count(raw_gesture_event), libinput_event_gesture_get_time(raw_gesture_event), matches!(hold, GestureHoldEvent::End(_)) && libinput_event_gesture_get_cancelled(raw_gesture_event) != 0, ) }; if fingers < 3 { return; } let hold = CustomHoldEvent { stage: stage.into(), fingers, time, is_cancelled, }; transmitter.send(CustomGestureEvent::Hold(hold)).unwrap(); } fn handle_pinch(pinch: GesturePinchEvent, transmitter: &mpsc::Sender<CustomGestureEvent>) { let stage = match &pinch { GesturePinchEvent::Begin(_) => "Begin", GesturePinchEvent::Update(_) => "Update", GesturePinchEvent::End(_) => "End", _ => panic!("Unkown gesture event {:?}", pinch), }; let (fingers, angle_delta, scale, time) = unsafe { let raw_gesture_event = input::AsRaw::as_raw_mut(&pinch); 
( libinput_event_gesture_get_finger_count(raw_gesture_event), libinput_event_gesture_get_angle_delta(raw_gesture_event), libinput_event_gesture_get_scale(raw_gesture_event), libinput_event_gesture_get_time(raw_gesture_event), ) }; if fingers < 3 { return; } let pinch = CustomPinchEvent { stage: stage.into(), fingers, angle_delta, scale, time, }; transmitter.send(CustomGestureEvent::Pinch(pinch)).unwrap(); } pub fn libinput_listener(transmitter: mpsc::Sender<CustomGestureEvent>) { let mut input = Libinput::new_with_udev(Interface); input.udev_assign_seat("seat0").unwrap(); const POLLIN: i16 = 1; let mut poll_fds = pollfd { fd: input.as_raw_fd(), events: POLLIN, revents: 0, }; loop { unsafe { poll(&mut poll_fds, 1, -1); } input.dispatch().unwrap(); loop { let event = input.next(); if event.is_none() { break; } if let Event::Gesture(gesture_event) = event.unwrap() { match gesture_event { GestureEvent::Hold(hold) => handle_hold(hold, &transmitter), GestureEvent::Swipe(swipe) => handle_swipe(swipe, &transmitter), GestureEvent::Pinch(pinch) => handle_pinch(pinch, &transmitter), _ => {} } } } } }
fn open_restricted(&mut self, path: &Path, flags: i32) -> Result<RawFd, i32> { OpenOptions::new() .custom_flags(flags) .read((flags & libc::O_RDONLY != 0) | (flags & libc::O_RDWR != 0)) .write((flags & libc::O_WRONLY != 0) | (flags & libc::O_RDWR != 0)) .open(path) .map(|file| file.into_raw_fd()) .map_err(|err| err.raw_os_error().unwrap()) }
function_block-full_function
[ { "content": "struct Greeter {}\n\n\n\n#[dbus_interface(name = \"org.gestureImprovements.gestures\")]\n\nimpl Greeter {\n\n #[dbus_interface(signal)]\n\n fn touchpad_swipe(&self, event: &libinput::CustomSwipeEvent) -> zbus::Result<()>;\n\n\n\n #[dbus_interface(signal)]\n\n fn touchpad_hold(&self, event: &libinput::CustomHoldEvent) -> zbus::Result<()>;\n\n\n\n #[dbus_interface(signal)]\n\n fn touchpad_pinch(&self, event: &libinput::CustomPinchEvent) -> zbus::Result<()>;\n\n\n\n fn get_version(&mut self) -> String {\n\n const VERSION: Option<&'static str> = option_env!(\"CARGO_PKG_VERSION\");\n\n VERSION.unwrap_or(\"unknown\").into()\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 28434.426307474187 }, { "content": "fn display_info(arguments: Vec<String>) {\n\n const VERSION: Option<&'static str> = option_env!(\"CARGO_PKG_VERSION\");\n\n const COMMIT: Option<&'static str> = option_env!(\"GIT_HEAD_SHA\");\n\n\n\n match arguments[1].as_str() {\n\n \"--version\" => {\n\n println!(\"version: {}\", VERSION.unwrap_or(\"unknown\"));\n\n if let Some(commit) = COMMIT {\n\n println!(\"commit: {}\", commit)\n\n }\n\n }\n\n\n\n _ => {\n\n println!(\n\n \"Unknown argument: {:}\\n\\\n\n Supported arguments:\\n\\\n\n \\t--version\\tdisplay version information\\n\\\n\n \\nRun without arguments to start dbus service\",\n\n &arguments[1 .. 
arguments.len()].join(\" \")\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 23113.800157261874 }, { "content": "use std::os::unix::prelude::AsRawFd;\n\nuse std::sync::mpsc;\n\nuse std::thread;\n\nuse std::{convert::TryInto, time::Duration};\n\n\n\nuse libc::{poll, pollfd};\n\nuse zbus::{dbus_interface, fdo};\n\n\n\nmod libinput;\n", "file_path": "src/main.rs", "rank": 13, "score": 7.713994581484934 }, { "content": " let greeter = Greeter {};\n\n let path = &\"/org/gestureImprovements/gestures\".try_into().unwrap();\n\n object_server.at(path, greeter).unwrap();\n\n\n\n thread::spawn(|| {\n\n libinput::libinput_listener(transmitter);\n\n });\n\n\n\n // println!(\"starting loop\");\n\n\n\n let timeout = Duration::from_millis(1000);\n\n\n\n const POLLIN: i16 = 1; // available to read\n\n\n\n let mut poll_fds = pollfd {\n\n fd: connection.as_raw_fd(),\n\n events: POLLIN,\n\n revents: 0,\n\n };\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 2.849669425046422 }, { "content": " let starve_limit = 16;\n\n let mut msg_recv = starve_limit;\n\n\n\n loop {\n\n let msg = reciever.recv_timeout(timeout);\n\n\n\n match msg {\n\n Ok(msg) => {\n\n object_server\n\n .with(path, move |iface: &Greeter| match &msg {\n\n libinput::CustomGestureEvent::Hold(hold) => iface.touchpad_hold(hold),\n\n libinput::CustomGestureEvent::Swipe(swipe) => iface.touchpad_swipe(swipe),\n\n libinput::CustomGestureEvent::Pinch(pinch) => iface.touchpad_pinch(pinch),\n\n })\n\n .unwrap();\n\n msg_recv += 1;\n\n }\n\n\n\n Err(_) => {\n\n msg_recv = starve_limit;\n", "file_path": "src/main.rs", "rank": 15, "score": 2.191685016446732 }, { "content": "\n\n# Installation\n\n### User needs be in `input` group\n\n```\n\nsudo usermod -aG input $USER\n\n```\n\n* For Fedora Silverblue\n\n```\n\n# group 'input' needs to be in /etc/group first\n\ngrep -E '^input:' /usr/lib/group | sudo tee -a /etc/group\n\nsudo usermod -aG input $USER\n\n```\n\n\n\n### **From releases**\n\n- 
Download zip file from [releases](https://github.com/harshadgavali/gnome-x11-gesture-daemon/releases)\n\n- Extract zip file\n\n- Inside extracted folder, Run \n\n```\n\n# Without sudo/root\n\nsh install.sh # Reboot is needed after this\n\n\n\n# ** OR ** to restart daemon, without rebooting\n\nsh install.sh --restart\n\n\n\n```\n\n\n\n### Distributions\n\n\n\n* Arch Linux/Manjaro Linux - [gnome-x11-gesture-daemon](https://aur.archlinux.org/packages/gnome-x11-gesture-daemon) Thanks to @[yochananmarqos](https://github.com/yochananmarqos)\n\n```code\n\nyay -S gnome-x11-gesture-daemon\n\n```\n\n\n\n### Build from source\n\n\n\n#### Using docker/podman\n\n```\n\nmake build-docker && make install\n\n```\n\n\n\n#### Without docker/podman\n\n* First install build dependencies\n\n```\n\n# dnf/rpm based distributions\n\nsudo dnf install libinput-devel\n\n\n\n# apt/deb based distributions\n\nsudo apt install libinput-dev\n\n```\n\n* Then build and install\n\n```\n\nmake build && make install\n\n```\n\n\n\n### Troubleshooting\n\n- First make sure you've restarted your system after installing.\n\n\n\n- Run following command to check if service is running properly (It should be running on X11).\n\n```\n\nsystemctl --user status gesture_improvements_gesture_daemon.service\n\n```\n\n\n\n- Open issue on github, with output of the above command\n\n### Uninstallation\n\n```\n\n# Without sudo/root\n\nsh ./uninstall.sh\n\n```\n\n\n\n# Contributors\n\n[Swastik Dwivedi](https://github.com/drunckj)\n\n\n\n# Thanks\n\n[@Smithay](https://github.com/Smithay) for [rust bindings](https://crates.io/crates/input) for libinput\n\n\n\n[FreeDesktop/Dbus Project](https://gitlab.freedesktop.org/dbus/) for [Rust API](https://crates.io/crates/zbus) for D-Bus.\n", "file_path": "README.md", "rank": 16, "score": 2.0547560873802793 } ]
Rust
src/bvh.rs
q4x3/raytracer
7ebb6e1f505dae0985a972fab2d9237cbba3cbc9
use crate::{ aabb::AABB, hittable::{HitRecord, HitTable}, ray::Ray, rtweekend::random_int, vec3::Point3, }; use std::{cmp::Ordering, sync::Arc}; #[derive(Clone)] pub struct BVHNode { left: Arc<dyn HitTable>, right: Arc<dyn HitTable>, bvhbox: AABB, } impl BVHNode { pub fn new( objects: &mut Vec<Arc<dyn HitTable>>, start: usize, end: usize, time0: f64, time1: f64, ) -> Self { let axis = random_int(0, 3); let mut tmp: BVHNode; let comparator = if axis == 0 { box_x_compare } else if axis == 1 { box_y_compare } else { box_z_compare }; let object_span = end - start; if object_span == 1 { tmp = BVHNode { left: objects[start].clone(), right: objects[start].clone(), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } else if object_span == 2 { if comparator(&objects[start], &objects[start + 1]) == Ordering::Less { tmp = BVHNode { left: objects[start].clone(), right: objects[start + 1].clone(), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } else { tmp = BVHNode { left: objects[start + 1].clone(), right: objects[start].clone(), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } } else { objects.as_mut_slice()[start..end].sort_by(comparator); let mid = start + object_span / 2; tmp = BVHNode { left: Arc::new(BVHNode::new(objects, start, mid, time0, time1)), right: Arc::new(BVHNode::new(objects, mid, end, time0, time1)), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } let mut box_left = AABB::new(Point3::zero(), Point3::zero()); let mut box_right = AABB::new(Point3::zero(), Point3::zero()); if !tmp.left.bounding_box(time0, time1, &mut box_left) || !tmp.right.bounding_box(time0, time1, &mut box_right) { println!("No bounding box in bvh_node constructor.\n"); } tmp.bvhbox = AABB::surrounding_box(&box_left, &box_right); tmp } } pub fn box_x_compare(a: &Arc<dyn HitTable>, b: &Arc<dyn HitTable>) -> Ordering { let mut box_a = AABB::new(Point3::zero(), Point3::zero()); let mut box_b = AABB::new(Point3::zero(), Point3::zero()); if !a.bounding_box(0.0, 0.0, &mut 
box_a) || !b.bounding_box(0.0, 0.0, &mut box_b) { println!("No bounding box in bvh_node constructor.\n"); } if box_a._min.x < box_b._min.x { return Ordering::Less; } else if box_a._min.x > box_b._min.x { return Ordering::Greater; } Ordering::Equal } pub fn box_y_compare(a: &Arc<dyn HitTable>, b: &Arc<dyn HitTable>) -> Ordering { let mut box_a = AABB::new(Point3::zero(), Point3::zero()); let mut box_b = AABB::new(Point3::zero(), Point3::zero()); if !a.bounding_box(0.0, 0.0, &mut box_a) || !b.bounding_box(0.0, 0.0, &mut box_b) { println!("No bounding box in bvh_node constructor.\n"); } if box_a._min.y < box_b._min.y { return Ordering::Less; } else if box_a._min.y > box_b._min.y { return Ordering::Greater; } Ordering::Equal } pub fn box_z_compare(a: &Arc<dyn HitTable>, b: &Arc<dyn HitTable>) -> Ordering { let mut box_a = AABB::new(Point3::zero(), Point3::zero()); let mut box_b = AABB::new(Point3::zero(), Point3::zero()); if !a.bounding_box(0.0, 0.0, &mut box_a) || !b.bounding_box(0.0, 0.0, &mut box_b) { println!("No bounding box in bvh_node constructor.\n"); } if box_a._min.z < box_b._min.z { return Ordering::Less; } else if box_a._min.z > box_b._min.z { return Ordering::Greater; } Ordering::Equal } impl HitTable for BVHNode { fn hit(&self, r: &Ray, t_min: f64, t_max: f64, rec: &mut HitRecord) -> bool { if !self.bvhbox.hit(r, t_min, t_max) { return false; } let hit_left = self.left.hit(r, t_min, t_max, rec); let hit_right = self .right .hit(r, t_min, if hit_left { rec.t } else { t_max }, rec); hit_left || hit_right } fn bounding_box(&self, _t0: f64, _t1: f64, output_box: &mut AABB) -> bool { *output_box = self.bvhbox.clone(); true } fn distance(&self, _other_center: &Point3) -> f64 { 0.0 } }
use crate::{ aabb::AABB, hittable::{HitRecord, HitTable}, ray::Ray, rtweekend::random_int, vec3::Point3, }; use std::{cmp::Ordering, sync::Arc}; #[derive(Clone)] pub struct BVHNode { left: Arc<dyn HitTable>, right: Arc<dyn HitTable>, bvhbox: AABB, } impl BVHNode { pub fn new( objects: &mut Vec<Arc<dyn HitTable>>, start: usize, end: usize, time0: f64, time1: f64, ) -> Self { let axis = random_int(0, 3); let mut tmp: BVHNode; let comparator = if axis == 0 { box_x_compare } else if axis == 1 { box_y_compare } else { box_z_compare }; let object_span = end - start; if object_span == 1 {
tor(&objects[start], &objects[start + 1]) == Ordering::Less { tmp = BVHNode { left: objects[start].clone(), right: objects[start + 1].clone(), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } else { tmp = BVHNode { left: objects[start + 1].clone(), right: objects[start].clone(), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } } else { objects.as_mut_slice()[start..end].sort_by(comparator); let mid = start + object_span / 2; tmp = BVHNode { left: Arc::new(BVHNode::new(objects, start, mid, time0, time1)), right: Arc::new(BVHNode::new(objects, mid, end, time0, time1)), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } let mut box_left = AABB::new(Point3::zero(), Point3::zero()); let mut box_right = AABB::new(Point3::zero(), Point3::zero()); if !tmp.left.bounding_box(time0, time1, &mut box_left) || !tmp.right.bounding_box(time0, time1, &mut box_right) { println!("No bounding box in bvh_node constructor.\n"); } tmp.bvhbox = AABB::surrounding_box(&box_left, &box_right); tmp } } pub fn box_x_compare(a: &Arc<dyn HitTable>, b: &Arc<dyn HitTable>) -> Ordering { let mut box_a = AABB::new(Point3::zero(), Point3::zero()); let mut box_b = AABB::new(Point3::zero(), Point3::zero()); if !a.bounding_box(0.0, 0.0, &mut box_a) || !b.bounding_box(0.0, 0.0, &mut box_b) { println!("No bounding box in bvh_node constructor.\n"); } if box_a._min.x < box_b._min.x { return Ordering::Less; } else if box_a._min.x > box_b._min.x { return Ordering::Greater; } Ordering::Equal } pub fn box_y_compare(a: &Arc<dyn HitTable>, b: &Arc<dyn HitTable>) -> Ordering { let mut box_a = AABB::new(Point3::zero(), Point3::zero()); let mut box_b = AABB::new(Point3::zero(), Point3::zero()); if !a.bounding_box(0.0, 0.0, &mut box_a) || !b.bounding_box(0.0, 0.0, &mut box_b) { println!("No bounding box in bvh_node constructor.\n"); } if box_a._min.y < box_b._min.y { return Ordering::Less; } else if box_a._min.y > box_b._min.y { return Ordering::Greater; } Ordering::Equal } pub fn box_z_compare(a: 
&Arc<dyn HitTable>, b: &Arc<dyn HitTable>) -> Ordering { let mut box_a = AABB::new(Point3::zero(), Point3::zero()); let mut box_b = AABB::new(Point3::zero(), Point3::zero()); if !a.bounding_box(0.0, 0.0, &mut box_a) || !b.bounding_box(0.0, 0.0, &mut box_b) { println!("No bounding box in bvh_node constructor.\n"); } if box_a._min.z < box_b._min.z { return Ordering::Less; } else if box_a._min.z > box_b._min.z { return Ordering::Greater; } Ordering::Equal } impl HitTable for BVHNode { fn hit(&self, r: &Ray, t_min: f64, t_max: f64, rec: &mut HitRecord) -> bool { if !self.bvhbox.hit(r, t_min, t_max) { return false; } let hit_left = self.left.hit(r, t_min, t_max, rec); let hit_right = self .right .hit(r, t_min, if hit_left { rec.t } else { t_max }, rec); hit_left || hit_right } fn bounding_box(&self, _t0: f64, _t1: f64, output_box: &mut AABB) -> bool { *output_box = self.bvhbox.clone(); true } fn distance(&self, _other_center: &Point3) -> f64 { 0.0 } }
tmp = BVHNode { left: objects[start].clone(), right: objects[start].clone(), bvhbox: AABB::new(Point3::zero(), Point3::zero()), }; } else if object_span == 2 { if compara
random
[]
Rust
devices/src/virtio/video/decoder/capability.rs
aosp-riscv/platform_external_crosvm
ff681b6a18eff76336a68058c11afdc287255615
use base::warn; use std::collections::btree_map::Entry; use std::collections::BTreeMap; use crate::virtio::video::control::*; use crate::virtio::video::format::*; fn from_pixel_format( fmt: &libvda::PixelFormat, mask: u64, width_range: FormatRange, height_range: FormatRange, ) -> FormatDesc { let format = match fmt { libvda::PixelFormat::NV12 => Format::NV12, libvda::PixelFormat::YV12 => Format::YUV420, }; let frame_formats = vec![FrameFormat { width: width_range, height: height_range, bitrates: Vec::new(), }]; FormatDesc { mask, format, frame_formats, } } pub struct Capability { pub in_fmts: Vec<FormatDesc>, pub out_fmts: Vec<FormatDesc>, profiles: BTreeMap<Format, Vec<Profile>>, levels: BTreeMap<Format, Vec<Level>>, } impl Capability { pub fn new(caps: &libvda::decode::Capabilities) -> Self { let mask = !(u64::max_value() << caps.output_formats.len()); let mut in_fmts = vec![]; let mut profiles: BTreeMap<Format, Vec<Profile>> = Default::default(); for fmt in caps.input_formats.iter() { match Profile::from_libvda_profile(fmt.profile) { Some(profile) => { let format = profile.to_format(); in_fmts.push(FormatDesc { mask, format, frame_formats: vec![Default::default()], }); match profiles.entry(format) { Entry::Occupied(mut e) => e.get_mut().push(profile), Entry::Vacant(e) => { e.insert(vec![profile]); } } } None => { warn!( "No virtio-video equivalent for libvda profile, skipping: {:?}", fmt.profile ); } } } let levels: BTreeMap<Format, Vec<Level>> = if profiles.contains_key(&Format::H264) { vec![(Format::H264, vec![Level::H264_1_0])] .into_iter() .collect() } else { Default::default() }; let min_width = caps.input_formats.iter().map(|fmt| fmt.min_width).max(); let max_width = caps.input_formats.iter().map(|fmt| fmt.max_width).min(); let min_height = caps.input_formats.iter().map(|fmt| fmt.min_height).max(); let max_height = caps.input_formats.iter().map(|fmt| fmt.max_height).min(); let width_range = FormatRange { min: min_width.unwrap_or(0), max: 
max_width.unwrap_or(0), step: 1, }; let height_range = FormatRange { min: min_height.unwrap_or(0), max: max_height.unwrap_or(0), step: 1, }; let mask = !(u64::max_value() << caps.input_formats.len()); let out_fmts = caps .output_formats .iter() .map(|fmt| from_pixel_format(fmt, mask, width_range, height_range)) .collect(); Capability { in_fmts, out_fmts, profiles, levels, } } pub fn query_control(&self, t: &QueryCtrlType) -> Option<QueryCtrlResponse> { use QueryCtrlType::*; match *t { Profile(fmt) => { let profiles = self.profiles.get(&fmt)?; Some(QueryCtrlResponse::Profile( profiles.iter().copied().collect(), )) } Level(fmt) => { let levels = self.levels.get(&fmt)?; Some(QueryCtrlResponse::Level(levels.iter().copied().collect())) } } } }
use base::warn; use std::collections::btree_map::Entry; use std::collections::BTreeMap; use crate::virtio::video::control::*; use crate::virtio::video::format::*; fn from_pixel_format( fmt: &libvda::PixelFormat, mask: u64, width_range: FormatRange, height_range: FormatRange, ) -> FormatDesc { let format = match fmt { libvda::PixelFormat::NV12 => Format::NV12, libvda::PixelFormat::YV12 => Format::YUV420, }; let frame_formats = vec![FrameFormat { width: width_range, height: height_range, bitrates: Vec::new(), }]; FormatDesc { mask, format, frame_formats, } } pub struct Capability { pub in_fmts: Vec<FormatDesc>, pub out_fmts: Vec<FormatDesc>, profiles: BTreeMap<Format, Vec<Profile>>, levels: BTreeMap<Format, Vec<Level>>, } impl Capability { pub fn new(caps: &libvda::decode::Capabilities) -> Self { let mask = !(u64::max_value() << caps.output_formats.len()); let mut in_fmts = vec![]; let mut profiles: BTreeMap<Format, Vec<Profile>> = Default::default(); for fmt in caps.input_formats.iter() { match Profile::from_libvda_profile(fmt.profile) { Some(profile) => { let format = profile.to_format(); in_fmts.push(FormatDesc { mask, format, frame_formats: vec![Default::default()], }); match profiles.entry(format) { Entry::Occupied(mut e) => e.get_mut().push(profile), Entry::Vacant(e) => { e.insert(vec![profile]); } } } None => { warn!( "No virtio-video equivalent for libvda profile, skipping: {:?}", fmt.profile ); } }
nge, height_range)) .collect(); Capability { in_fmts, out_fmts, profiles, levels, } } pub fn query_control(&self, t: &QueryCtrlType) -> Option<QueryCtrlResponse> { use QueryCtrlType::*; match *t { Profile(fmt) => { let profiles = self.profiles.get(&fmt)?; Some(QueryCtrlResponse::Profile( profiles.iter().copied().collect(), )) } Level(fmt) => { let levels = self.levels.get(&fmt)?; Some(QueryCtrlResponse::Level(levels.iter().copied().collect())) } } } }
} let levels: BTreeMap<Format, Vec<Level>> = if profiles.contains_key(&Format::H264) { vec![(Format::H264, vec![Level::H264_1_0])] .into_iter() .collect() } else { Default::default() }; let min_width = caps.input_formats.iter().map(|fmt| fmt.min_width).max(); let max_width = caps.input_formats.iter().map(|fmt| fmt.max_width).min(); let min_height = caps.input_formats.iter().map(|fmt| fmt.min_height).max(); let max_height = caps.input_formats.iter().map(|fmt| fmt.max_height).min(); let width_range = FormatRange { min: min_width.unwrap_or(0), max: max_width.unwrap_or(0), step: 1, }; let height_range = FormatRange { min: min_height.unwrap_or(0), max: max_height.unwrap_or(0), step: 1, }; let mask = !(u64::max_value() << caps.input_formats.len()); let out_fmts = caps .output_formats .iter() .map(|fmt| from_pixel_format(fmt, mask, width_ra
random
[ { "content": "/// Returns a Vec of the valid memory addresses.\n\n/// These should be used to configure the GuestMemory structure for the platfrom.\n\npub fn arch_memory_regions(size: u64) -> Vec<(GuestAddress, u64)> {\n\n vec![(GuestAddress(AARCH64_PHYS_MEM_START), size)]\n\n}\n\n\n", "file_path": "aarch64/src/lib.rs", "rank": 0, "score": 321065.8048601196 }, { "content": "/// Helper function to wrap up a closure with fail handle. The fail handle will be triggered if the\n\n/// closure returns an error.\n\npub fn fallible_closure<E: std::fmt::Display, C: FnMut() -> Result<(), E> + 'static + Send>(\n\n fail_handle: Arc<dyn FailHandle>,\n\n mut callback: C,\n\n) -> impl FnMut() + 'static + Send {\n\n move || match callback() {\n\n Ok(()) => {}\n\n Err(e) => {\n\n error!(\"callback failed {}\", e);\n\n fail_handle.fail();\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::sync::{Arc, Mutex};\n\n\n\n fn task(_: RingBufferStopCallback) {}\n\n\n", "file_path": "devices/src/usb/xhci/ring_buffer_stop_cb.rs", "rank": 1, "score": 315007.1257591103 }, { "content": "/// Does the same as push_fds, but using the RawDescriptorType\n\npub fn push_descriptors(descriptors: &mut Vec<RawDescriptor>) {\n\n push_fds(descriptors)\n\n}\n\n\n", "file_path": "sys_util/src/syslog.rs", "rank": 2, "score": 304844.5186512809 }, { "content": "/// Retrieves the file descriptors owned by the global syslogger.\n\n///\n\n/// Does nothing if syslog was never initialized. 
If their are any file descriptors, they will be\n\n/// pushed into `fds`.\n\n///\n\n/// Note that the `stderr` file descriptor is never added, as it is not owned by syslog.\n\npub fn push_fds(fds: &mut Vec<RawFd>) {\n\n let state = lock!();\n\n state.syslog.push_fds(fds);\n\n fds.extend(state.file.iter().map(|f| f.as_raw_fd()));\n\n}\n\n\n", "file_path": "sys_util/src/syslog.rs", "rank": 3, "score": 304838.06589525647 }, { "content": "/// Instantiates a VirtioInputConfig object with the default configuration for a trackpad. It\n\n/// supports touch, left button and right button events, as well as X and Y axis.\n\npub fn new_trackpad_config(width: u32, height: u32) -> VirtioInputConfig {\n\n VirtioInputConfig::new(\n\n virtio_input_device_ids::new(0, 0, 0, 0),\n\n b\"Crosvm Virtio Trackpad\".to_vec(),\n\n b\"virtio-trackpad\".to_vec(),\n\n virtio_input_bitmap::new([0u8; 128]),\n\n default_trackpad_events(),\n\n default_trackpad_absinfo(width, height),\n\n )\n\n}\n\n\n", "file_path": "devices/src/virtio/input/defaults.rs", "rank": 4, "score": 281455.6090706681 }, { "content": "/// Copy virtio device configuration data from a subslice of `src` to a subslice of `dst`.\n\n/// Unlike std::slice::copy_from_slice(), this function copies as much as possible within\n\n/// the common subset of the two slices, truncating the requested range instead of\n\n/// panicking if the slices do not match in size.\n\n///\n\n/// `dst_offset` and `src_offset` specify the starting indexes of the `dst` and `src`\n\n/// slices, respectively; if either index is out of bounds, this function is a no-op\n\n/// rather than panicking. This makes it safe to call with arbitrary user-controlled\n\n/// inputs.\n\npub fn copy_config(dst: &mut [u8], dst_offset: u64, src: &[u8], src_offset: u64) {\n\n if let Ok(dst_offset) = usize::try_from(dst_offset) {\n\n if let Ok(src_offset) = usize::try_from(src_offset) {\n\n if let Some(dst_slice) = dst.get_mut(dst_offset..) 
{\n\n if let Some(src_slice) = src.get(src_offset..) {\n\n let len = cmp::min(dst_slice.len(), src_slice.len());\n\n let dst_subslice = &mut dst_slice[0..len];\n\n let src_subslice = &src_slice[0..len];\n\n dst_subslice.copy_from_slice(src_subslice);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "devices/src/virtio/mod.rs", "rank": 5, "score": 279545.2904768888 }, { "content": "/// Instantiates a VirtioInputConfig object with the default configuration for a multitouch\n\n/// touchscreen.\n\npub fn new_multi_touch_config(width: u32, height: u32) -> VirtioInputConfig {\n\n VirtioInputConfig::new(\n\n virtio_input_device_ids::new(0, 0, 0, 0),\n\n b\"Crosvm Virtio Multitouch Touchscreen\".to_vec(),\n\n b\"virtio-touchscreen\".to_vec(),\n\n virtio_input_bitmap::from_bits(&[INPUT_PROP_DIRECT]),\n\n default_multitouchscreen_events(),\n\n default_multitouchscreen_absinfo(width, height, 10, 10),\n\n )\n\n}\n\n\n", "file_path": "devices/src/virtio/input/defaults.rs", "rank": 6, "score": 277807.93345490645 }, { "content": "/// Instantiates a VirtioInputConfig object with the default configuration for a touchscreen (no\n\n/// multitouch support).\n\npub fn new_single_touch_config(width: u32, height: u32) -> VirtioInputConfig {\n\n VirtioInputConfig::new(\n\n virtio_input_device_ids::new(0, 0, 0, 0),\n\n b\"Crosvm Virtio Touchscreen\".to_vec(),\n\n b\"virtio-touchscreen\".to_vec(),\n\n virtio_input_bitmap::from_bits(&[INPUT_PROP_DIRECT]),\n\n default_touchscreen_events(),\n\n default_touchscreen_absinfo(width, height),\n\n )\n\n}\n\n\n", "file_path": "devices/src/virtio/input/defaults.rs", "rank": 7, "score": 277807.93345490645 }, { "content": "// For the specified (Base, Len), returns (base, len) pair which could be\n\n// set into mtrr register. 
mtrr requires: the base-address alignment value can't be\n\n// less than its length\n\nfn get_mtrr_pairs(base: u64, len: u64) -> Vec<(u64, u64)> {\n\n let mut vecs = Vec::new();\n\n\n\n let mut remains = len;\n\n let mut new = base;\n\n while remains != 0 {\n\n let max = get_max_len(new, remains);\n\n vecs.push((new, max));\n\n remains -= max;\n\n new += max;\n\n }\n\n\n\n vecs\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 8, "score": 266523.9820278699 }, { "content": "fn append_mtrr_entries(vpu: &dyn VcpuX86_64, pci_start: u64, entries: &mut Vec<Register>) {\n\n // Get VAR MTRR num from MSR_MTRRcap\n\n let mut msrs = vec![Register {\n\n id: crate::msr_index::MSR_MTRRcap,\n\n ..Default::default()\n\n }];\n\n if vpu.get_msrs(&mut msrs).is_err() {\n\n warn!(\"get msrs fail, guest with pass through device may be very slow\");\n\n return;\n\n }\n\n let var_num = msrs[0].value & VAR_MTRR_NUM_MASK;\n\n\n\n // Set pci_start .. 4G as UC\n\n // all others are set to default WB\n\n let pci_len = (1 << 32) - pci_start;\n\n let vecs = get_mtrr_pairs(pci_start, pci_len);\n\n if vecs.len() as u64 > var_num {\n\n warn!(\n\n \"mtrr fail for pci mmio, please check pci_start addr,\n\n guest with pass through device may be very slow\"\n", "file_path": "x86_64/src/regs.rs", "rank": 9, "score": 260584.29370246537 }, { "content": "// Reads the next u64 from the file.\n\nfn read_u64_from_file(mut f: &File) -> Result<u64> {\n\n let mut value = [0u8; 8];\n\n (&mut f)\n\n .read_exact(&mut value)\n\n .map_err(Error::ReadingHeader)?;\n\n Ok(u64::from_be_bytes(value))\n\n}\n\n\n\nimpl QcowHeader {\n\n /// Creates a QcowHeader from a reference to a file.\n\n pub fn new(f: &mut File) -> Result<QcowHeader> {\n\n f.seek(SeekFrom::Start(0)).map_err(Error::ReadingHeader)?;\n\n\n\n let magic = read_u32_from_file(f)?;\n\n if magic != QCOW_MAGIC {\n\n return Err(Error::InvalidMagic);\n\n }\n\n\n\n let mut header = QcowHeader {\n\n magic,\n", "file_path": "disk/src/qcow/mod.rs", "rank": 10, 
"score": 246513.91643149132 }, { "content": "/// Returns a Vec of the valid memory addresses.\n\n/// These should be used to configure the GuestMemory structure for the platform.\n\n/// For x86_64 all addresses are valid from the start of the kernel except a\n\n/// carve out at the end of 32bit address space.\n\nfn arch_memory_regions(size: u64, bios_size: Option<u64>) -> Vec<(GuestAddress, u64)> {\n\n let mem_end = GuestAddress(size);\n\n let first_addr_past_32bits = GuestAddress(FIRST_ADDR_PAST_32BITS);\n\n let end_32bit_gap_start = GuestAddress(END_ADDR_BEFORE_32BITS);\n\n\n\n let mut regions = Vec::new();\n\n if mem_end <= end_32bit_gap_start {\n\n regions.push((GuestAddress(0), size));\n\n if let Some(bios_size) = bios_size {\n\n regions.push((bios_start(bios_size), bios_size));\n\n }\n\n } else {\n\n regions.push((GuestAddress(0), end_32bit_gap_start.offset()));\n\n if let Some(bios_size) = bios_size {\n\n regions.push((bios_start(bios_size), bios_size));\n\n }\n\n regions.push((\n\n first_addr_past_32bits,\n\n mem_end.offset_from(end_32bit_gap_start),\n\n ));\n", "file_path": "x86_64/src/lib.rs", "rank": 11, "score": 244626.96961802477 }, { "content": "fn read_u64<T: Read>(readable: &mut T) -> u64 {\n\n let mut buf = [0u8; size_of::<u64>()];\n\n readable.read_exact(&mut buf[..]).unwrap();\n\n u64::from_le_bytes(buf)\n\n}\n", "file_path": "fuzz/block_fuzzer.rs", "rank": 12, "score": 241066.2587728423 }, { "content": "fn read_u64<T: Read>(readable: &mut T) -> u64 {\n\n let mut buf = [0u8; size_of::<u64>()];\n\n readable.read_exact(&mut buf[..]).unwrap();\n\n u64::from_le_bytes(buf)\n\n}\n", "file_path": "fuzz/qcow_fuzzer.rs", "rank": 13, "score": 241066.2587728423 }, { "content": "/// Drops all capabilities (permitted, inheritable, and effective) from the current process.\n\npub fn drop_capabilities() -> Result<()> {\n\n unsafe {\n\n // Safe because we do not actually manipulate any memory handled by libcap\n\n // and we check errors.\n\n let caps = 
cap_init();\n\n if caps.is_null() {\n\n return errno_result();\n\n }\n\n\n\n // Freshly initialized capabilities do not have any bits set, so applying them\n\n // will drop all capabilities from the process.\n\n // Safe because we will check the result and otherwise do not touch the memory.\n\n let ret = cap_set_proc(caps);\n\n // We need to free capabilities regardless of success of the operation above.\n\n cap_free(caps);\n\n // Now check if we managed to apply (drop) capabilities.\n\n if ret < 0 {\n\n return errno_result();\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "sys_util/src/capabilities.rs", "rank": 15, "score": 239129.78298568385 }, { "content": "/// Allows the use of any serde deserializer within a closure while providing access to the a set of\n\n/// descriptors for use in `deserialize_descriptor`.\n\n///\n\n/// This is the corresponding call to use deserialize after using `SerializeDescriptors`.\n\n///\n\n/// If `deserialize_with_descriptors` is called anywhere within the given closure, it return an\n\n/// error.\n\npub fn deserialize_with_descriptors<F, T, E>(\n\n f: F,\n\n descriptors: &mut Vec<Option<SafeDescriptor>>,\n\n) -> Result<T, E>\n\nwhere\n\n F: FnOnce() -> Result<T, E>,\n\n E: de::Error,\n\n{\n\n let swap_descriptors = std::mem::take(descriptors);\n\n set_descriptor_src(swap_descriptors).map_err(E::custom)?;\n\n\n\n // catch_unwind is used to ensure that set_descriptor_src is always balanced with a call to\n\n // take_descriptor_src afterwards.\n\n let res = catch_unwind(AssertUnwindSafe(f));\n\n\n\n // unwrap is used because set_descriptor_src is always called before this, so it should never\n\n // panic.\n\n *descriptors = take_descriptor_src().unwrap();\n\n\n\n match res {\n", "file_path": "sys_util/src/descriptor_reflection.rs", "rank": 16, "score": 237268.93405673082 }, { "content": "#[doc(hidden)]\n\n#[inline]\n\npub fn max<T: BitFieldSpecifier>() -> u64 {\n\n if T::FIELD_WIDTH < 64 {\n\n (1 << T::FIELD_WIDTH) - 1\n\n } else {\n\n 
u64::max_value()\n\n }\n\n}\n\n\n\n// Defines bit_field::BitField0 through bit_field::BitField64.\n\nbit_field_derive::define_bit_field_specifiers!();\n\n\n\nimpl BitFieldSpecifier for bool {\n\n const FIELD_WIDTH: u8 = 1;\n\n type SetterType = bool;\n\n type GetterType = bool;\n\n\n\n #[inline]\n\n fn from_u64(val: u64) -> Self::GetterType {\n\n val > 0\n\n }\n", "file_path": "bit_field/src/lib.rs", "rank": 17, "score": 237050.53350922512 }, { "content": "// Strips any `user.virtiofs.` prefix from `buf`. If buf contains one or more nul-bytes, each\n\n// nul-byte-separated slice is treated as a C string and the prefix is stripped from each one.\n\nfn strip_xattr_prefix(buf: &mut Vec<u8>) {\n\n fn next_cstr(b: &[u8], start: usize) -> Option<&[u8]> {\n\n if start >= b.len() {\n\n return None;\n\n }\n\n\n\n let end = b[start..]\n\n .iter()\n\n .position(|&c| c == b'\\0')\n\n .map(|p| start + p + 1)\n\n .unwrap_or(b.len());\n\n\n\n Some(&b[start..end])\n\n }\n\n\n\n let mut pos = 0;\n\n while let Some(name) = next_cstr(&buf, pos) {\n\n if !name.starts_with(USER_VIRTIOFS_XATTR) {\n\n pos += name.len();\n\n continue;\n", "file_path": "devices/src/virtio/fs/passthrough.rs", "rank": 18, "score": 234363.23688900663 }, { "content": "/// Fills `output` completely with random bytes from the specified `source`.\n\npub fn rand_bytes(mut output: &mut [u8], source: Source) -> Result<()> {\n\n if output.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n loop {\n\n // Safe because output is mutable and the writes are limited by output.len().\n\n let bytes = handle_eintr_errno!(unsafe {\n\n libc::getrandom(\n\n output.as_mut_ptr() as *mut c_void,\n\n output.len(),\n\n source.to_getrandom_flags(),\n\n )\n\n });\n\n\n\n if bytes < 0 {\n\n return errno_result();\n\n }\n\n if bytes as usize == output.len() {\n\n return Ok(());\n\n }\n\n\n\n // Wait for more entropy and try again for the remaining bytes.\n\n sleep(POLL_INTERVAL);\n\n output = &mut output[bytes as usize..];\n\n }\n\n}\n\n\n", 
"file_path": "sys_util/src/rand.rs", "rank": 19, "score": 234297.16211588396 }, { "content": "fn convert_copy<R, W>(reader: &mut R, writer: &mut W, offset: u64, size: u64) -> Result<()>\n\nwhere\n\n R: Read + Seek,\n\n W: Write + Seek,\n\n{\n\n const CHUNK_SIZE: usize = 65536;\n\n let mut buf = [0; CHUNK_SIZE];\n\n let mut read_count = 0;\n\n reader\n\n .seek(SeekFrom::Start(offset))\n\n .map_err(Error::SeekingFile)?;\n\n writer\n\n .seek(SeekFrom::Start(offset))\n\n .map_err(Error::SeekingFile)?;\n\n loop {\n\n let this_count = min(CHUNK_SIZE as u64, size - read_count) as usize;\n\n let nread = reader\n\n .read(&mut buf[..this_count])\n\n .map_err(Error::ReadingData)?;\n\n writer.write(&buf[..nread]).map_err(Error::WritingData)?;\n\n read_count += nread as u64;\n\n if nread == 0 || read_count == size {\n\n break;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "disk/src/disk.rs", "rank": 20, "score": 234174.32229302704 }, { "content": "/// Retrieves the signal mask of the current thread as a vector of c_ints.\n\npub fn get_blocked_signals() -> SignalResult<Vec<c_int>> {\n\n let mut mask = Vec::new();\n\n\n\n // Safe - return values are checked.\n\n unsafe {\n\n let mut old_sigset: sigset_t = mem::zeroed();\n\n let ret = pthread_sigmask(SIG_BLOCK, null(), &mut old_sigset as *mut sigset_t);\n\n if ret < 0 {\n\n return Err(Error::RetrieveSignalMask(ret));\n\n }\n\n\n\n for num in 0..=SIGRTMAX() {\n\n if sigismember(&old_sigset, num) > 0 {\n\n mask.push(num);\n\n }\n\n }\n\n }\n\n\n\n Ok(mask)\n\n}\n\n\n", "file_path": "sys_util/src/signal.rs", "rank": 21, "score": 233706.17294795119 }, { "content": "/// Enables real time thread priorities in the current thread up to `limit`.\n\npub fn set_rt_prio_limit(limit: u64) -> Result<()> {\n\n let rt_limit_arg = libc::rlimit {\n\n rlim_cur: limit as libc::rlim_t,\n\n rlim_max: limit as libc::rlim_t,\n\n };\n\n // Safe because the kernel doesn't modify memory that is accessible to the process here.\n\n let res = unsafe { 
libc::setrlimit(libc::RLIMIT_RTPRIO, &rt_limit_arg) };\n\n\n\n if res != 0 {\n\n errno_result()\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "sys_util/src/priority.rs", "rank": 22, "score": 233592.66616579602 }, { "content": "/// Returns the set of reserved base features common to all virtio devices.\n\npub fn base_features(protected_vm: ProtectionType) -> u64 {\n\n let mut features: u64 = 1 << VIRTIO_F_VERSION_1;\n\n\n\n if protected_vm == ProtectionType::Protected {\n\n features |= 1 << VIRTIO_F_ACCESS_PLATFORM;\n\n }\n\n\n\n features\n\n}\n", "file_path": "devices/src/virtio/mod.rs", "rank": 23, "score": 233592.66616579602 }, { "content": "fn set_features(vu: &mut Master, avail_features: u64, ack_features: u64) -> Result<u64> {\n\n let features = avail_features & ack_features;\n\n vu.set_features(features).map_err(Error::SetFeatures)?;\n\n Ok(features)\n\n}\n\n\n\npub struct VhostUserHandler {\n\n vu: Master,\n\n pub avail_features: u64,\n\n acked_features: u64,\n\n protocol_features: VhostUserProtocolFeatures,\n\n}\n\n\n\nimpl VhostUserHandler {\n\n /// Creates a `VhostUserHandler` instance with features and protocol features initialized.\n\n pub fn new(\n\n mut vu: Master,\n\n allow_features: u64,\n\n init_features: u64,\n\n allow_protocol_features: VhostUserProtocolFeatures,\n", "file_path": "devices/src/virtio/vhost/user/handler.rs", "rank": 24, "score": 233130.58169376486 }, { "content": "/// Parses a slice of valid frame formats and the desired resolution\n\n/// and returns the closest available resolution.\n\npub fn find_closest_resolution(\n\n frame_formats: &[FrameFormat],\n\n desired_width: u32,\n\n desired_height: u32,\n\n) -> (u32, u32) {\n\n for FrameFormat { width, height, .. 
} in frame_formats.iter() {\n\n if desired_width < width.min || desired_width > width.max {\n\n continue;\n\n }\n\n if desired_height < height.min || desired_height > height.max {\n\n continue;\n\n }\n\n let allowed_width = clamp_size(desired_width, width.min, width.step);\n\n let allowed_height = clamp_size(desired_height, height.min, height.step);\n\n return (allowed_width, allowed_height);\n\n }\n\n\n\n // Return the resolution with maximum surface if nothing better is found.\n\n match frame_formats\n\n .iter()\n", "file_path": "devices/src/virtio/video/format.rs", "rank": 25, "score": 226884.98740649916 }, { "content": "fn create_integer(v: usize, bytes: &mut Vec<u8>) {\n\n if v <= u8::max_value().into() {\n\n (v as u8).to_aml_bytes(bytes);\n\n } else if v <= u16::max_value().into() {\n\n (v as u16).to_aml_bytes(bytes);\n\n } else if v <= u32::max_value() as usize {\n\n (v as u32).to_aml_bytes(bytes);\n\n } else {\n\n (v as u64).to_aml_bytes(bytes);\n\n }\n\n}\n\n\n\npub type Usize = usize;\n\n\n\nimpl Aml for Usize {\n\n fn to_aml_bytes(&self, bytes: &mut Vec<u8>) {\n\n create_integer(*self, bytes);\n\n }\n\n}\n\n\n", "file_path": "acpi_tables/src/aml.rs", "rank": 26, "score": 226196.26416858245 }, { "content": "pub fn gdb_thread(mut gdbstub: GdbStub, port: u32) {\n\n let addr = format!(\"0.0.0.0:{}\", port);\n\n let listener = match TcpListener::bind(addr.clone()) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n error!(\"Failed to create a TCP listener: {}\", e);\n\n return;\n\n }\n\n };\n\n info!(\"Waiting for a GDB connection on {:?}...\", addr);\n\n\n\n let (stream, addr) = match listener.accept() {\n\n Ok(v) => v,\n\n Err(e) => {\n\n error!(\"Failed to accept a connection from GDB: {}\", e);\n\n return;\n\n }\n\n };\n\n info!(\"GDB connected from {}\", addr);\n\n\n", "file_path": "src/gdb.rs", "rank": 27, "score": 225468.52568076248 }, { "content": "/// Configure base registers for x86\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu` - Structure for the vcpu that 
holds the vcpu fd.\n\n/// * `boot_ip` - Starting instruction pointer.\n\n/// * `boot_sp` - Starting stack pointer.\n\n/// * `boot_si` - Must point to zero page address per Linux ABI.\n\npub fn setup_regs(vcpu: &dyn VcpuX86_64, boot_ip: u64, boot_sp: u64, boot_si: u64) -> Result<()> {\n\n let regs = Regs {\n\n rflags: 0x0000000000000002u64,\n\n rip: boot_ip,\n\n rsp: boot_sp,\n\n rbp: boot_sp,\n\n rsi: boot_si,\n\n ..Default::default()\n\n };\n\n\n\n vcpu.set_regs(&regs).map_err(Error::SettingRegistersIoctl)\n\n}\n\n\n\nconst X86_CR0_PE: u64 = 0x1;\n\nconst X86_CR0_PG: u64 = 0x80000000;\n\nconst X86_CR4_PAE: u64 = 0x20;\n\n\n\nconst EFER_LME: u64 = 0x100;\n\nconst EFER_LMA: u64 = 0x400;\n\n\n\nconst BOOT_GDT_OFFSET: u64 = 0x500;\n\nconst BOOT_IDT_OFFSET: u64 = 0x520;\n\n\n\nconst BOOT_GDT_MAX: usize = 4;\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 28, "score": 223437.60558572723 }, { "content": "pub fn canonical_image_requirements(\n\n info: ImageAllocationInfo,\n\n) -> RutabagaResult<ImageMemoryRequirements> {\n\n let mut image_requirements: ImageMemoryRequirements = Default::default();\n\n let mut size: u32 = 0;\n\n let layout = info.drm_format.planar_layout()?;\n\n for plane in 0..layout.num_planes {\n\n let plane_stride = stride_from_layout(&layout, info.width, plane)?;\n\n image_requirements.strides[plane] = plane_stride;\n\n if plane > 0 {\n\n image_requirements.offsets[plane] = size as u32;\n\n }\n\n\n\n let height = info.height;\n\n let vertical_subsampling = layout.vertical_subsampling[plane];\n\n let subsampled_height = checked_arithmetic!(height / vertical_subsampling)?;\n\n let plane_size = checked_arithmetic!(subsampled_height * plane_stride)?;\n\n size = checked_arithmetic!(size + plane_size)?;\n\n }\n\n\n", "file_path": "rutabaga_gfx/src/rutabaga_gralloc/formats.rs", "rank": 29, "score": 223046.34922273632 }, { "content": "/// Automatically build the hypervisor Segment struct for set_sregs from the kernel bit fields.\n\n///\n\n/// # 
Arguments\n\n///\n\n/// * `entry` - The gdt entry.\n\n/// * `table_index` - Index of the entry in the gdt table.\n\npub fn segment_from_gdt(entry: u64, table_index: u8) -> Segment {\n\n Segment {\n\n base: get_base(entry),\n\n limit: get_limit(entry),\n\n selector: (table_index * 8) as u16,\n\n type_: get_type(entry),\n\n present: get_p(entry),\n\n dpl: get_dpl(entry),\n\n db: get_db(entry),\n\n s: get_s(entry),\n\n l: get_l(entry),\n\n g: get_g(entry),\n\n avl: get_avl(entry),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 30, "score": 222048.44509525195 }, { "content": "/// Read raw bytes from stdin.\n\n///\n\n/// This will block depending on the underlying mode of stdin. This will ignore the usual lock\n\n/// around stdin that the stdlib usually uses. If other code is using stdin, it is undefined who\n\n/// will get the underlying bytes.\n\npub fn read_raw_stdin(out: &mut [u8]) -> Result<usize> {\n\n // Safe because reading from stdin shouldn't have any safety implications.\n\n unsafe { read_raw(STDIN_FILENO, out) }\n\n}\n\n\n\n/// Trait for file descriptors that are TTYs, according to `isatty(3)`.\n\n///\n\n/// This is marked unsafe because the implementation must promise that the returned RawFd is a valid\n\n/// fd and that the lifetime of the returned fd is at least that of the trait object.\n\npub unsafe trait Terminal {\n\n /// Gets the file descriptor of the TTY.\n\n fn tty_fd(&self) -> RawFd;\n\n\n\n /// Set this terminal's mode to canonical mode (`ICANON | ECHO | ISIG`).\n\n fn set_canon_mode(&self) -> Result<()> {\n\n modify_mode(self.tty_fd(), |t| t.c_lflag |= ICANON | ECHO | ISIG)\n\n }\n\n\n\n /// Set this terminal's mode to raw mode (`!(ICANON | ECHO | ISIG)`).\n\n fn set_raw_mode(&self) -> Result<()> {\n", "file_path": "sys_util/src/terminal.rs", "rank": 31, "score": 222017.61503767507 }, { "content": "/// Allocates a vector of length `len` filled with random bytes from the\n\n/// 
specified `source`.\n\npub fn rand_vec(len: usize, source: Source) -> Result<Vec<u8>> {\n\n let mut rand = Vec::with_capacity(len);\n\n if len == 0 {\n\n return Ok(rand);\n\n }\n\n\n\n // Safe because rand will either be initialized by getrandom or dropped.\n\n unsafe { rand.set_len(len) };\n\n rand_bytes(rand.as_mut_slice(), source)?;\n\n Ok(rand)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const TEST_SIZE: usize = 64;\n\n\n\n #[test]\n\n fn randbytes_success() {\n", "file_path": "sys_util/src/rand.rs", "rank": 32, "score": 221935.66565564126 }, { "content": "/// The kernel API has many structs that resemble the following `Foo` structure:\n\n///\n\n/// ```ignore\n\n/// #[repr(C)]\n\n/// struct Foo {\n\n/// some_data: u32,\n\n/// entries: __IncompleteArrayField<__u32>,\n\n/// }\n\n/// ```\n\n///\n\n/// In order to allocate such a structure, `size_of::<Foo>()` would be too small because it would\n\n/// not include any space for `entries`. To make the allocation large enough while still being\n\n/// aligned for `Foo`, a `Vec<Foo>` is created. Only the first element of `Vec<Foo>` would actually\n\n/// be used as a `Foo`. The remaining memory in the `Vec<Foo>` is for `entries`, which must be\n\n/// contiguous with `Foo`. This function is used to make the `Vec<Foo>` with enough space for\n\n/// `count` entries.\n\npub fn vec_with_array_field<T: Default, F>(count: usize) -> Vec<T> {\n\n let element_space = count * size_of::<F>();\n\n let vec_size_bytes = size_of::<T>() + element_space;\n\n vec_with_size_in_bytes(vec_size_bytes)\n\n}\n\n\n\n/// The following code provides generic helpers for creating and accessing flexible array structs.\n\n/// A complete definition of flexible array structs is found in the ISO 9899 specification\n\n/// (http://www.iso-9899.info/n1570.html). 
A flexible array struct is of the form:\n\n///\n\n/// ```ignore\n\n/// #[repr(C)]\n\n/// struct T {\n\n/// some_data: u32,\n\n/// nent: u32,\n\n/// entries: __IncompleteArrayField<S>,\n\n/// }\n\n/// ```\n\n/// where:\n\n///\n\n/// - `T` is the flexible array struct type\n\n/// - `S` is the flexible array type\n\n/// - `nent` is the flexible array length\n\n/// - `entries` is the flexible array member\n\n///\n\n/// These structures are used by the kernel API.\n\n\n", "file_path": "data_model/src/flexible_array.rs", "rank": 33, "score": 216247.57540764406 }, { "content": "/// Add an e820 region to the e820 map.\n\n/// Returns Ok(()) if successful, or an error if there is no space left in the map.\n\nfn add_e820_entry(params: &mut boot_params, addr: u64, size: u64, mem_type: u32) -> Result<()> {\n\n if params.e820_entries >= params.e820_table.len() as u8 {\n\n return Err(Error::E820Configuration);\n\n }\n\n\n\n params.e820_table[params.e820_entries as usize].addr = addr;\n\n params.e820_table[params.e820_entries as usize].size = size;\n\n params.e820_table[params.e820_entries as usize].type_ = mem_type;\n\n params.e820_entries += 1;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "x86_64/src/lib.rs", "rank": 34, "score": 216232.70447212565 }, { "content": "fn convert_reader_writer<R, W>(reader: &mut R, writer: &mut W, size: u64) -> Result<()>\n\nwhere\n\n R: Read + Seek + SeekHole,\n\n W: Write + Seek,\n\n{\n\n let mut offset = 0;\n\n while offset < size {\n\n // Find the next range of data.\n\n let next_data = match reader.seek_data(offset).map_err(Error::SeekingFile)? {\n\n Some(o) => o,\n\n None => {\n\n // No more data in the file.\n\n break;\n\n }\n\n };\n\n let next_hole = match reader.seek_hole(next_data).map_err(Error::SeekingFile)? 
{\n\n Some(o) => o,\n\n None => {\n\n // This should not happen - there should always be at least one hole\n\n // after any data.\n", "file_path": "disk/src/disk.rs", "rank": 35, "score": 215449.1125282151 }, { "content": "fn reply_error<W: Writer>(e: io::Error, unique: u64, mut w: W) -> Result<usize> {\n\n let header = OutHeader {\n\n len: size_of::<OutHeader>() as u32,\n\n error: -e.raw_os_error().unwrap_or(libc::EIO),\n\n unique,\n\n };\n\n\n\n w.write_all(header.as_slice())\n\n .map_err(Error::EncodeMessage)?;\n\n w.flush().map_err(Error::FlushMessage)?;\n\n\n\n Ok(header.len as usize)\n\n}\n\n\n", "file_path": "fuse/src/server.rs", "rank": 36, "score": 212609.80203569954 }, { "content": "/// Configure Model specific registers for x86\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu` - Structure for the vcpu that holds the vcpu fd.\n\npub fn setup_msrs(vcpu: &dyn VcpuX86_64, pci_start: u64) -> Result<()> {\n\n let msrs = create_msr_entries(vcpu, pci_start);\n\n vcpu.set_msrs(&msrs).map_err(Error::MsrIoctlFailed)\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 37, "score": 210812.68001791625 }, { "content": "/// Creates a flattened device tree containing all of the parameters used\n\n/// by Android.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `fdt` - The DTB to modify. 
The top-most node should be open.\n\n/// * `android-fstab` - A text file of Android fstab entries to add to the DTB\n\npub fn create_android_fdt(fdt: &mut FdtWriter, fstab: File) -> Result<()> {\n\n let vecs = BufReader::new(fstab)\n\n .lines()\n\n .map(|l| parse_fstab_line(&l.map_err(Error::FdtIoError)?))\n\n .collect::<Result<Vec<Vec<String>>>>()?;\n\n let firmware_node = fdt.begin_node(\"firmware\")?;\n\n let android_node = fdt.begin_node(\"android\")?;\n\n fdt.property_string(\"compatible\", \"android,firmware\")?;\n\n\n\n let (dtprop, fstab): (_, Vec<_>) = vecs.into_iter().partition(|x| x[0] == \"#dt-vendor\");\n\n let vendor_node = fdt.begin_node(\"vendor\")?;\n\n for vec in dtprop {\n\n let content = std::fs::read_to_string(&vec[2]).map_err(Error::FdtIoError)?;\n\n fdt.property_string(&vec[1], &content)?;\n\n }\n\n fdt.end_node(vendor_node)?;\n\n let fstab_node = fdt.begin_node(\"fstab\")?;\n\n fdt.property_string(\"compatible\", \"android,fstab\")?;\n\n for vec in fstab {\n\n let partition = &vec[1][1..];\n", "file_path": "arch/src/android.rs", "rank": 38, "score": 210788.3694509086 }, { "content": "/// Constructor for a conventional segment GDT (or LDT) entry. 
Derived from the kernel's segment.h.\n\npub fn gdt_entry(flags: u16, base: u32, limit: u32) -> u64 {\n\n (((base as u64) & 0xff000000u64) << (56 - 24))\n\n | (((flags as u64) & 0x0000f0ffu64) << 40)\n\n | (((limit as u64) & 0x000f0000u64) << (48 - 16))\n\n | (((base as u64) & 0x00ffffffu64) << 16)\n\n | ((limit as u64) & 0x0000ffffu64)\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 39, "score": 209656.6932626657 }, { "content": "fn create_msr_entries(vcpu: &dyn VcpuX86_64, pci_start: u64) -> Vec<Register> {\n\n let mut entries = vec![\n\n Register {\n\n id: crate::msr_index::MSR_IA32_SYSENTER_CS,\n\n value: 0x0,\n\n },\n\n Register {\n\n id: crate::msr_index::MSR_IA32_SYSENTER_ESP,\n\n value: 0x0,\n\n },\n\n Register {\n\n id: crate::msr_index::MSR_IA32_SYSENTER_EIP,\n\n value: 0x0,\n\n },\n\n // x86_64 specific msrs, we only run on x86_64 not x86\n\n Register {\n\n id: crate::msr_index::MSR_STAR,\n\n value: 0x0,\n\n },\n\n Register {\n", "file_path": "x86_64/src/regs.rs", "rank": 40, "score": 208503.17999355646 }, { "content": "/// Terminates and reaps a child process. If the child process is a group leader, its children will\n\n/// be terminated and reaped as well. After the given timeout, the child process and any relevant\n\n/// children are killed (i.e. sent SIGKILL).\n\npub fn kill_tree(child: &mut Child, terminate_timeout: Duration) -> SignalResult<()> {\n\n let target = {\n\n let pid = child.id() as Pid;\n\n if getsid(Some(pid)).map_err(Error::GetSid)? 
== pid {\n\n -pid\n\n } else {\n\n pid\n\n }\n\n };\n\n\n\n // Safe because target is a child process (or group) and behavior of SIGTERM is defined.\n\n ok_if!(unsafe { kill(target, libc::SIGTERM) }, libc::ESRCH).map_err(Error::Kill)?;\n\n\n\n // Reap the direct child first in case it waits for its descendants, afterward reap any\n\n // remaining group members.\n\n let start = Instant::now();\n\n let mut child_running = true;\n\n loop {\n\n // Wait for the direct child to exit before reaping any process group members.\n\n if child_running {\n", "file_path": "sys_util/src/signal.rs", "rank": 41, "score": 207791.02853538995 }, { "content": "pub fn ungrab_evdev<T: AsRawDescriptor>(descriptor: &mut T) -> Result<()> {\n\n let ret = unsafe {\n\n // Safe because the kernel only reads the value of the ptr (doesn't dereference) and\n\n // we check the return value\n\n ioctl_with_ptr(\n\n &Descriptor(descriptor.as_raw_descriptor()),\n\n EVIOCGRAB(),\n\n null::<u32>(),\n\n )\n\n };\n\n if ret == 0 {\n\n Ok(())\n\n } else {\n\n Err(InputError::EvdevGrabError(errno()))\n\n }\n\n}\n", "file_path": "devices/src/virtio/input/evdev.rs", "rank": 42, "score": 207791.02853538995 }, { "content": "/// Grabs an event device (see EVIOCGGRAB ioctl for details). 
After this function succeeds the given\n\n/// descriptor has exclusive access to the device, effectively making it unusable for any other process in\n\n/// the host.\n\npub fn grab_evdev<T: AsRawDescriptor>(descriptor: &mut T) -> Result<()> {\n\n let val: u32 = 1;\n\n let ret = unsafe {\n\n // Safe because the kernel only read the value of the ptr and we check the return value\n\n ioctl_with_ref(\n\n &Descriptor(descriptor.as_raw_descriptor()),\n\n EVIOCGRAB(),\n\n &val,\n\n )\n\n };\n\n if ret == 0 {\n\n Ok(())\n\n } else {\n\n Err(InputError::EvdevGrabError(errno()))\n\n }\n\n}\n\n\n", "file_path": "devices/src/virtio/input/evdev.rs", "rank": 43, "score": 207791.02853538995 }, { "content": "fn create_serial_node(fdt: &mut FdtWriter, addr: u64, irq: u32) -> Result<()> {\n\n let serial_reg_prop = [addr, AARCH64_SERIAL_SIZE];\n\n let irq = [GIC_FDT_IRQ_TYPE_SPI, irq, IRQ_TYPE_EDGE_RISING];\n\n\n\n let serial_node = fdt.begin_node(&format!(\"U6_16550A@{:x}\", addr))?;\n\n fdt.property_string(\"compatible\", \"ns16550a\")?;\n\n fdt.property_array_u64(\"reg\", &serial_reg_prop)?;\n\n fdt.property_u32(\"clock-frequency\", AARCH64_SERIAL_SPEED)?;\n\n fdt.property_array_u32(\"interrupts\", &irq)?;\n\n fdt.end_node(serial_node)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "aarch64/src/fdt.rs", "rank": 44, "score": 204066.27282594267 }, { "content": "/// Gets the name of an event device (see EVIOCGNAME ioctl for details).\n\npub fn name<T: AsRawDescriptor>(descriptor: &T) -> Result<Vec<u8>> {\n\n let mut name = evdev_buffer::new();\n\n let len = unsafe {\n\n // Safe because the kernel won't write more than size of evdev_buffer and we check the\n\n // return value\n\n ioctl_with_mut_ref(\n\n &Descriptor(descriptor.as_raw_descriptor()),\n\n EVIOCGNAME(),\n\n &mut name,\n\n )\n\n };\n\n if len < 0 {\n\n return Err(InputError::EvdevNameError(errno()));\n\n }\n\n Ok(name.buffer[0..len as usize].to_vec())\n\n}\n\n\n", "file_path": "devices/src/virtio/input/evdev.rs", "rank": 45, 
"score": 203614.869958067 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct Range(u64, u64, bool);\n\n\n\nimpl Eq for Range {}\n\n\n\nimpl PartialEq for Range {\n\n fn eq(&self, other: &Range) -> bool {\n\n self.0 == other.0\n\n }\n\n}\n\n\n\nimpl Ord for Range {\n\n fn cmp(&self, other: &Range) -> cmp::Ordering {\n\n self.0.cmp(&other.0)\n\n }\n\n}\n\n\n\nimpl PartialOrd for Range {\n\n fn partial_cmp(&self, other: &Range) -> Option<cmp::Ordering> {\n\n self.0.partial_cmp(&other.0)\n\n }\n\n}\n\n\n\n// Wrapper types to make the kvm register structs DataInit\n", "file_path": "src/plugin/vcpu.rs", "rank": 46, "score": 202641.24398127839 }, { "content": "fn parse_remove_bits_attr(ast: &mut DeriveInput) -> Result<Option<u64>> {\n\n let mut width = None;\n\n let mut bits_idx = 0;\n\n\n\n for (i, attr) in ast.attrs.iter().enumerate() {\n\n if let Some(w) = try_parse_bits_attr(attr)? {\n\n bits_idx = i;\n\n width = Some(w.base10_parse()?);\n\n }\n\n }\n\n\n\n if width.is_some() {\n\n ast.attrs.remove(bits_idx);\n\n }\n\n\n\n Ok(width)\n\n}\n\n\n", "file_path": "bit_field/bit_field_derive/bit_field_derive.rs", "rank": 47, "score": 202498.3284784344 }, { "content": "fn create_gic_node(fdt: &mut FdtWriter, is_gicv3: bool, num_cpus: u64) -> Result<()> {\n\n let mut gic_reg_prop = [AARCH64_GIC_DIST_BASE, AARCH64_GIC_DIST_SIZE, 0, 0];\n\n\n\n let intc_node = fdt.begin_node(\"intc\")?;\n\n if is_gicv3 {\n\n fdt.property_string(\"compatible\", \"arm,gic-v3\")?;\n\n gic_reg_prop[2] = AARCH64_GIC_DIST_BASE - (AARCH64_GIC_REDIST_SIZE * num_cpus);\n\n gic_reg_prop[3] = AARCH64_GIC_REDIST_SIZE * num_cpus;\n\n } else {\n\n fdt.property_string(\"compatible\", \"arm,cortex-a15-gic\")?;\n\n gic_reg_prop[2] = AARCH64_GIC_CPUI_BASE;\n\n gic_reg_prop[3] = AARCH64_GIC_CPUI_SIZE;\n\n }\n\n fdt.property_u32(\"#interrupt-cells\", GIC_FDT_IRQ_NUM_CELLS)?;\n\n fdt.property_null(\"interrupt-controller\")?;\n\n fdt.property_array_u64(\"reg\", &gic_reg_prop)?;\n\n 
fdt.property_u32(\"phandle\", PHANDLE_GIC)?;\n\n fdt.property_u32(\"#address-cells\", 2)?;\n\n fdt.property_u32(\"#size-cells\", 2)?;\n\n fdt.end_node(intc_node)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "aarch64/src/fdt.rs", "rank": 48, "score": 201206.81017044964 }, { "content": "/// Gets the unique (serial) name of an event device (see EVIOCGUNIQ ioctl for details).\n\npub fn serial_name<T: AsRawDescriptor>(descriptor: &T) -> Result<Vec<u8>> {\n\n let mut uniq = evdev_buffer::new();\n\n let len = unsafe {\n\n // Safe because the kernel won't write more than size of evdev_buffer and we check the\n\n // return value\n\n ioctl_with_mut_ref(\n\n &Descriptor(descriptor.as_raw_descriptor()),\n\n EVIOCGUNIQ(),\n\n &mut uniq,\n\n )\n\n };\n\n if len < 0 {\n\n return Err(InputError::EvdevSerialError(errno()));\n\n }\n\n Ok(uniq.buffer[0..len as usize].to_vec())\n\n}\n\n\n", "file_path": "devices/src/virtio/input/evdev.rs", "rank": 49, "score": 200757.32154271586 }, { "content": "/// Configures LAPICs. 
LAPIC0 is set for external interrupts, LAPIC1 is set for NMI.\n\n///\n\n/// # Arguments\n\n/// * `vcpu_id` - The number of the VCPU to configure.\n\n/// * `irqchip` - The IrqChip for getting/setting LAPIC state.\n\npub fn set_lint(vcpu_id: usize, irqchip: &mut dyn IrqChipX86_64) -> Result<()> {\n\n let mut lapic = irqchip.get_lapic_state(vcpu_id).map_err(Error::GetLapic)?;\n\n\n\n for (reg, mode) in &[\n\n (APIC_LVT0_REGISTER, APIC_MODE_EXTINT),\n\n (APIC_LVT1_REGISTER, APIC_MODE_NMI),\n\n ] {\n\n lapic.regs[*reg] = set_apic_delivery_mode(lapic.regs[*reg], *mode);\n\n }\n\n\n\n irqchip\n\n .set_lapic_state(vcpu_id, &lapic)\n\n .map_err(Error::SetLapic)\n\n}\n", "file_path": "x86_64/src/interrupts.rs", "rank": 50, "score": 200643.055922371 }, { "content": "fn default_touchscreen_absinfo(width: u32, height: u32) -> BTreeMap<u16, virtio_input_absinfo> {\n\n let mut absinfo: BTreeMap<u16, virtio_input_absinfo> = BTreeMap::new();\n\n absinfo.insert(ABS_X, virtio_input_absinfo::new(0, width, 0, 0));\n\n absinfo.insert(ABS_Y, virtio_input_absinfo::new(0, height, 0, 0));\n\n absinfo\n\n}\n\n\n", "file_path": "devices/src/virtio/input/defaults.rs", "rank": 51, "score": 198039.20587855554 }, { "content": "fn default_trackpad_absinfo(width: u32, height: u32) -> BTreeMap<u16, virtio_input_absinfo> {\n\n let mut absinfo: BTreeMap<u16, virtio_input_absinfo> = BTreeMap::new();\n\n absinfo.insert(ABS_X, virtio_input_absinfo::new(0, width, 0, 0));\n\n absinfo.insert(ABS_Y, virtio_input_absinfo::new(0, height, 0, 0));\n\n absinfo\n\n}\n\n\n", "file_path": "devices/src/virtio/input/defaults.rs", "rank": 52, "score": 198039.20587855554 }, { "content": "/// Given a `reader` for a full set of descriptors as provided by the Linux kernel\n\n/// usbdevfs `descriptors` file, parse the descriptors into a tree data structure.\n\npub fn parse_usbfs_descriptors<R: Read>(mut reader: R) -> Result<DeviceDescriptorTree> {\n\n // Given a structure of length `struct_length`, of which `bytes_consumed` 
have\n\n // already been read, skip the remainder of the struct. If `bytes_consumed` is\n\n // more than `struct_length`, no additional bytes are skipped.\n\n fn skip<R: Read>(reader: R, bytes_consumed: usize, struct_length: u8) -> io::Result<u64> {\n\n let bytes_to_skip = u64::from(struct_length).saturating_sub(bytes_consumed as u64);\n\n io::copy(&mut reader.take(bytes_to_skip), &mut io::sink())\n\n }\n\n\n\n // Find the next descriptor of type T and return it.\n\n // Any other descriptors encountered while searching for the expected type are skipped.\n\n fn next_descriptor<R: Read, T: Descriptor + DataInit>(mut reader: R) -> Result<T> {\n\n let desc_type = T::descriptor_type() as u8;\n\n loop {\n\n let hdr = DescriptorHeader::from_reader(&mut reader).map_err(Error::DescriptorRead)?;\n\n if hdr.bDescriptorType == desc_type {\n\n if usize::from(hdr.bLength) < size_of::<DescriptorHeader>() + size_of::<T>() {\n\n return Err(Error::DescriptorParse);\n\n }\n\n\n", "file_path": "usb_util/src/descriptor.rs", "rank": 53, "score": 197911.58526136173 }, { "content": "// Ceiling of the division of `dividend`/`divisor`.\n\nfn div_round_up_u64(dividend: u64, divisor: u64) -> u64 {\n\n dividend / divisor + if dividend % divisor != 0 { 1 } else { 0 }\n\n}\n\n\n", "file_path": "disk/src/qcow/mod.rs", "rank": 54, "score": 197389.17364046088 }, { "content": "fn get_base(entry: u64) -> u64 {\n\n (((entry) & 0xFF00000000000000) >> 32)\n\n | (((entry) & 0x000000FF00000000) >> 16)\n\n | (((entry) & 0x00000000FFFF0000) >> 16)\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 55, "score": 196172.60574465265 }, { "content": "// Returns the max length which suitable for mtrr setting based on the\n\n// specified (base, len)\n\nfn get_max_len(base: u64, len: u64) -> u64 {\n\n let mut ret = get_power_of_two(len);\n\n\n\n while base % ret != 0 {\n\n ret >>= 1;\n\n }\n\n\n\n ret\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 56, "score": 194875.5278234344 }, { "content": "/// 
Safe wrapper for `libc::lseek64()`\n\nfn lseek(file: &mut File, offset: i64, whence: i32) -> Result<Option<u64>> {\n\n // This is safe because we pass a known-good file descriptor.\n\n let res = unsafe { lseek64(file.as_raw_fd(), offset, whence) };\n\n\n\n if res < 0 {\n\n // Convert ENXIO into None; pass any other error as-is.\n\n let err = Error::last_os_error();\n\n if let Some(errno) = Error::raw_os_error(&err) {\n\n if errno == ENXIO {\n\n return Ok(None);\n\n }\n\n }\n\n Err(err)\n\n } else {\n\n Ok(Some(res as u64))\n\n }\n\n}\n\n\n\nimpl SeekHole for File {\n\n fn seek_hole(&mut self, offset: u64) -> Result<Option<u64>> {\n", "file_path": "sys_util/src/seek_hole.rs", "rank": 57, "score": 193817.4943852676 }, { "content": "// Returns the value of the highest bit in a 64-bit value. Equivalent to\n\n// 1 << HighBitSet(x)\n\nfn get_power_of_two(data: u64) -> u64 {\n\n 1 << (64 - data.leading_zeros() - 1)\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 58, "score": 193708.88239374422 }, { "content": "fn reply_readdir<W: Writer>(len: usize, unique: u64, mut w: W) -> Result<usize> {\n\n let out = OutHeader {\n\n len: (size_of::<OutHeader>() + len) as u32,\n\n error: 0,\n\n unique,\n\n };\n\n\n\n w.write_all(out.as_slice()).map_err(Error::EncodeMessage)?;\n\n w.flush().map_err(Error::FlushMessage)?;\n\n Ok(out.len as usize)\n\n}\n\n\n", "file_path": "fuse/src/server.rs", "rank": 59, "score": 193490.37794821116 }, { "content": "/// Parses the given `args` against the list of know arguments `arg_list` and calls `f` with each\n\n/// present argument and value if required.\n\n///\n\n/// This function guarantees that only valid long argument names from `arg_list` are sent to the\n\n/// callback `f`. It is also guaranteed that if an arg requires a value (i.e.\n\n/// `arg.value.is_some()`), the value will be `Some` in the callbacks arguments. 
If the callback\n\n/// returns `Err`, this function will end parsing and return that `Err`.\n\n///\n\n/// See the [module level](index.html) example for a usage example.\n\npub fn set_arguments<I, R, F>(args: I, arg_list: &[Argument], mut f: F) -> Result<()>\n\nwhere\n\n I: Iterator<Item = R>,\n\n R: AsRef<str>,\n\n F: FnMut(&str, Option<&str>) -> Result<()>,\n\n{\n\n parse_arguments(args, |name, value| {\n\n let mut matches = None;\n\n for arg in arg_list {\n\n if let Some(short) = arg.short {\n\n if name.len() == 1 && name.starts_with(short) {\n\n if value.is_some() != arg.value.is_some() {\n\n return Err(Error::ExpectedValue(short.to_string()));\n\n }\n\n matches = Some(arg.long);\n\n }\n\n }\n\n if matches.is_none() && arg.long == name {\n\n if value.is_none() && arg.value_mode == ArgumentValueMode::Required {\n\n return Err(Error::ExpectedValue(arg.long.to_owned()));\n", "file_path": "src/argument.rs", "rank": 60, "score": 193094.76453077223 }, { "content": "fn create_pkg_length(data: &[u8], include_self: bool) -> Vec<u8> {\n\n let mut result = Vec::new();\n\n\n\n /* PkgLength is inclusive and includes the length bytes */\n\n let length_length = if data.len() < (2usize.pow(6) - 1) {\n\n 1\n\n } else if data.len() < (2usize.pow(12) - 2) {\n\n 2\n\n } else if data.len() < (2usize.pow(20) - 3) {\n\n 3\n\n } else {\n\n 4\n\n };\n\n\n\n let length = data.len() + if include_self { length_length } else { 0 };\n\n\n\n match length_length {\n\n 1 => result.push(length as u8),\n\n 2 => {\n\n result.push((1u8 << 6) | (length & 0xf) as u8);\n", "file_path": "acpi_tables/src/aml.rs", "rank": 61, "score": 186636.11096513082 }, { "content": "/// Sets up the cpuid entries for the given vcpu. 
Can fail if there are too many CPUs specified or\n\n/// if an ioctl returns an error.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `hypervisor` - `HypervisorX86_64` impl for getting supported CPU IDs.\n\n/// * `vcpu` - `VcpuX86_64` for setting CPU ID.\n\n/// * `vcpu_id` - The vcpu index of `vcpu`.\n\n/// * `nrcpus` - The number of vcpus being used by this VM.\n\npub fn setup_cpuid(\n\n hypervisor: &dyn HypervisorX86_64,\n\n irq_chip: &dyn IrqChipX86_64,\n\n vcpu: &dyn VcpuX86_64,\n\n vcpu_id: usize,\n\n nrcpus: usize,\n\n no_smt: bool,\n\n) -> Result<()> {\n\n let mut cpuid = hypervisor\n\n .get_supported_cpuid()\n\n .map_err(Error::GetSupportedCpusFailed)?;\n\n\n\n filter_cpuid(vcpu_id, nrcpus, &mut cpuid, irq_chip, no_smt);\n\n\n\n vcpu.set_cpuid(&cpuid)\n\n .map_err(Error::SetSupportedCpusFailed)\n\n}\n\n\n", "file_path": "x86_64/src/cpuid.rs", "rank": 62, "score": 186040.4758719863 }, { "content": "pub fn run_vcpus(\n\n kvm: &Kvm,\n\n vm: &Vm,\n\n plugin: &Process,\n\n vcpu_count: u32,\n\n kill_signaled: &Arc<AtomicBool>,\n\n exit_evt: &Event,\n\n vcpu_handles: &mut Vec<thread::JoinHandle<()>>,\n\n) -> Result<()> {\n\n let vcpu_thread_barrier = Arc::new(Barrier::new((vcpu_count) as usize));\n\n let use_kvm_signals = !kvm.check_extension(Cap::ImmediateExit);\n\n\n\n // If we need to force a vcpu to exit from a VM then a SIGRTMIN signal is sent\n\n // to that vcpu's thread. 
If KVM is running the VM then it'll return -EINTR.\n\n // An issue is what to do when KVM isn't running the VM (where we could be\n\n // in the kernel or in the app).\n\n //\n\n // If KVM supports \"immediate exit\" then we set a signal handler that will\n\n // set the |immediate_exit| flag that tells KVM to return -EINTR before running\n\n // the VM.\n", "file_path": "src/plugin/mod.rs", "rank": 63, "score": 186030.2127654347 }, { "content": "/// Performs setup of the MP table for the given `num_cpus`.\n\npub fn setup_mptable(\n\n mem: &GuestMemory,\n\n num_cpus: u8,\n\n pci_irqs: Vec<(PciAddress, u32, PciInterruptPin)>,\n\n) -> Result<()> {\n\n // Used to keep track of the next base pointer into the MP table.\n\n let mut base_mp = GuestAddress(MPTABLE_START);\n\n\n\n // Calculate ISA bus number in the system, report at least one PCI bus.\n\n let isa_bus_id = match pci_irqs.iter().max_by_key(|v| v.0.bus) {\n\n Some(pci_irq) => pci_irq.0.bus + 1,\n\n _ => 1,\n\n };\n\n let mp_size = compute_mp_size(num_cpus);\n\n\n\n // The checked_add here ensures the all of the following base_mp.unchecked_add's will be without\n\n // overflow.\n\n if let Some(end_mp) = base_mp.checked_add(mp_size as u64 - 1) {\n\n if !mem.address_in_range(end_mp) {\n\n return Err(Error::NotEnoughMemory);\n", "file_path": "x86_64/src/mptable.rs", "rank": 64, "score": 186030.2127654347 }, { "content": "/// Creates a flattened device tree containing all of the parameters for the\n\n/// kernel and loads it into the guest memory at the specified offset.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `fdt_max_size` - The amount of space reserved for the device tree\n\n/// * `guest_mem` - The guest memory object\n\n/// * `fdt_load_offset` - The offset into physical memory for the device tree\n\n/// * `android_fstab` - the File object for the android fstab\n\npub fn create_fdt(\n\n fdt_max_size: usize,\n\n guest_mem: &GuestMemory,\n\n fdt_load_offset: u64,\n\n android_fstab: File,\n\n) -> Result<usize, Error> 
{\n\n // Reserve space for the setup_data\n\n let fdt_data_size = fdt_max_size - mem::size_of::<setup_data>();\n\n\n\n let mut fdt = FdtWriter::new(&[]);\n\n\n\n // The whole thing is put into one giant node with some top level properties\n\n let root_node = fdt.begin_node(\"\")?;\n\n create_android_fdt(&mut fdt, android_fstab)?;\n\n fdt.end_node(root_node)?;\n\n\n\n let fdt_final = fdt.finish(fdt_data_size)?;\n\n\n\n assert_eq!(\n\n mem::size_of::<setup_data>(),\n", "file_path": "x86_64/src/fdt.rs", "rank": 65, "score": 186030.2127654347 }, { "content": "/// Safe wrapper for `fallocate()`.\n\npub fn fallocate(\n\n file: &dyn AsRawFd,\n\n mode: FallocateMode,\n\n keep_size: bool,\n\n offset: u64,\n\n len: u64,\n\n) -> Result<()> {\n\n let offset = if offset > libc::off64_t::max_value() as u64 {\n\n return Err(Error::new(libc::EINVAL));\n\n } else {\n\n offset as libc::off64_t\n\n };\n\n\n\n let len = if len > libc::off64_t::max_value() as u64 {\n\n return Err(Error::new(libc::EINVAL));\n\n } else {\n\n len as libc::off64_t\n\n };\n\n\n\n let mut mode = match mode {\n", "file_path": "sys_util/src/lib.rs", "rank": 66, "score": 186030.2127654347 }, { "content": "/// Creates a flattened device tree containing all of the parameters for the\n\n/// kernel and loads it into the guest memory at the specified offset.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `fdt_max_size` - The amount of space reserved for the device tree\n\n/// * `guest_mem` - The guest memory object\n\n/// * `pci_irqs` - List of PCI device address to PCI interrupt number and pin mappings\n\n/// * `num_cpus` - Number of virtual CPUs the guest will have\n\n/// * `fdt_load_offset` - The offset into physical memory for the device tree\n\n/// * `pci_device_base` - The offset into physical memory for PCI device memory\n\n/// * `pci_device_size` - The size of PCI device memory\n\n/// * `cmdline` - The kernel commandline\n\n/// * `initrd` - An optional tuple of initrd guest physical address and size\n\n/// * 
`android_fstab` - An optional file holding Android fstab entries\n\n/// * `is_gicv3` - True if gicv3, false if v2\n\n/// * `psci_version` - the current PSCI version\n\npub fn create_fdt(\n\n fdt_max_size: usize,\n\n guest_mem: &GuestMemory,\n\n pci_irqs: Vec<(PciAddress, u32, PciInterruptPin)>,\n\n num_cpus: u32,\n\n fdt_load_offset: u64,\n\n pci_device_base: u64,\n\n pci_device_size: u64,\n\n cmdline: &str,\n\n initrd: Option<(GuestAddress, usize)>,\n\n android_fstab: Option<File>,\n\n is_gicv3: bool,\n\n use_pmu: bool,\n\n psci_version: PsciVersion,\n\n) -> Result<()> {\n\n let mut fdt = FdtWriter::new(&[]);\n\n\n\n // The whole thing is put into one giant node with some top level properties\n\n let root_node = fdt.begin_node(\"\")?;\n\n fdt.property_u32(\"interrupt-parent\", PHANDLE_GIC)?;\n", "file_path": "aarch64/src/fdt.rs", "rank": 67, "score": 186030.2127654347 }, { "content": "// Implement setter and getter for all fields.\n\nfn get_fields_impl(fields: &[FieldSpec]) -> Vec<TokenStream> {\n\n let mut impls = Vec::new();\n\n // This vec keeps track of types before this field, used to generate the offset.\n\n let current_types = &mut vec![quote!(::bit_field::BitField0)];\n\n\n\n for spec in fields {\n\n let ty = spec.ty;\n\n let getter_ident = Ident::new(format!(\"get_{}\", spec.ident).as_str(), Span::call_site());\n\n let setter_ident = Ident::new(format!(\"set_{}\", spec.ident).as_str(), Span::call_site());\n\n\n\n // Optional #[bits = N] attribute to provide compile-time checked\n\n // documentation of how many bits some field covers.\n\n let check_expected_bits = spec.expected_bits.as_ref().map(|expected_bits| {\n\n // If expected_bits does not match the actual number of bits in the\n\n // bit field specifier, this will fail to compile with an error\n\n // pointing into the #[bits = N] attribute.\n\n let span = expected_bits.span();\n\n quote_spanned! 
{span=>\n\n #[allow(dead_code)]\n\n const EXPECTED_BITS: [(); #expected_bits] =\n", "file_path": "bit_field/bit_field_derive/bit_field_derive.rs", "rank": 68, "score": 185658.3115430874 }, { "content": "fn range_intersection(a: &Range<u64>, b: &Range<u64>) -> Range<u64> {\n\n Range {\n\n start: max(a.start, b.start),\n\n end: min(a.end, b.end),\n\n }\n\n}\n\n\n\n/// A magic string placed at the beginning of a composite disk file to identify it.\n\npub static CDISK_MAGIC: &str = \"composite_disk\\x1d\";\n\n/// The length of the CDISK_MAGIC string. Created explicitly as a static constant so that it is\n\n/// possible to create a character array of the same length.\n\npub const CDISK_MAGIC_LEN: usize = 15;\n\n\n\nimpl CompositeDiskFile {\n\n fn new(mut disks: Vec<ComponentDiskPart>) -> Result<CompositeDiskFile> {\n\n disks.sort_by(|d1, d2| d1.offset.cmp(&d2.offset));\n\n let contiguous_err = disks\n\n .windows(2)\n\n .map(|s| {\n\n if s[0].offset == s[1].offset {\n", "file_path": "disk/src/composite.rs", "rank": 69, "score": 185633.6038511344 }, { "content": "fn to_kvm_msrs(vec: &[Register]) -> Vec<kvm_msrs> {\n\n let vec: Vec<kvm_msr_entry> = vec\n\n .iter()\n\n .map(|e| kvm_msr_entry {\n\n index: e.id as u32,\n\n data: e.value,\n\n ..Default::default()\n\n })\n\n .collect();\n\n\n\n let mut msrs = vec_with_array_field::<kvm_msrs, kvm_msr_entry>(vec.len());\n\n unsafe {\n\n // Mapping the unsized array to a slice is unsafe because the length isn't known.\n\n // Providing the length used to create the struct guarantees the entire slice is valid.\n\n msrs[0]\n\n .entries\n\n .as_mut_slice(vec.len())\n\n .copy_from_slice(&vec);\n\n }\n\n msrs[0].nmsrs = vec.len() as u32;\n", "file_path": "hypervisor/src/kvm/x86_64.rs", "rank": 70, "score": 184810.06492336682 }, { "content": "/// Get the current monotonic time of host in nanoseconds\n\nfn get_monotonic_time() -> u64 {\n\n let mut time = libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n };\n\n // Safe because time 
struct is local to this function and we check the returncode\n\n let ret = unsafe { libc::clock_gettime(libc::CLOCK_MONOTONIC, &mut time) };\n\n if ret != 0 {\n\n 0\n\n } else {\n\n time.tv_sec as u64 * 1_000_000_000u64 + time.tv_nsec as u64\n\n }\n\n}\n\n\n\nimpl PitCounter {\n\n fn new(\n\n counter_id: usize,\n\n interrupt_evt: Option<Event>,\n\n clock: Arc<Mutex<Clock>>,\n\n ) -> PitResult<PitCounter> {\n", "file_path": "devices/src/pit.rs", "rank": 71, "score": 183889.00264387048 }, { "content": "/// Creates a new virtio keyboard, which supports the same events as an en-us physical keyboard.\n\npub fn new_keyboard<T>(source: T, virtio_features: u64) -> Result<Input<SocketEventSource<T>>>\n\nwhere\n\n T: Read + Write + AsRawDescriptor,\n\n{\n\n Ok(Input {\n\n kill_evt: None,\n\n worker_thread: None,\n\n config: defaults::new_keyboard_config(),\n\n source: Some(SocketEventSource::new(source)),\n\n virtio_features,\n\n })\n\n}\n\n\n", "file_path": "devices/src/virtio/input/mod.rs", "rank": 72, "score": 183074.34319674212 }, { "content": "/// Creates a new virtio input device from an event device node\n\npub fn new_evdev<T>(source: T, virtio_features: u64) -> Result<Input<EvdevEventSource<T>>>\n\nwhere\n\n T: Read + Write + AsRawDescriptor,\n\n{\n\n Ok(Input {\n\n kill_evt: None,\n\n worker_thread: None,\n\n config: VirtioInputConfig::from_evdev(&source)?,\n\n source: Some(EvdevEventSource::new(source)),\n\n virtio_features,\n\n })\n\n}\n\n\n", "file_path": "devices/src/virtio/input/mod.rs", "rank": 73, "score": 183074.34319674212 }, { "content": "/// Creates a new virtio mouse which supports primary, secondary, wheel and REL events.\n\npub fn new_mouse<T>(source: T, virtio_features: u64) -> Result<Input<SocketEventSource<T>>>\n\nwhere\n\n T: Read + Write + AsRawDescriptor,\n\n{\n\n Ok(Input {\n\n kill_evt: None,\n\n worker_thread: None,\n\n config: defaults::new_mouse_config(),\n\n source: Some(SocketEventSource::new(source)),\n\n virtio_features,\n\n 
})\n\n}\n\n\n", "file_path": "devices/src/virtio/input/mod.rs", "rank": 74, "score": 183074.34319674212 }, { "content": "/// Creates a new virtio device for switches.\n\npub fn new_switches<T>(source: T, virtio_features: u64) -> Result<Input<SocketEventSource<T>>>\n\nwhere\n\n T: Read + Write + AsRawDescriptor,\n\n{\n\n Ok(Input {\n\n kill_evt: None,\n\n worker_thread: None,\n\n config: defaults::new_switches_config(),\n\n source: Some(SocketEventSource::new(source)),\n\n virtio_features,\n\n })\n\n}\n", "file_path": "devices/src/virtio/input/mod.rs", "rank": 75, "score": 183074.34319674212 }, { "content": "/// Create ACPI tables and return the RSDP.\n\n/// The basic tables DSDT/FACP/MADT/XSDT are constructed in this function.\n\n/// # Arguments\n\n///\n\n/// * `guest_mem` - The guest memory where the tables will be stored.\n\n/// * `num_cpus` - Used to construct the MADT.\n\n/// * `sci_irq` - Used to fill the FACP SCI_INTERRUPT field, which\n\n/// is going to be used by the ACPI drivers to register\n\n/// sci handler.\n\n/// * `acpi_dev_resource` - resouces needed by the ACPI devices for creating tables\n\npub fn create_acpi_tables(\n\n guest_mem: &GuestMemory,\n\n num_cpus: u8,\n\n sci_irq: u32,\n\n acpi_dev_resource: ACPIDevResource,\n\n) -> Option<GuestAddress> {\n\n // RSDP is at the HI RSDP WINDOW\n\n let rsdp_offset = GuestAddress(super::ACPI_HI_RSDP_WINDOW_BASE);\n\n let mut offset = rsdp_offset.checked_add(RSDP::len() as u64)?;\n\n let mut tables: Vec<u64> = Vec::new();\n\n let mut dsdt_offset: Option<GuestAddress> = None;\n\n\n\n // User supplied System Description Tables, e.g. 
SSDT.\n\n for sdt in acpi_dev_resource.sdts.iter() {\n\n guest_mem.write_at_addr(sdt.as_slice(), offset).ok()?;\n\n if sdt.is_signature(b\"DSDT\") {\n\n dsdt_offset = Some(offset);\n\n } else {\n\n tables.push(offset.0);\n\n }\n", "file_path": "x86_64/src/acpi.rs", "rank": 76, "score": 183032.3025147685 }, { "content": "/// Creates a root PCI device for use by this Vm.\n\npub fn generate_pci_root(\n\n mut devices: Vec<(Box<dyn PciDevice>, Option<Minijail>)>,\n\n irq_chip: &mut impl IrqChip,\n\n mmio_bus: &mut Bus,\n\n resources: &mut SystemAllocator,\n\n vm: &mut impl Vm,\n\n max_irqs: usize,\n\n) -> Result<\n\n (\n\n PciRoot,\n\n Vec<(PciAddress, u32, PciInterruptPin)>,\n\n BTreeMap<u32, String>,\n\n ),\n\n DeviceRegistrationError,\n\n> {\n\n let mut root = PciRoot::new();\n\n let mut pci_irqs = Vec::new();\n\n let mut pid_labels = BTreeMap::new();\n\n\n\n let mut irqs: Vec<Option<u32>> = vec![None; max_irqs];\n", "file_path": "arch/src/lib.rs", "rank": 77, "score": 183031.8734152952 }, { "content": "/// Adds goldfish battery\n\n/// return the platform needed resouces include its AML data, irq number\n\n///\n\n/// # Arguments\n\n///\n\n/// * `amls` - the vector to put the goldfish battery AML\n\n/// * `battery_jail` - used when sandbox is enabled\n\n/// * `mmio_bus` - bus to add the devices to\n\n/// * `irq_chip` - the IrqChip object for registering irq events\n\n/// * `irq_num` - assigned interrupt to use\n\n/// * `resources` - the SystemAllocator to allocate IO and MMIO for acpi\n\npub fn add_goldfish_battery(\n\n amls: &mut Vec<u8>,\n\n battery_jail: Option<Minijail>,\n\n mmio_bus: &mut Bus,\n\n irq_chip: &mut impl IrqChip,\n\n irq_num: u32,\n\n resources: &mut SystemAllocator,\n\n) -> Result<Tube, DeviceRegistrationError> {\n\n let alloc = resources.get_anon_alloc();\n\n let mmio_base = resources\n\n .mmio_allocator(MmioType::Low)\n\n .allocate_with_align(\n\n devices::bat::GOLDFISHBAT_MMIO_LEN,\n\n alloc,\n\n \"GoldfishBattery\".to_string(),\n\n 
devices::bat::GOLDFISHBAT_MMIO_LEN,\n\n )\n\n .map_err(DeviceRegistrationError::AllocateIoResource)?;\n\n\n\n let irq_evt = Event::new().map_err(DeviceRegistrationError::EventCreate)?;\n", "file_path": "arch/src/lib.rs", "rank": 78, "score": 183031.64842608123 }, { "content": "/// Add serial options to the provided `cmdline` based on `serial_parameters`.\n\n/// `serial_io_type` should be \"io\" if the platform uses x86-style I/O ports for serial devices\n\n/// or \"mmio\" if the serial ports are memory mapped.\n\npub fn get_serial_cmdline(\n\n cmdline: &mut kernel_cmdline::Cmdline,\n\n serial_parameters: &BTreeMap<(SerialHardware, u8), SerialParameters>,\n\n serial_io_type: &str,\n\n) -> GetSerialCmdlineResult<()> {\n\n match serial_parameters\n\n .iter()\n\n .filter(|(_, p)| p.console)\n\n .map(|(k, _)| k)\n\n .next()\n\n {\n\n Some((SerialHardware::Serial, num)) => {\n\n cmdline\n\n .insert(\"console\", &format!(\"ttyS{}\", num - 1))\n\n .map_err(GetSerialCmdlineError::KernelCmdline)?;\n\n }\n\n Some((SerialHardware::VirtioConsole, num)) => {\n\n cmdline\n\n .insert(\"console\", &format!(\"hvc{}\", num - 1))\n\n .map_err(GetSerialCmdlineError::KernelCmdline)?;\n", "file_path": "arch/src/serial.rs", "rank": 79, "score": 183031.0317843838 }, { "content": "pub fn do_modify_battery(\n\n socket_path: &Path,\n\n battery_type: &str,\n\n property: &str,\n\n target: &str,\n\n) -> DoModifyBatteryResult {\n\n let response = match battery_type.parse::<BatteryType>() {\n\n Ok(type_) => match BatControlCommand::new(property.to_string(), target.to_string()) {\n\n Ok(cmd) => {\n\n let request = VmRequest::BatCommand(type_, cmd);\n\n Ok(handle_request(&request, socket_path)?)\n\n }\n\n Err(e) => Err(ModifyBatError::BatControlErr(e)),\n\n },\n\n Err(e) => Err(ModifyBatError::BatControlErr(e)),\n\n };\n\n\n\n match response {\n\n Ok(response) => {\n\n println!(\"{}\", response);\n", "file_path": "vm_control/src/client.rs", "rank": 80, "score": 183025.42065927084 }, { "content": 
"/// The intent of our panic hook is to get panic info and a stacktrace into the syslog, even for\n\n/// jailed subprocesses. It will always abort on panic to ensure a minidump is generated.\n\n///\n\n/// Note that jailed processes will usually have a stacktrace of <unknown> because the backtrace\n\n/// routines attempt to open this binary and are unable to do so in a jail.\n\npub fn set_panic_hook() {\n\n let default_panic = panic::take_hook();\n\n panic::set_hook(Box::new(move |info| {\n\n log_panic_info(default_panic.as_ref(), info);\n\n // Abort to trigger the crash reporter so that a minidump is generated.\n\n abort();\n\n }));\n\n}\n", "file_path": "src/panic_hook.rs", "rank": 81, "score": 183025.42065927084 }, { "content": "pub fn do_usb_attach(\n\n socket_path: &Path,\n\n bus: u8,\n\n addr: u8,\n\n vid: u16,\n\n pid: u16,\n\n dev_path: &Path,\n\n) -> ModifyUsbResult<UsbControlResult> {\n\n let usb_file: File = if dev_path.parent() == Some(Path::new(\"/proc/self/fd\")) {\n\n // Special case '/proc/self/fd/*' paths. The FD is already open, just use it.\n\n // Safe because we will validate |raw_fd|.\n\n unsafe { File::from_raw_descriptor(raw_descriptor_from_path(&dev_path)?) 
}\n\n } else {\n\n OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(&dev_path)\n\n .map_err(|_| ModifyUsbError::UsbControl(UsbControlResult::FailedToOpenDevice))?\n\n };\n\n\n", "file_path": "vm_control/src/client.rs", "rank": 82, "score": 183025.42065927084 }, { "content": "/// Writes the command line string to the given memory slice.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `guest_mem` - A u8 slice that will be partially overwritten by the command line.\n\n/// * `guest_addr` - The address in `guest_mem` at which to load the command line.\n\n/// * `cmdline` - The kernel command line.\n\npub fn load_cmdline(\n\n guest_mem: &GuestMemory,\n\n guest_addr: GuestAddress,\n\n cmdline: &CStr,\n\n) -> Result<()> {\n\n let len = cmdline.to_bytes().len();\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n let end = guest_addr\n\n .checked_add(len as u64 + 1)\n\n .ok_or(Error::CommandLineOverflow)?; // Extra for null termination.\n\n if end > guest_mem.end_addr() {\n\n return Err(Error::CommandLineOverflow);\n\n }\n\n\n\n guest_mem\n\n .write_at_addr(cmdline.to_bytes_with_nul(), guest_addr)\n\n .map_err(|_| Error::CommandLineCopy)?;\n", "file_path": "kernel_loader/src/lib.rs", "rank": 83, "score": 183025.42065927084 }, { "content": "/// Creates a mmio memory region for pstore.\n\npub fn create_memory_region(\n\n vm: &mut impl Vm,\n\n resources: &mut SystemAllocator,\n\n pstore: &Pstore,\n\n) -> Result<RamoopsRegion> {\n\n let file = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .open(&pstore.path)\n\n .map_err(Error::IoError)?;\n\n file.set_len(pstore.size as u64).map_err(Error::IoError)?;\n\n\n\n let address = resources\n\n .mmio_allocator(MmioType::High)\n\n .allocate(pstore.size as u64, Alloc::Pstore, \"pstore\".to_owned())\n\n .map_err(Error::ResourcesError)?;\n\n\n\n let memory_mapping = MemoryMappingBuilder::new(pstore.size as usize)\n\n .from_file(&file)\n", "file_path": "arch/src/pstore.rs", "rank": 84, "score": 
183025.42065927084 }, { "content": "/// Adds serial devices to the provided bus based on the serial parameters given.\n\n///\n\n/// Only devices with hardware type `SerialHardware::Serial` are added by this function.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `io_bus` - Bus to add the devices to\n\n/// * `com_evt_1_3` - event for com1 and com3\n\n/// * `com_evt_1_4` - event for com2 and com4\n\n/// * `io_bus` - Bus to add the devices to\n\n/// * `serial_parameters` - definitions of serial parameter configurations.\n\n/// All four of the traditional PC-style serial ports (COM1-COM4) must be specified.\n\npub fn add_serial_devices(\n\n protected_vm: ProtectionType,\n\n io_bus: &mut Bus,\n\n com_evt_1_3: &Event,\n\n com_evt_2_4: &Event,\n\n serial_parameters: &BTreeMap<(SerialHardware, u8), SerialParameters>,\n\n serial_jail: Option<Minijail>,\n\n) -> Result<(), DeviceRegistrationError> {\n\n for x in 0..=3 {\n\n let com_evt = match x {\n\n 0 => com_evt_1_3,\n\n 1 => com_evt_2_4,\n\n 2 => com_evt_1_3,\n\n 3 => com_evt_2_4,\n\n _ => com_evt_1_3,\n\n };\n\n\n\n let param = serial_parameters\n\n .get(&(SerialHardware::Serial, x + 1))\n\n .ok_or(DeviceRegistrationError::MissingRequiredSerialDevice(x + 1))?;\n", "file_path": "arch/src/serial.rs", "rank": 85, "score": 183025.42065927084 }, { "content": "fn finish_ioctl<W: Writer>(unique: u64, res: io::Result<Vec<u8>>, w: W) -> Result<usize> {\n\n let (out, data) = match res {\n\n Ok(data) => {\n\n let out = IoctlOut {\n\n result: 0,\n\n ..Default::default()\n\n };\n\n (out, Some(data))\n\n }\n\n Err(e) => {\n\n let out = IoctlOut {\n\n result: -e.raw_os_error().unwrap_or(libc::EIO),\n\n ..Default::default()\n\n };\n\n (out, None)\n\n }\n\n };\n\n reply_ok(Some(out), data.as_ref().map(|d| &d[..]), unique, w)\n\n}\n\n\n", "file_path": "fuse/src/server.rs", "rank": 86, "score": 182420.48030831665 }, { "content": "/// Records a log message with the given details.\n\n///\n\n/// Note that this will fail silently if syslog was not 
initialized.\n\n///\n\n/// # Arguments\n\n/// * `pri` - The `Priority` (i.e. severity) of the log message.\n\n/// * `fac` - The `Facility` of the log message. Usually `Facility::User` should be used.\n\n/// * `file_line` - Optional tuple of the name of the file that generated the\n\n/// log and the line number within that file.\n\n/// * `args` - The log's message to record, in the form of `format_args!()` return value\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use sys_util::syslog;\n\n/// # if let Err(e) = syslog::init() {\n\n/// # println!(\"failed to initiailize syslog: {}\", e);\n\n/// # return;\n\n/// # }\n\n/// syslog::log(syslog::Priority::Error,\n\n/// syslog::Facility::User,\n\n/// Some((file!(), line!())),\n\n/// format_args!(\"hello syslog\"));\n\n/// ```\n\npub fn log(pri: Priority, fac: Facility, file_line: Option<(&str, u32)>, args: fmt::Arguments) {\n\n let mut state = lock!();\n\n let mut buf = [0u8; 1024];\n\n\n\n state.syslog.log(\n\n state.proc_name.as_ref().map(|s| s.as_ref()),\n\n pri,\n\n fac,\n\n file_line,\n\n args,\n\n );\n\n\n\n let res = {\n\n let mut buf_cursor = Cursor::new(&mut buf[..]);\n\n if let Some((file_name, line)) = &file_line {\n\n write!(&mut buf_cursor, \"[{}:{}:{}] \", pri, file_name, line)\n\n } else {\n\n Ok(())\n\n }\n\n .and_then(|()| writeln!(&mut buf_cursor, \"{}\", args))\n", "file_path": "sys_util/src/syslog.rs", "rank": 87, "score": 182088.94402947137 }, { "content": "fn fdt_offset(mem_size: u64, has_bios: bool) -> u64 {\n\n // TODO(rammuthiah) make kernel and BIOS startup use FDT from the same location. 
ARCVM startup\n\n // currently expects the kernel at 0x80080000 and the FDT at the end of RAM for unknown reasons.\n\n // Root cause and figure out how to fold these code paths together.\n\n if has_bios {\n\n AARCH64_FDT_OFFSET_IN_BIOS_MODE\n\n } else {\n\n // Put fdt up near the top of memory\n\n // TODO(sonnyrao): will have to handle this differently if there's\n\n // > 4GB memory\n\n mem_size - AARCH64_FDT_MAX_SIZE - 0x10000\n\n }\n\n}\n\n\n\npub struct AArch64;\n\n\n\nimpl arch::LinuxArch for AArch64 {\n\n type Error = Error;\n\n\n\n fn guest_memory_layout(\n", "file_path": "aarch64/src/lib.rs", "rank": 88, "score": 180704.76411255443 }, { "content": "/// Construct a bmRequestType value for a control request.\n\npub fn control_request_type(\n\n type_: ControlRequestType,\n\n dir: ControlRequestDataPhaseTransferDirection,\n\n recipient: ControlRequestRecipient,\n\n) -> u8 {\n\n ((type_ as u8) << CONTROL_REQUEST_TYPE_OFFSET)\n\n | ((dir as u8) << DATA_PHASE_DIRECTION_OFFSET)\n\n | (recipient as u8)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn control_request_types() {\n\n assert_eq!(\n\n control_request_type(\n\n ControlRequestType::Standard,\n\n ControlRequestDataPhaseTransferDirection::HostToDevice,\n", "file_path": "usb_util/src/types.rs", "rank": 89, "score": 180185.41019897864 }, { "content": "/// Add the default serial parameters for serial ports that have not already been specified.\n\n///\n\n/// This ensures that `serial_parameters` will contain parameters for each of the four PC-style\n\n/// serial ports (COM1-COM4).\n\n///\n\n/// It also sets the first `SerialHardware::Serial` to be the default console device if no other\n\n/// serial parameters exist with console=true and the first serial device has not already been\n\n/// configured explicitly.\n\npub fn set_default_serial_parameters(\n\n serial_parameters: &mut BTreeMap<(SerialHardware, u8), SerialParameters>,\n\n) {\n\n // If no console device exists and the first 
serial port has not been specified,\n\n // set the first serial port as a stdout+stdin console.\n\n let default_console = (SerialHardware::Serial, 1);\n\n if !serial_parameters.iter().any(|(_, p)| p.console) {\n\n serial_parameters\n\n .entry(default_console)\n\n .or_insert(SerialParameters {\n\n type_: SerialType::Stdout,\n\n hardware: SerialHardware::Serial,\n\n path: None,\n\n input: None,\n\n num: 1,\n\n console: true,\n\n earlycon: false,\n\n stdin: true,\n\n });\n\n }\n", "file_path": "arch/src/serial.rs", "rank": 90, "score": 180185.41019897864 }, { "content": "#[inline]\n\nfn usize_to_u64(val: usize) -> u64 {\n\n val.try_into().expect(\"`usize` doesn't fit inside a `u64`\")\n\n}\n\n\n\npub struct PendingOperation {\n\n waker_token: Option<WakerToken>,\n\n ex: Weak<RawExecutor>,\n\n submitted: bool,\n\n}\n\n\n\nimpl Future for PendingOperation {\n\n type Output = Result<u32>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n let token = self\n\n .waker_token\n\n .as_ref()\n\n .expect(\"PendingOperation polled after returning Poll::Ready\");\n\n if let Some(ex) = self.ex.upgrade() {\n\n if let Some(result) = ex.get_result(token, cx) {\n", "file_path": "cros_async/src/uring_executor.rs", "rank": 91, "score": 178992.64154008956 }, { "content": "// Unwrap ast to get the named fields. 
We only care about field names and types:\n\n// \"myfield : BitField3\" -> (\"myfield\", Token(BitField3))\n\nfn get_struct_fields(fields: &FieldsNamed) -> Result<Vec<FieldSpec>> {\n\n let mut vec = Vec::new();\n\n\n\n for field in &fields.named {\n\n let ident = field\n\n .ident\n\n .as_ref()\n\n .expect(\"Fields::Named has named fields\");\n\n let ty = &field.ty;\n\n let expected_bits = parse_bits_attr(&field.attrs)?;\n\n vec.push(FieldSpec {\n\n ident,\n\n ty,\n\n expected_bits,\n\n });\n\n }\n\n\n\n Ok(vec)\n\n}\n\n\n", "file_path": "bit_field/bit_field_derive/bit_field_derive.rs", "rank": 92, "score": 178891.3579755898 }, { "content": "fn from_sample_format(format: SampleFormat) -> u8 {\n\n match format {\n\n SampleFormat::U8 => VIRTIO_SND_PCM_FMT_U8,\n\n SampleFormat::S16LE => VIRTIO_SND_PCM_FMT_U16,\n\n SampleFormat::S24LE => VIRTIO_SND_PCM_FMT_U24,\n\n SampleFormat::S32LE => VIRTIO_SND_PCM_FMT_U32,\n\n }\n\n}\n\n\n", "file_path": "devices/src/virtio/snd/vios_backend/shm_streams.rs", "rank": 93, "score": 178564.5976197162 }, { "content": "fn bitfield_enum_with_width_impl(\n\n ast: &DeriveInput,\n\n data: &DataEnum,\n\n width: u64,\n\n) -> Result<TokenStream> {\n\n if width > 64 {\n\n return Err(Error::new(\n\n Span::call_site(),\n\n \"max width of bitfield enum is 64\",\n\n ));\n\n }\n\n let bits = width as u8;\n\n let declare_discriminants = get_declare_discriminants_for_enum(bits, ast, data);\n\n\n\n let ident = &ast.ident;\n\n let type_name = ident.to_string();\n\n let variants = &data.variants;\n\n let match_discriminants = variants.iter().map(|variant| {\n\n let variant = &variant.ident;\n\n quote! 
{\n", "file_path": "bit_field/bit_field_derive/bit_field_derive.rs", "rank": 94, "score": 178031.0601430477 }, { "content": "/// Helper function to submit usb_transfer to device handle.\n\npub fn submit_transfer(\n\n fail_handle: Arc<dyn FailHandle>,\n\n job_queue: &Arc<AsyncJobQueue>,\n\n xhci_transfer: Arc<XhciTransfer>,\n\n device: &mut Device,\n\n usb_transfer: Transfer,\n\n) -> Result<()> {\n\n let transfer_status = {\n\n // We need to hold the lock to avoid race condition.\n\n // While we are trying to submit the transfer, another thread might want to cancel the same\n\n // transfer. Holding the lock here makes sure one of them is cancelled.\n\n let mut state = xhci_transfer.state().lock();\n\n match mem::replace(&mut *state, XhciTransferState::Cancelled) {\n\n XhciTransferState::Created => {\n\n match device.submit_transfer(usb_transfer) {\n\n Err(e) => {\n\n error!(\"fail to submit transfer {:?}\", e);\n\n *state = XhciTransferState::Completed;\n\n TransferStatus::NoDevice\n\n }\n", "file_path": "devices/src/usb/host_backend/utils.rs", "rank": 95, "score": 177496.98829326447 }, { "content": "/// Test utility function to create a descriptor chain in guest memory.\n\npub fn create_descriptor_chain(\n\n memory: &GuestMemory,\n\n descriptor_array_addr: GuestAddress,\n\n mut buffers_start_addr: GuestAddress,\n\n descriptors: Vec<(DescriptorType, u32)>,\n\n spaces_between_regions: u32,\n\n) -> Result<DescriptorChain> {\n\n let descriptors_len = descriptors.len();\n\n for (index, (type_, size)) in descriptors.into_iter().enumerate() {\n\n let mut flags = 0;\n\n if let DescriptorType::Writable = type_ {\n\n flags |= VIRTQ_DESC_F_WRITE;\n\n }\n\n if index + 1 < descriptors_len {\n\n flags |= VIRTQ_DESC_F_NEXT;\n\n }\n\n\n\n let index = index as u16;\n\n let desc = virtq_desc {\n\n addr: buffers_start_addr.offset().into(),\n", "file_path": "devices/src/virtio/descriptor_utils.rs", "rank": 96, "score": 177496.98829326447 }, { "content": "pub fn get_resource_info(\n\n 
tube: &Tube,\n\n request: ResourceRequest,\n\n) -> std::result::Result<ResourceInfo, ResourceBridgeError> {\n\n if let Err(e) = tube.send(&request) {\n\n return Err(ResourceBridgeError::SendFailure(request, e));\n\n }\n\n\n\n match tube.recv() {\n\n Ok(ResourceResponse::Resource(info)) => Ok(info),\n\n Ok(ResourceResponse::Invalid) => Err(ResourceBridgeError::InvalidResource(request)),\n\n Err(e) => Err(ResourceBridgeError::RecieveFailure(request, e)),\n\n }\n\n}\n", "file_path": "devices/src/virtio/resource_bridge.rs", "rank": 97, "score": 177496.98829326447 }, { "content": "/// Load an image from a file into guest memory.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `guest_mem` - The memory to be used by the guest.\n\n/// * `guest_addr` - The starting address to load the image in the guest memory.\n\n/// * `max_size` - The amount of space in bytes available in the guest memory for the image.\n\n/// * `image` - The file containing the image to be loaded.\n\n///\n\n/// The size in bytes of the loaded image is returned.\n\npub fn load_image<F>(\n\n guest_mem: &GuestMemory,\n\n image: &mut F,\n\n guest_addr: GuestAddress,\n\n max_size: u64,\n\n) -> Result<usize, LoadImageError>\n\nwhere\n\n F: Read + Seek + AsRawDescriptor,\n\n{\n\n let size = image.seek(SeekFrom::End(0)).map_err(LoadImageError::Seek)?;\n\n\n\n if size > usize::max_value() as u64 || size > max_size {\n\n return Err(LoadImageError::ImageSizeTooLarge(size));\n\n }\n\n\n\n // This is safe due to the bounds check above.\n\n let size = size as usize;\n\n\n\n image\n\n .seek(SeekFrom::Start(0))\n\n .map_err(LoadImageError::Seek)?;\n\n\n\n guest_mem\n\n .read_to_memory(guest_addr, image, size)\n\n .map_err(LoadImageError::ReadToMemory)?;\n\n\n\n Ok(size)\n\n}\n\n\n", "file_path": "arch/src/lib.rs", "rank": 98, "score": 176831.00768807397 }, { "content": "#[inline(always)]\n\npub fn pagesize() -> usize {\n\n // Trivially safe\n\n unsafe { sysconf(_SC_PAGESIZE) as usize }\n\n}\n\n\n", "file_path": 
"sys_util/src/lib.rs", "rank": 99, "score": 176825.85628149443 } ]
Rust
src/vec/tests.rs
interlockledger/rust-il2-utils
1c441c609fd71a25fb3a4644b6ed9052180443a6
/* * BSD 3-Clause License * * Copyright (c) 2019-2020, InterlockLedger Network * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ use super::*; #[test] fn test_vecextensions_with_value() { for i in 0..32 { let sample: [u8; 32] = [i as u8; 32]; let v: Vec<u8> = Vec::with_value(&sample); assert_eq!(v.as_slice(), &sample); } } #[test] fn test_vecextensions_set_capacity_to() { let mut v = Vec::<u8>::new(); v.set_capacity_to(10); assert_eq!(v.len(), 0); assert!(v.capacity() >= 10); v.set_capacity_to(100); assert_eq!(v.len(), 0); assert!(v.capacity() >= 100); let sample: [u8; 4] = [1, 2, 3, 4]; let mut v = Vec::<u8>::new(); v.extend_from_slice(&sample); v.set_capacity_to(10); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 10); v.set_capacity_to(100); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 100); } #[test] fn test_vecextensions_set_capacity_to_secure() { let mut v = Vec::<u8>::new(); v.set_capacity_to_secure(10); assert_eq!(v.len(), 0); assert!(v.capacity() >= 10); v.set_capacity_to_secure(100); assert_eq!(v.len(), 0); assert!(v.capacity() >= 100); let sample: [u8; 4] = [1, 2, 3, 4]; let mut v = Vec::<u8>::new(); v.extend_from_slice(&sample); v.set_capacity_to_secure(10); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 10); v.set_capacity_to_secure(100); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 100); } #[test] fn test_vecextensions_set_contents_from_slice() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.set_contents_from_slice(&sample[0..0]); assert!(v.is_empty()); assert_eq!(v.capacity(), old_capacity); let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.set_contents_from_slice(&sample); assert_eq!(v.as_slice(), &sample); assert!(old_capacity < v.capacity()); let old_capacity = v.capacity(); let sample: [u8; 16] = [0xBA; 16]; v.set_contents_from_slice(&sample); assert_eq!(v.as_slice(), &sample); assert_eq!(v.capacity(), old_capacity); } #[test] fn test_vecextensions_set_contents_from_slice_secure() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::new(); let 
old_capacity = v.capacity(); v.set_contents_from_slice_secure(&sample[0..0]); assert!(v.is_empty()); assert_eq!(v.capacity(), old_capacity); let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.set_contents_from_slice_secure(&sample); assert_eq!(v.as_slice(), &sample); assert!(old_capacity < v.capacity()); let old_capacity = v.capacity(); let sample: [u8; 16] = [0xBA; 16]; v.set_contents_from_slice_secure(&sample); assert_eq!(v.as_slice(), &sample); assert_eq!(v.capacity(), old_capacity); } #[test] fn test_vecextensions_shrink_to_fit_secure() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::with_capacity(128); let old_capacity = v.capacity(); v.shrink_to_fit_secure(); assert!(v.capacity() < old_capacity); let mut v = Vec::<u8>::with_capacity(128); let old_capacity = v.capacity(); v.set_contents_from_slice(&sample); v.shrink_to_fit_secure(); assert!(v.capacity() < old_capacity); assert_eq!(v.as_slice(), &sample); } #[test] fn test_vecextensions_reserve_secure() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.reserve_secure(10); assert!(v.capacity() > old_capacity); let mut v = Vec::<u8>::new(); v.set_contents_from_slice(&sample); let old_capacity = v.capacity(); v.reserve_secure(128); assert!(v.capacity() > old_capacity); assert_eq!(v.as_slice(), &sample); } #[test] fn test_vecextensions_extend_from_slice_secure() { let mut v = Vec::<u8>::new(); let mut exp = Vec::<u8>::new(); for i in 0..32 { let sample: [u8; 32] = [i as u8; 32]; v.extend_from_slice_secure(&sample); exp.extend_from_slice(&sample); assert_eq!(v.as_slice(), exp.as_slice()); } }
/* * BSD 3-Clause License * * Copyright (c) 2019-2020, InterlockLedger Network * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ use super::*; #[test] fn test_vecextensions_with_value() { for i in 0..32 { let sample: [u8; 32] = [i as u8; 32]; let v: Vec<u8> = Vec::with_value(&sample); assert_eq!(v.as_slice(), &sample); } } #[test] fn test_vecextensions_set_capacity_to() { let mut v = Vec::<u8>::new(); v.set_capacity_to(10); assert_eq!(v.len(), 0); assert!(v.capacity() >= 10); v.set_capacity_to(100); assert_eq!(v.len(), 0); assert!(v.capacity() >= 100); let sample: [u8; 4] = [1, 2, 3, 4]; let mut v = Vec::<u8>::new(); v.extend_from_slice(&sample); v.set_capacity_to(10); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 10); v.set_capacity_to(100); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 100); } #[test]
#[test] fn test_vecextensions_set_contents_from_slice() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.set_contents_from_slice(&sample[0..0]); assert!(v.is_empty()); assert_eq!(v.capacity(), old_capacity); let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.set_contents_from_slice(&sample); assert_eq!(v.as_slice(), &sample); assert!(old_capacity < v.capacity()); let old_capacity = v.capacity(); let sample: [u8; 16] = [0xBA; 16]; v.set_contents_from_slice(&sample); assert_eq!(v.as_slice(), &sample); assert_eq!(v.capacity(), old_capacity); } #[test] fn test_vecextensions_set_contents_from_slice_secure() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.set_contents_from_slice_secure(&sample[0..0]); assert!(v.is_empty()); assert_eq!(v.capacity(), old_capacity); let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.set_contents_from_slice_secure(&sample); assert_eq!(v.as_slice(), &sample); assert!(old_capacity < v.capacity()); let old_capacity = v.capacity(); let sample: [u8; 16] = [0xBA; 16]; v.set_contents_from_slice_secure(&sample); assert_eq!(v.as_slice(), &sample); assert_eq!(v.capacity(), old_capacity); } #[test] fn test_vecextensions_shrink_to_fit_secure() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::with_capacity(128); let old_capacity = v.capacity(); v.shrink_to_fit_secure(); assert!(v.capacity() < old_capacity); let mut v = Vec::<u8>::with_capacity(128); let old_capacity = v.capacity(); v.set_contents_from_slice(&sample); v.shrink_to_fit_secure(); assert!(v.capacity() < old_capacity); assert_eq!(v.as_slice(), &sample); } #[test] fn test_vecextensions_reserve_secure() { let sample: [u8; 32] = [0xFA; 32]; let mut v = Vec::<u8>::new(); let old_capacity = v.capacity(); v.reserve_secure(10); assert!(v.capacity() > old_capacity); let mut v = Vec::<u8>::new(); v.set_contents_from_slice(&sample); let old_capacity = 
v.capacity(); v.reserve_secure(128); assert!(v.capacity() > old_capacity); assert_eq!(v.as_slice(), &sample); } #[test] fn test_vecextensions_extend_from_slice_secure() { let mut v = Vec::<u8>::new(); let mut exp = Vec::<u8>::new(); for i in 0..32 { let sample: [u8; 32] = [i as u8; 32]; v.extend_from_slice_secure(&sample); exp.extend_from_slice(&sample); assert_eq!(v.as_slice(), exp.as_slice()); } }
fn test_vecextensions_set_capacity_to_secure() { let mut v = Vec::<u8>::new(); v.set_capacity_to_secure(10); assert_eq!(v.len(), 0); assert!(v.capacity() >= 10); v.set_capacity_to_secure(100); assert_eq!(v.len(), 0); assert!(v.capacity() >= 100); let sample: [u8; 4] = [1, 2, 3, 4]; let mut v = Vec::<u8>::new(); v.extend_from_slice(&sample); v.set_capacity_to_secure(10); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 10); v.set_capacity_to_secure(100); assert_eq!(v.as_slice(), &sample); assert!(v.capacity() >= 100); }
function_block-full_function
[ { "content": "#[test]\n\nfn test_defaultsharedfilelocknamebuilder_namebuilder_create_lock_file_name() {\n\n let b = DefaultSharedFileLockNameBuilder;\n\n\n\n let name = OsStr::new(\"file\");\n\n assert_eq!(b.create_lock_file_name(name), OsStr::new(\".file.lock~\"));\n\n\n\n let name = OsStr::new(\"z\");\n\n assert_eq!(b.create_lock_file_name(name), OsStr::new(\".z.lock~\"));\n\n}\n\n\n\n//=============================================================================\n\n// SharedFileReadLockGuard\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/fs/shared/tests.rs", "rank": 3, "score": 91801.94253521915 }, { "content": "#[test]\n\nfn test_defaultprotectedvalue() {\n\n let exp: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];\n\n\n\n let p = DefaultProtectedValue::new(&exp);\n\n assert_ne!(p.secret.value(), &exp);\n\n assert_ne!(p.seed, 0);\n\n\n\n let v = p.get_secret();\n\n assert_eq!(v.value(), &exp);\n\n}\n\n\n\n//=============================================================================\n\n// ProtectedValue\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/mem/tests.rs", "rank": 4, "score": 77921.59515923585 }, { "content": "#[test]\n\nfn test_simplecache_impl() {\n\n let c: SimpleCache<u64, u64> = SimpleCache::new(10);\n\n {\n\n let r = c.engine.read().unwrap();\n\n assert_eq!(r.len(), 0);\n\n }\n\n {\n\n let mut w = c.engine.write().unwrap();\n\n w.clear();\n\n }\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 5, "score": 75977.97706417715 }, { "content": "#[test]\n\nfn test_nanoseconds_to_microseconds() {\n\n assert_eq!(nanoseconds_to_microseconds(12345), 12);\n\n assert_eq!(nanoseconds_to_microseconds(2345), 2);\n\n assert_eq!(nanoseconds_to_microseconds(345), 0);\n\n}\n", "file_path": "src/time/tests.rs", "rank": 7, "score": 75977.97706417715 }, { "content": "#[test]\n\nfn test_simplecacheentry_impl() {\n\n let v = Arc::new(10 as u64);\n\n\n\n let e 
= SimpleCacheEntry::new(&v, 10);\n\n assert_eq!(e.counter(), 10);\n\n\n\n let vr = e.get_value();\n\n assert_eq!(v, vr);\n\n assert!(!std::ptr::eq(&v, &vr));\n\n\n\n let mut e = SimpleCacheEntry::new(&v, 10);\n\n assert_eq!(e.counter(), 10);\n\n e.set_counter(1234);\n\n assert_eq!(e.counter(), 1234);\n\n}\n\n\n\n//=============================================================================\n\n// SimpleCacheEngine\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/cache/tests.rs", "rank": 8, "score": 75977.97706417715 }, { "content": "#[test]\n\nfn test_bytemaskgenerator_new() {\n\n let g = ByteMaskGenerator::new(1234);\n\n assert_eq!(g.state, 1234)\n\n}\n\n\n", "file_path": "src/mem/tests.rs", "rank": 9, "score": 75977.97706417715 }, { "content": "#[test]\n\nfn test_bytemaskgenerator_next() {\n\n // Reference\n\n let mut g = ByteMaskGenerator::new(1234);\n\n assert_eq!(g.next(), 0x5b);\n\n assert_eq!(g.next(), 0x18);\n\n assert_eq!(g.next(), 0x2a);\n\n\n\n // Test stability\n\n let seed: u64 = random();\n\n let mut g1 = ByteMaskGenerator::new(seed);\n\n let mut g2 = ByteMaskGenerator::new(seed);\n\n for _ in 0..1000 {\n\n assert_eq!(g1.next(), g2.next());\n\n }\n\n}\n\n\n\n//=============================================================================\n\n// DefaultProtectedValue\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/mem/tests.rs", "rank": 10, "score": 75977.97706417715 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[test]\n\nfn test_lock_supported() {\n\n assert!(lock_supported());\n\n}\n\n\n\n//=============================================================================\n\n// SecretBytes\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/mem/tests.rs", "rank": 11, "score": 75977.72790489894 }, { "content": "#[test]\n\nfn test_defaultprotectedvalue_apply_mask() {\n\n let zero: [u8; 16] = 
[0; 16];\n\n let seed = 1234;\n\n let mut apply: [u8; 16] = [0; 16];\n\n\n\n DefaultProtectedValue::apply_mask(seed, &mut apply);\n\n assert_ne!(&zero, &apply);\n\n DefaultProtectedValue::apply_mask(seed, &mut apply);\n\n assert_eq!(&zero, &apply);\n\n}\n\n\n", "file_path": "src/mem/tests.rs", "rank": 12, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_simplesliceserializer_impl() {\n\n let mut data: [u8; 4] = [0; 4];\n\n let mut v = SimpleSliceSerializer::new(&mut data);\n\n\n\n for i in 0..4 {\n\n v.offset = i;\n\n assert_eq!(v.offset(), i);\n\n assert_eq!(v.available(), 4 - i);\n\n assert_eq!(v.offset(), i);\n\n for r in 0..v.available() {\n\n v.can_write(r).unwrap();\n\n }\n\n assert!(matches!(\n\n v.can_write(v.available() + 1),\n\n Err(ErrorKind::UnableToWrite)\n\n ));\n\n }\n\n}\n\n\n", "file_path": "src/simple_serialization/tests.rs", "rank": 13, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_secret_bytes_with_value() {\n\n let exp: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];\n\n\n\n // Locked\n\n let s = SecretBytes::with_value(&exp, true);\n\n assert_eq!(s.len(), s.len());\n\n assert_eq!(s.buffer_len(), s.len());\n\n assert_eq!(s.value(), &exp);\n\n if SecretBytes::lock_supported() {\n\n assert!(s.locked());\n\n } else {\n\n assert!(!s.locked());\n\n }\n\n\n\n // Unlocked\n\n let s = SecretBytes::with_value(&exp, false);\n\n assert_eq!(s.len(), s.len());\n\n assert_eq!(s.buffer_len(), s.len());\n\n assert_eq!(s.value(), &exp);\n\n assert!(!s.locked());\n\n}\n\n\n", "file_path": "src/mem/tests.rs", "rank": 14, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_sharedfilewritelockguard_impl() {\n\n let test_dir = TestDirUtils::new(\"test_sharedfilewritelockguard_impl\").unwrap();\n\n let lock_file = test_dir.create_test_file(\"target.lock\", b\"1\").unwrap();\n\n let target_file = test_dir.create_test_file(\"target\", b\"2\").unwrap();\n\n\n\n let mut lock = fd_lock::RwLock::new(File::open(&lock_file).unwrap());\n\n let 
mut lock2 = fd_lock::RwLock::new(File::open(&lock_file).unwrap());\n\n let mut target = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(&target_file)\n\n .unwrap();\n\n {\n\n let mut rwlock = SharedFileWriteLockGuard {\n\n file: &mut target,\n\n _lock: lock.write().unwrap(),\n\n };\n\n // Cannot read nor write\n\n assert!(lock2.try_write().is_err());\n\n drop(lock2.try_read().is_err());\n", "file_path": "src/fs/shared/tests.rs", "rank": 15, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_secret_bytes_clone() {\n\n let exp: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];\n\n // Locked\n\n let mut src = SecretBytes::with_value(&exp, true);\n\n let s = src.clone();\n\n assert_eq!(s.len(), s.len());\n\n assert_eq!(s.buffer_len(), s.len());\n\n assert_eq!(s.value(), &exp);\n\n if SecretBytes::lock_supported() {\n\n assert!(s.locked());\n\n } else {\n\n assert!(!s.locked());\n\n }\n\n\n\n // Locked alt len\n\n src.set_len(6);\n\n let s = src.clone();\n\n assert_eq!(s.len(), 6);\n\n assert_eq!(s.buffer_len(), exp.len());\n\n assert_eq!(s.value(), &exp[..6]);\n", "file_path": "src/mem/tests.rs", "rank": 16, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_shared_directory() {\n\n let test_dir = TestDirUtils::new(\"test_shared_directory\").unwrap();\n\n test_dir.reset().unwrap();\n\n\n\n let mut shared1 = SharedDirectory::new(test_dir.test_dir()).unwrap();\n\n\n\n // Ensure that the lock file exists\n\n let lock_file = test_dir.get_test_file_path(SharedDirectory::DEFAULT_LOCK_FILE_NAME);\n\n let lock_file_path = Path::new(&lock_file);\n\n assert!(lock_file_path.is_file());\n\n\n\n let mut shared2 = SharedDirectory::new(test_dir.test_dir()).unwrap();\n\n\n\n // Test write lock from 1 and read from 2\n\n let lock1 = shared1.write().unwrap();\n\n assert!(shared2.try_read().is_err());\n\n drop(lock1);\n\n\n\n // Test read from 2 and write from 1\n\n let lock2 = shared2.read().unwrap();\n\n assert!(shared1.try_write().is_err());\n\n 
drop(lock2);\n\n\n\n // Write again fom 1\n\n let lock1 = shared1.write().unwrap();\n\n drop(lock1);\n\n}\n", "file_path": "src/fs/shared/tests.rs", "rank": 17, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_secret_bytes_new() {\n\n // Locked\n\n let s = SecretBytes::new(16, true);\n\n let exp: [u8; 16] = [0; 16];\n\n assert_eq!(s.len(), s.len());\n\n assert_eq!(s.buffer_len(), s.len());\n\n assert_eq!(s.value(), &exp);\n\n if SecretBytes::lock_supported() {\n\n assert!(s.locked());\n\n } else {\n\n assert!(!s.locked());\n\n }\n\n\n\n // Unlocked\n\n let s = SecretBytes::new(16, false);\n\n let exp: [u8; 16] = [0; 16];\n\n assert_eq!(s.len(), s.len());\n\n assert_eq!(s.buffer_len(), s.len());\n\n assert_eq!(s.value(), &exp);\n\n assert!(!s.locked());\n\n}\n\n\n", "file_path": "src/mem/tests.rs", "rank": 18, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_defaultsharedfilelocknamebuilder_impl() {\n\n assert_eq!(DefaultSharedFileLockNameBuilder::LOCK_FILE_PREFIX, \".\");\n\n assert_eq!(DefaultSharedFileLockNameBuilder::LOCK_FILE_SUFFIX, \".lock~\");\n\n}\n\n\n", "file_path": "src/fs/shared/tests.rs", "rank": 19, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_sharedfile_impl() {\n\n let test_dir = TestDirUtils::new(\"test_sharedfile_impl\").unwrap();\n\n\n\n // This test ends up testing all constructors because\n\n // new() calls with_options(),\n\n // with_options() calls with_option_builder() and\n\n // with_option_builder() calls with_option_lock_file().\n\n //\n\n // Furthermore, it also tests read(), try_read(), write() and try_write() as\n\n // well in a concurrent scenario with at least to SharedFile instances\n\n // pointing to the same file.\n\n let dummy_content: &'static str = \"123456\";\n\n let test_file = test_dir.get_test_file_path(\"protected\");\n\n let test_file_path = Path::new(&test_file);\n\n let lock_file_builder = DefaultSharedFileLockNameBuilder;\n\n let test_file_lock = lock_file_builder\n\n 
.create_lock_file_path(test_file_path)\n\n .unwrap();\n\n let test_file_lock_path = Path::new(&test_file_lock);\n\n\n", "file_path": "src/fs/shared/tests.rs", "rank": 20, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_simplecacheengine_simplecacheengine_insert() {\n\n let mut e: SimpleCacheEngine<u64, u64> = SimpleCacheEngine::new(10);\n\n\n\n // Adding 10 entries\n\n for key in 0..10 as u64 {\n\n assert_eq!(e.len(), key as usize);\n\n let value = Arc::new(key + 100);\n\n let curr_counter = e.counter;\n\n e.insert(key, &value);\n\n assert_eq!(e.len(), (key + 1) as usize);\n\n let entry = e.map.get(&key).unwrap();\n\n assert_eq!(entry.counter, curr_counter);\n\n assert_eq!(*entry.value, *value);\n\n }\n\n\n\n // Replacing entries\n\n for key in 0..10 as u64 {\n\n assert_eq!(e.len(), 10);\n\n let value = Arc::new(key + 1000);\n\n let curr_counter = e.counter;\n", "file_path": "src/cache/tests.rs", "rank": 21, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_simpleslicedeserializer_impl() {\n\n let data: [u8; 4] = [0; 4];\n\n let mut v = SimpleSliceDeserializer::new(&data);\n\n\n\n for i in 0..4 {\n\n v.offset = i;\n\n assert_eq!(v.offset(), i);\n\n assert_eq!(v.avaliable(), 4 - i);\n\n for r in 0..v.avaliable() {\n\n v.can_read(r).unwrap();\n\n }\n\n assert!(matches!(\n\n v.can_read(v.avaliable() + 1),\n\n Err(ErrorKind::UnableToRead)\n\n ));\n\n }\n\n}\n\n\n", "file_path": "src/simple_serialization/tests.rs", "rank": 22, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_error_conversion() {\n\n match translation() {\n\n Err(MyErrorkind::MyError) => (),\n\n Ok(()) => panic!(\"Error expected!\"),\n\n }\n\n}\n\n\n\n//=============================================================================\n\n// Samples\n\n//-----------------------------------------------------------------------------\n\nconst SAMPLE: &'static [u8; 50] = &[\n\n 0x36, 0x57, 0x2c, 0x7d, 0xe4, 0x48, 0xb2, 0x70, 0xc4, 0x87, 0x47, 0x46, 0xab, 0x46, 0x67, 
0x5f,\n\n 0x43, 0xea, 0xee, 0xf6, 0xe5, 0x5a, 0xf7, 0x0e, 0x57, 0xb5, 0x60, 0xa6, 0x8f, 0x81, 0x66, 0x42,\n\n 0x11, 0x40, 0x49, 0x0f, 0xdb, 0x40, 0x09, 0x21, 0xFB, 0x54, 0x44, 0x2D, 0x18, 0x00, 0x03, 0x19,\n\n 0x26, 0x39,\n\n];\n\n\n\nconst SAMPLE00: &'static [u8; 3] = &[0x36, 0x57, 0x2c];\n\nconst SAMPLE01: u8 = 0x7d;\n\nconst SAMPLE02: u16 = 0xe448;\n", "file_path": "src/simple_serialization/tests.rs", "rank": 23, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_simpledataserializer_vec() {\n\n let mut v = Vec::<u8>::new();\n\n\n\n v.write(SAMPLE00).unwrap();\n\n v.write_u8(SAMPLE01).unwrap();\n\n v.write_u16(SAMPLE02).unwrap();\n\n v.write_u32(SAMPLE03).unwrap();\n\n v.write_u64(SAMPLE04).unwrap();\n\n v.write_i8(SAMPLE05).unwrap();\n\n v.write_i16(SAMPLE06).unwrap();\n\n v.write_i32(SAMPLE07).unwrap();\n\n v.write_i64(SAMPLE08).unwrap();\n\n v.write_f32(SAMPLE09).unwrap();\n\n v.write_f64(SAMPLE10).unwrap();\n\n v.write_byte_array(SAMPLE11).unwrap();\n\n assert_eq!(v.as_slice(), SAMPLE);\n\n}\n\n\n\n//=============================================================================\n\n// SimpleSliceSerializer\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/simple_serialization/tests.rs", "rank": 24, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_simplecacheengine_impl_new() {\n\n let e: SimpleCacheEngine<u64, u64> = SimpleCacheEngine::new(10);\n\n assert_eq!(e.map.len(), 0);\n\n assert_eq!(e.max_size, 10);\n\n assert_eq!(e.counter, 0);\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 25, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_sharedfilereadlockguard_impl() {\n\n let test_dir = TestDirUtils::new(\"test_sharedfilereadlockguard_impl\").unwrap();\n\n let lock_file = test_dir.create_test_file(\"target.lock\", b\"1\").unwrap();\n\n let target_file = test_dir.create_test_file(\"target\", b\"2\").unwrap();\n\n\n\n let lock = 
fd_lock::RwLock::new(File::open(&lock_file).unwrap());\n\n let mut lock2 = fd_lock::RwLock::new(File::open(&lock_file).unwrap());\n\n let mut target = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(&target_file)\n\n .unwrap();\n\n {\n\n let mut rlock = SharedFileReadLockGuard {\n\n file: &mut target,\n\n _lock: lock.read().unwrap(),\n\n };\n\n // Cannot write\n\n assert!(lock2.try_write().is_err());\n\n // But can read\n", "file_path": "src/fs/shared/tests.rs", "rank": 26, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_secret_bytes_len() {\n\n let exp: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let mut s = SecretBytes::with_value(&exp, false);\n\n\n\n assert_eq!(s.len(), exp.len());\n\n assert_eq!(s.buffer_len(), exp.len());\n\n assert_eq!(s.value(), &exp);\n\n assert_eq!(s.mut_value(), &exp);\n\n assert_eq!(s.buffer(), &exp);\n\n assert_eq!(s.mut_buffer(), &exp);\n\n assert_eq!(s.value().as_ptr(), s.buffer().as_ptr());\n\n\n\n s.set_len(4);\n\n assert_eq!(s.len(), 4);\n\n assert_eq!(s.buffer_len(), exp.len());\n\n assert_eq!(s.value(), &exp[..4]);\n\n assert_eq!(s.mut_value(), &exp[..4]);\n\n assert_eq!(s.mut_buffer(), &exp);\n\n assert_eq!(s.value().as_ptr(), s.buffer().as_ptr());\n\n\n\n s.set_len(9);\n\n assert_eq!(s.len(), exp.len());\n\n assert_eq!(s.buffer_len(), exp.len());\n\n assert_eq!(s.value(), &exp);\n\n assert_eq!(s.mut_value(), &exp);\n\n assert_eq!(s.mut_buffer(), &exp);\n\n assert_eq!(s.value().as_ptr(), s.buffer().as_ptr());\n\n}\n\n\n", "file_path": "src/mem/tests.rs", "rank": 27, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_create_protected_value() {\n\n let exp: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let x = create_protected_value(&exp);\n\n let y = x.clone();\n\n\n\n let s1 = x.get_secret();\n\n let s2 = y.get_secret();\n\n assert_eq!(s1.value(), &exp);\n\n assert_eq!(s1.value(), s2.value());\n\n}\n", "file_path": "src/mem/tests.rs", "rank": 28, "score": 74157.97607077617 }, { "content": 
"#[test]\n\nfn test_simplecacheengine_simplecacheengine_get() {\n\n let mut e: SimpleCacheEngine<u64, u64> = SimpleCacheEngine::new(10);\n\n\n\n for key in 0..10 as u64 {\n\n let value = Arc::new(key + 100);\n\n let counter = e.next_counter();\n\n e.map.insert(key, SimpleCacheEntry::new(&value, counter));\n\n }\n\n\n\n // Test the recovery and the counter update at each\n\n for key in 0..10 as u64 {\n\n let old_counter = e.map.get(&key).unwrap().counter;\n\n let v = e.get(&key).unwrap();\n\n assert_eq!(*v, key + 100);\n\n let new_counter = e.map.get(&key).unwrap().counter;\n\n assert!(old_counter < new_counter);\n\n }\n\n\n\n // Ensure that the counter is always increased\n\n for key in 0..10 as u64 {\n", "file_path": "src/cache/tests.rs", "rank": 29, "score": 74157.97607077617 }, { "content": "#[test]\n\nfn test_simplecacheengine_simplecacheengine_clear() {\n\n let mut e: SimpleCacheEngine<u64, u64> = SimpleCacheEngine::new(10);\n\n\n\n for key in 0..10 as u64 {\n\n let value = Arc::new(key + 100);\n\n e.insert(key, &value);\n\n assert_eq!(e.len(), (key + 1) as usize);\n\n }\n\n assert!(!e.is_empty());\n\n e.clear();\n\n assert!(e.is_empty());\n\n}\n\n\n\n//=============================================================================\n\n// SimpleCache\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/cache/tests.rs", "rank": 30, "score": 74157.97607077617 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[test]\n\nfn test_lock_unlock_mem() {\n\n let mut vec = Vec::<u8>::with_capacity(16);\n\n vec.resize(16, 0);\n\n\n\n assert!(lock_mem(vec.as_ptr(), vec.len()));\n\n assert!(unlock_mem(vec.as_ptr(), vec.len()));\n\n\n\n assert!(!lock_mem(vec.as_ptr(), 0));\n\n assert!(!unlock_mem(vec.as_ptr(), 0));\n\n}\n\n\n", "file_path": "src/mem/tests.rs", "rank": 31, "score": 74157.72691149797 }, { "content": "#[test]\n\nfn test_simplecache_valuecache_concurrent_clear() {\n\n let c: Arc<SimpleCache<u64, u64>> = 
Arc::new(SimpleCache::new(10));\n\n\n\n for key in 0..10 as u64 {\n\n let value = Arc::new(key + 1000);\n\n c.insert(key, &value);\n\n }\n\n\n\n let t1c = Arc::clone(&c);\n\n let t1 = std::thread::spawn(move || {\n\n t1c.clear();\n\n assert_eq!(t1c.len(), 0);\n\n assert!(t1c.is_empty());\n\n });\n\n let t2c = Arc::clone(&c);\n\n let t2 = std::thread::spawn(move || {\n\n t2c.clear();\n\n assert_eq!(t2c.len(), 0);\n\n assert!(t2c.is_empty());\n\n });\n\n t1.join().unwrap();\n\n t2.join().unwrap();\n\n\n\n assert_eq!(c.len(), 0);\n\n assert!(c.is_empty());\n\n}\n", "file_path": "src/cache/tests.rs", "rank": 32, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_win32protectedvalue_new() {\n\n let exp: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let p = Win32ProtectedValue::new(&exp);\n\n\n\n assert_eq!(p.protected_data.len(), exp.len());\n\n assert_eq!(\n\n p.protected_data.buffer_len(),\n\n Win32ProtectedValue::protected_size(exp.len())\n\n );\n\n assert_ne!(p.protected_data.value(), &exp);\n\n}\n\n\n", "file_path": "src/mem/impl_win32/tests.rs", "rank": 33, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_simplesliceserializer_simpledataserializer_write() {\n\n let mut data: [u8; 50] = [0; 50];\n\n let mut v = SimpleSliceSerializer::new(&mut data);\n\n\n\n v.write(SAMPLE00).unwrap();\n\n v.write_u8(SAMPLE01).unwrap();\n\n v.write_u16(SAMPLE02).unwrap();\n\n v.write_u32(SAMPLE03).unwrap();\n\n v.write_u64(SAMPLE04).unwrap();\n\n v.write_i8(SAMPLE05).unwrap();\n\n v.write_i16(SAMPLE06).unwrap();\n\n v.write_i32(SAMPLE07).unwrap();\n\n v.write_i64(SAMPLE08).unwrap();\n\n v.write_f32(SAMPLE09).unwrap();\n\n v.write_f64(SAMPLE10).unwrap();\n\n v.write_byte_array(SAMPLE11).unwrap();\n\n assert_eq!(&data, SAMPLE);\n\n}\n\n\n", "file_path": "src/simple_serialization/tests.rs", "rank": 35, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_simplecacheengine_impl_next_counter() {\n\n let mut e: SimpleCacheEngine<u64, u64> = 
SimpleCacheEngine::new(10);\n\n\n\n assert_eq!(e.counter, 0);\n\n assert_eq!(e.next_counter(), 0);\n\n assert_eq!(e.next_counter(), 1);\n\n assert_eq!(e.next_counter(), 2);\n\n assert_eq!(e.next_counter(), 3);\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 37, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_simplecacheengine_impl_remove_oldest() {\n\n let mut e: SimpleCacheEngine<u64, u64> = SimpleCacheEngine::new(10);\n\n\n\n for key in 0..10 as u64 {\n\n let value = Arc::new(key);\n\n e.map.insert(key, SimpleCacheEntry::new(&value, key));\n\n }\n\n for key in 0..10 as u64 {\n\n assert_eq!(e.len(), (10 - key) as usize);\n\n e.remove_oldest();\n\n assert_eq!(e.len(), (10 - key - 1) as usize);\n\n assert!(e.get(&key).is_none());\n\n }\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 38, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_sharedfile_default_options() {\n\n let options = SharedFile::default_options();\n\n let mut exp_options = OpenOptions::new();\n\n exp_options.read(true).write(true).create(true);\n\n // I'll compare the contents of both options using debug as it has\n\n // access to the internal fields. Furthermore, the debug strings should\n\n // be equal if the objects are instantiated in the same way. 
Furthermore,\n\n // I think it will\n\n assert_eq!(format!(\"{:?}\", options), format!(\"{:?}\", exp_options));\n\n}\n\n\n\n//=============================================================================\n\n// SharedDirectory\n\n//-----------------------------------------------------------------------------\n\n\n", "file_path": "src/fs/shared/tests.rs", "rank": 40, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_simplecache_valuecache_concurrent_insert() {\n\n let c: Arc<SimpleCache<u64, u64>> = Arc::new(SimpleCache::new(10));\n\n\n\n let t1c = Arc::clone(&c);\n\n let t1 = std::thread::spawn(move || {\n\n for key in 0..8 as u64 {\n\n let value = Arc::new(key + 1000);\n\n t1c.insert(key, &value);\n\n }\n\n });\n\n let t2c = Arc::clone(&c);\n\n let t2 = std::thread::spawn(move || {\n\n for key in 2..10 as u64 {\n\n let value = Arc::new(key + 10000);\n\n t2c.insert(key, &value);\n\n }\n\n });\n\n t1.join().unwrap();\n\n t2.join().unwrap();\n\n\n\n // Test the result of the inserts\n\n for key in 0..10 as u64 {\n\n c.get(&key).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 41, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_simplecache_valuecache_concurrent_get() {\n\n let c: Arc<SimpleCache<u64, u64>> = Arc::new(SimpleCache::new(10));\n\n\n\n for key in 0..10 as u64 {\n\n let value = Arc::new(key + 1000);\n\n c.insert(key, &value);\n\n }\n\n\n\n let t1c = Arc::clone(&c);\n\n let t1 = std::thread::spawn(move || {\n\n for key in 0..8 as u64 {\n\n t1c.get(&key).unwrap();\n\n }\n\n });\n\n let t2c = Arc::clone(&c);\n\n let t2 = std::thread::spawn(move || {\n\n for key in 2..10 as u64 {\n\n t2c.get(&key).unwrap();\n\n }\n\n });\n", "file_path": "src/cache/tests.rs", "rank": 42, "score": 72449.74060012537 }, { "content": "#[test]\n\nfn test_simpleslicedeserializer_simpledeserializer_read() {\n\n let mut v = SimpleSliceDeserializer::new(&*SAMPLE);\n\n\n\n let mut offs = 0;\n\n let size = 3;\n\n 
v.read(SAMPLE00.len()).unwrap();\n\n assert_eq!(SAMPLE00, v.data());\n\n offs += size;\n\n\n\n let size = 1;\n\n assert_eq!(v.read_u8().unwrap(), SAMPLE01);\n\n assert_eq!(&SAMPLE[offs..offs + size], v.data());\n\n offs += size;\n\n\n\n let size = 2;\n\n assert_eq!(v.read_u16().unwrap(), SAMPLE02);\n\n assert_eq!(&SAMPLE[offs..offs + size], v.data());\n\n offs += size;\n\n\n\n let size = 4;\n", "file_path": "src/simple_serialization/tests.rs", "rank": 43, "score": 72449.74060012537 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[test]\n\nfn test_lock_unlock_mem_core() {\n\n let mut vec = Vec::<u8>::with_capacity(16);\n\n vec.resize(16, 0);\n\n\n\n assert!(lock_mem_core(vec.as_ptr() as *const c_void, vec.len()));\n\n assert!(unlock_mem_core(vec.as_ptr() as *const c_void, vec.len()));\n\n}\n\n\n", "file_path": "src/mem/tests.rs", "rank": 44, "score": 72449.49144084717 }, { "content": "#[test]\n\nfn test_lock_supported_core() {\n\n assert!(lock_supported_core());\n\n}\n", "file_path": "src/mem/impl_linux/tests.rs", "rank": 45, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_win32protectedvalue_get_secret() {\n\n let exp: [u8; 8] = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let p = Win32ProtectedValue::new(&exp);\n\n\n\n let s = p.get_secret();\n\n assert_ne!(p.protected_data.value(), &exp);\n\n assert_eq!(s.value(), &exp);\n\n}\n", "file_path": "src/mem/impl_win32/tests.rs", "rank": 46, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_win32protectedvalue_protected_size() {\n\n let block_size = CRYPTPROTECTMEMORY_BLOCK_SIZE as usize;\n\n for c in 0..8 {\n\n for i in (block_size * c)..(block_size * (c + 1)) {\n\n assert_eq!(Win32ProtectedValue::protected_size(i), block_size * (c + 1));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mem/impl_win32/tests.rs", "rank": 47, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_lock_supported_core() {\n\n assert!(!lock_supported_core());\n\n}\n", "file_path": "src/mem/impl_default/tests.rs", "rank": 
49, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_lock_supported_core() {\n\n assert!(lock_supported_core());\n\n}\n\n\n\n//=============================================================================\n\n// Win32ProtectedValue\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/mem/impl_win32/tests.rs", "rank": 50, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_sharedfilelocknamebuilder_get_lock_directory() {\n\n let f = DummySharedFileLockNameBuilder;\n\n\n\n let file = Path::new(\"name\");\n\n assert_eq!(f.get_lock_directory(&file), file.parent());\n\n\n\n let file = Path::new(\"/test/name\");\n\n assert_eq!(f.get_lock_directory(&file), file.parent());\n\n\n\n let file = Path::new(\"/\");\n\n assert_eq!(f.get_lock_directory(&file), None);\n\n}\n\n\n", "file_path": "src/fs/shared/tests.rs", "rank": 51, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_simpleslicedeserializer_simpledeserializer_read_fail() {\n\n let s: [u8; 8] = [0, 3, 0, 0, 0, 0, 0, 0];\n\n\n\n let mut v = SimpleSliceDeserializer::new(&s[..2]);\n\n assert!(matches!(v.read(3), Err(ErrorKind::UnableToRead)));\n\n assert_eq!(v.offset, 0);\n\n assert_eq!(v.data_offset, 0);\n\n\n\n let mut v = SimpleSliceDeserializer::new(&s[..0]);\n\n assert!(matches!(v.read_u8(), Err(ErrorKind::UnableToRead)));\n\n assert_eq!(v.offset, 0);\n\n assert_eq!(v.data_offset, 0);\n\n assert!(matches!(v.read_i8(), Err(ErrorKind::UnableToRead)));\n\n assert_eq!(v.offset, 0);\n\n assert_eq!(v.data_offset, 0);\n\n\n\n let mut v = SimpleSliceDeserializer::new(&s[..1]);\n\n assert!(matches!(v.read_u16(), Err(ErrorKind::UnableToRead)));\n\n assert_eq!(v.offset, 0);\n\n assert_eq!(v.data_offset, 0);\n", "file_path": "src/simple_serialization/tests.rs", "rank": 52, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_simplesliceserializer_simpledataserializer_write_fail() {\n\n let mut data: [u8; 2] = [0; 2];\n\n let mut v = 
SimpleSliceSerializer::new(&mut data);\n\n assert!(matches!(v.write(SAMPLE00), Err(ErrorKind::UnableToWrite)));\n\n\n\n let mut data: [u8; 0] = [];\n\n let mut v = SimpleSliceSerializer::new(&mut data);\n\n assert!(matches!(\n\n v.write_u8(SAMPLE01),\n\n Err(ErrorKind::UnableToWrite)\n\n ));\n\n assert!(matches!(\n\n v.write_i8(SAMPLE05),\n\n Err(ErrorKind::UnableToWrite)\n\n ));\n\n\n\n let mut data: [u8; 1] = [0];\n\n let mut v = SimpleSliceSerializer::new(&mut data);\n\n assert!(matches!(\n\n v.write_u16(SAMPLE02),\n", "file_path": "src/simple_serialization/tests.rs", "rank": 53, "score": 70842.93094315293 }, { "content": "#[test]\n\nfn test_lock_unlock_mem_core() {\n\n let mut v: Vec<u8> = Vec::with_capacity(16);\n\n v.resize(16, 0);\n\n assert!(lock_mem_core(v.as_ptr() as *const c_void, v.len()));\n\n assert!(unlock_mem_core(v.as_ptr() as *const c_void, v.len()));\n\n}\n\n\n", "file_path": "src/mem/impl_linux/tests.rs", "rank": 54, "score": 69328.48029130363 }, { "content": "#[test]\n\nfn test_lock_unlock_mem_core() {\n\n let mut v: Vec<u8> = Vec::with_capacity(16);\n\n v.resize(16, 0);\n\n assert!(lock_mem_core(v.as_ptr() as *const c_void, v.len()));\n\n assert!(unlock_mem_core(v.as_ptr() as *const c_void, v.len()));\n\n}\n\n\n", "file_path": "src/mem/impl_win32/tests.rs", "rank": 55, "score": 69328.48029130363 }, { "content": "#[test]\n\nfn test_sharedfilelocknamebuilder_create_lock_file_path() {\n\n let f = DummySharedFileLockNameBuilder;\n\n\n\n let file = Path::new(\"name\");\n\n assert_eq!(\n\n f.create_lock_file_path(&file).unwrap(),\n\n OsString::from(\"prefix.name.suffix\")\n\n );\n\n\n\n let file = Path::new(\"/test/name\");\n\n assert_eq!(\n\n f.create_lock_file_path(&file).unwrap(),\n\n OsString::from(\"/test/prefix.name.suffix\")\n\n );\n\n\n\n let file = Path::new(\"/name\");\n\n assert_eq!(\n\n f.create_lock_file_path(&file).unwrap(),\n\n OsString::from(\"/prefix.name.suffix\")\n\n );\n", "file_path": "src/fs/shared/tests.rs", "rank": 56, 
"score": 69328.48029130363 }, { "content": "#[test]\n\nfn test_lock_unlock_mem_core() {\n\n let mut v: Vec<u8> = Vec::with_capacity(16);\n\n v.resize(16, 0);\n\n assert!(!lock_mem_core(v.as_ptr() as *const c_void, v.len()));\n\n assert!(!unlock_mem_core(v.as_ptr() as *const c_void, v.len()));\n\n}\n\n\n", "file_path": "src/mem/impl_default/tests.rs", "rank": 57, "score": 69328.48029130363 }, { "content": "#[cfg(target_os = \"windows\")]\n\npub fn create_protected_value(value: &[u8]) -> Arc<dyn ProtectedValue> {\n\n Arc::new(impl_win32::Win32ProtectedValue::new(value))\n\n}\n", "file_path": "src/mem/mod.rs", "rank": 58, "score": 68841.24656441272 }, { "content": "fn translation() -> Result<()> {\n\n let mut buff: [u8; 0] = [];\n\n let mut serializer = SimpleSliceSerializer::new(&mut buff);\n\n serializer.write_u8(0)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/simple_serialization/tests.rs", "rank": 59, "score": 66462.23515307916 }, { "content": "fn main() {\n\n build_windows();\n\n}\n", "file_path": "build.rs", "rank": 60, "score": 47355.10051263917 }, { "content": "//=============================================================================\n\n// SharedFileLockNameBuilder\n\n//-----------------------------------------------------------------------------\n\nstruct DummySharedFileLockNameBuilder;\n\n\n\nimpl SharedFileLockNameBuilder for DummySharedFileLockNameBuilder {\n\n fn create_lock_file_name(&self, file_name: &OsStr) -> OsString {\n\n let mut lock_file_name = OsString::from(\"prefix.\");\n\n lock_file_name.push(file_name);\n\n lock_file_name.push(\".suffix\");\n\n lock_file_name\n\n }\n\n}\n\n\n", "file_path": "src/fs/shared/tests.rs", "rank": 61, "score": 46866.509670422 }, { "content": "#[cfg(target_os = \"windows\")]\n\nfn build_windows() {\n\n windows::build! 
{\n\n Windows::Win32::System::Memory::{VirtualLock,VirtualUnlock},\n\n Windows::Win32::Security::{CryptProtectMemory,CryptUnprotectMemory,CRYPTPROTECTMEMORY_SAME_PROCESS,CRYPTPROTECTMEMORY_BLOCK_SIZE},\n\n };\n\n}\n\n\n", "file_path": "build.rs", "rank": 62, "score": 45977.88566165545 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\nfn build_windows() {}\n\n\n", "file_path": "build.rs", "rank": 63, "score": 45977.88566165545 }, { "content": "//=============================================================================\n\n// CacheEntry\n\n//-----------------------------------------------------------------------------\n\n/// This struct implements a SimpleCache entry. The value is shared by an\n\n/// [`Arc`] reference.\n\nstruct SimpleCacheEntry<V: Send + Sync> {\n\n value: Arc<V>,\n\n counter: u64,\n\n}\n\n\n\nimpl<V: Send + Sync> SimpleCacheEntry<V> {\n\n /// Creates a new [`SimpleCacheEntry`].\n\n ///\n\n /// Arguments:\n\n /// - `value`: The value;\n\n /// - `counter`: The current counter;\n\n ///\n\n pub fn new(value: &Arc<V>, counter: u64) -> Self {\n\n Self {\n\n value: Arc::clone(value),\n\n counter,\n\n }\n\n }\n\n\n\n /// Returns a new [`Arc`] that points to the value.\n", "file_path": "src/cache/mod.rs", "rank": 64, "score": 45546.70215923032 }, { "content": "#[inline]\n\npub fn get_timestamp() -> i64 {\n\n let now = chrono::Utc::now();\n\n nanoseconds_to_microseconds(now.timestamp_nanos())\n\n}\n", "file_path": "src/time/mod.rs", "rank": 65, "score": 38534.15920193519 }, { "content": "/// Determines if this platform supports memory locking or not.\n\n///\n\n/// Returns true if it is supported or false otherwise.\n\npub fn lock_supported() -> bool {\n\n lock_supported_core()\n\n}\n\n\n\n//=============================================================================\n\n// SecretBytes\n\n//-----------------------------------------------------------------------------\n\n/// This struct wraps a byte array that is guaranteed to have its 
contents\n\n/// shredded upon destruction.\n\n///\n\n/// It also allows the locking of the value in memory if required, preventing it\n\n/// from being moved into the disk.\n\n///\n\n/// This struct also implements a mechanism to set a logical length that differs\n\npub struct SecretBytes {\n\n value: Vec<u8>,\n\n locked: bool,\n\n len: usize,\n\n}\n\n\n", "file_path": "src/mem/mod.rs", "rank": 66, "score": 38534.15920193519 }, { "content": "#[inline]\n\npub fn lock_supported_core() -> bool {\n\n true\n\n}\n\n\n\n//=============================================================================\n\n// Win32ProtectedValue\n\n//-----------------------------------------------------------------------------\n\n/// This is the implementation of the [`ProtectedValue`] for Windows that uses\n\n/// `CryptProtectMemory()` and `CryptUnprotectMemory()` to protect the values\n\n/// against memory scans attacks.\n\npub struct Win32ProtectedValue {\n\n protected_data: SecretBytes,\n\n}\n\n\n\nimpl Win32ProtectedValue {\n\n /// Returns the size of the buffer required to store the protected value.\n\n ///\n\n /// Arguments:\n\n /// - `data_size`: The size of the value to be protected.\n\n ///\n", "file_path": "src/mem/impl_win32/mod.rs", "rank": 67, "score": 35969.363391401406 }, { "content": "#[inline]\n\npub fn lock_supported_core() -> bool {\n\n false\n\n}\n", "file_path": "src/mem/impl_default/mod.rs", "rank": 68, "score": 35969.363391401406 }, { "content": "#[inline]\n\npub fn lock_supported_core() -> bool {\n\n true\n\n}\n", "file_path": "src/mem/impl_linux/mod.rs", "rank": 69, "score": 35969.363391401406 }, { "content": "#[inline]\n\npub fn nanoseconds_to_microseconds(nanos: i64) -> i64 {\n\n nanos / 1000\n\n}\n\n\n\n/// Returns the current timestamp in microseconds in UNIX Time.\n", "file_path": "src/time/mod.rs", "rank": 70, "score": 34718.72596025624 }, { "content": "//=============================================================================\n\n// 
SimpleCacheEngine\n\n//-----------------------------------------------------------------------------\n\n/// This struct implements the SimpleCacheEngine. It is the core of the\n\n/// [`SimpleCache`] implementation.\n\n///\n\n/// When it reaches its maximum capacity it will drop the oldest unused entries.\n\n///\n\n/// This struct is not thread safe and must have its concurrency protected by\n\n/// an external [`RwLock`] or other synchronization primitive.\n\nstruct SimpleCacheEngine<K: Eq + Hash + Copy + Send + Sync, V: Send + Sync> {\n\n map: HashMap<K, SimpleCacheEntry<V>>,\n\n max_size: usize,\n\n counter: u64,\n\n}\n\n\n\nimpl<K: Eq + Hash + Copy + Send + Sync, V: Send + Sync> SimpleCacheEngine<K, V> {\n\n /// Creates a new `SimpleCacheEngine` with a given capacity.\n\n ///\n\n /// Arguments:\n\n /// - `max_size`: Maximum number of items in the cache;\n\n pub fn new(max_size: usize) -> Self {\n\n Self {\n\n map: HashMap::new(),\n\n max_size,\n\n counter: 0,\n\n }\n\n }\n\n\n\n /// Returns the next value of the internal counter.\n", "file_path": "src/cache/mod.rs", "rank": 71, "score": 34415.31830574942 }, { "content": "//=============================================================================\n\n// ValueCache\n\n//-----------------------------------------------------------------------------\n\n/// This trait is implemented by all value caches on this module. A value cache\n\n/// must be able to associate shared read-only values to a given key value.\n\n///\n\n/// It is up to the implementator of this trait to define how old values are\n\n/// prunned from the cache.\n\n///\n\n/// All methods of this trait are required to be thread safe.\n\npub trait ValueCache<K: Eq + Hash + Copy + Sync, V: Send + Sync>: Send {\n\n /// Gets the value from the cache if it exists.\n\n ///\n\n /// Arguments:\n\n /// - `key`: The key to be found;\n\n ///\n\n /// Returns:\n\n /// - `Some(v)`: The cached value. 
`v` is a new [`Arc`] that points to it.\n\n /// - `None`: IF the entry is not in the cache;\n\n fn get(&self, key: &K) -> Option<Arc<V>>;\n\n\n\n /// Inserts the value into the cache. Reinserting a new value with the\n\n /// same key will replace the existing value.\n\n ///\n\n /// Arguments:\n\n /// - `key`: The key;\n\n /// - `value`: A reference to an [`Arc`] that points to the value;\n\n fn insert(&self, key: K, value: &Arc<V>);\n\n\n\n /// Removes all entries from the cache.\n\n fn clear(&self);\n\n\n\n /// Returns the number of entries in the cache.\n\n fn len(&self) -> usize;\n\n\n\n /// Returns true if the cache is empty or false otherwise.\n\n fn is_empty(&self) -> bool;\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 72, "score": 33711.53099864077 }, { "content": "//=============================================================================\n\n// CacheEngine\n\n//-----------------------------------------------------------------------------\n\n/// This trait is implemented by all value caches on this module. A value cache\n\n/// must be able to associate shared read-only values to a given key value.\n\n///\n\n/// It is up to the implementator of this trait to define how old values are\n\n/// prunned from the cache.\n\n///\n\n/// All methods of this trait are required to be thread safe.\n\npub trait CacheEngine<K: Eq + Hash + Copy + Sync, V: Send + Sync>: Sync {\n\n /// Gets the value from the cache if it exists.\n\n ///\n\n /// Arguments:\n\n /// - `key`: The key to be found;\n\n ///\n\n /// Returns:\n\n /// - `Some(v)`: The cached value. 
`v` is a new [`Arc`] that points to it.\n\n /// - `None`: IF the entry is not in the cache;\n\n fn get(&mut self, key: &K) -> Option<Arc<V>>;\n\n\n\n /// Inserts the value into the cache.\n\n ///\n\n /// Arguments:\n\n /// - `key`: The key;\n\n /// - `value`: A reference to an [`Arc`] that points to the value;\n\n fn insert(&mut self, key: K, value: &Arc<V>);\n\n\n\n /// Removes all entries from the cache.\n\n fn clear(&mut self);\n\n\n\n /// Returns the number of entries in the cache.\n\n fn len(&self) -> usize;\n\n\n\n /// Returns true if the cache is empty or false otherwise.\n\n fn is_empty(&self) -> bool;\n\n}\n\n\n", "file_path": "src/cache/mod.rs", "rank": 73, "score": 33711.53099864077 }, { "content": " * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n */\n\nuse super::*;\n\n\n\n#[test]\n", "file_path": "src/time/tests.rs", "rank": 74, "score": 29093.122205208856 }, { "content": " * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n */\n\nuse super::*;\n\n\n\n//=============================================================================\n\n// CacheEntry\n\n//-----------------------------------------------------------------------------\n\n#[test]\n", "file_path": "src/cache/tests.rs", "rank": 76, "score": 29091.665238564718 }, { "content": " * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n */\n\nuse super::*;\n\n\n\n#[cfg(target_os = \"linux\")]\n\n#[test]\n", "file_path": "src/mem/tests.rs", "rank": 77, "score": 29090.25209472095 }, { "content": "/*\n\n * BSD 3-Clause License\n\n *\n\n * Copyright (c) 2019-2020, InterlockLedger Network\n\n * All rights reserved.\n\n *\n\n * Redistribution and use in source and binary forms, with or without\n\n * modification, are permitted provided that the following conditions are met:\n\n *\n\n * * Redistributions of source code must retain the above copyright notice, this\n\n * list of conditions and the following disclaimer.\n\n *\n\n * * Redistributions in binary form must reproduce the above copyright notice,\n\n * this list of conditions and the following disclaimer in the documentation\n\n * and/or other materials provided with the distribution.\n\n *\n\n * * Neither the name of the copyright holder nor the names of its\n\n * contributors may be used to endorse or promote products derived from\n\n * this software without specific prior written permission.\n\n *\n", "file_path": "src/cache/tests.rs", "rank": 78, "score": 29081.108822865375 }, { "content": "/*\n\n * BSD 3-Clause License\n\n *\n\n * Copyright (c) 2019-2020, InterlockLedger Network\n\n * All rights reserved.\n\n *\n\n * Redistribution and use in source and binary forms, with or without\n\n * modification, are permitted provided that the following conditions are met:\n\n *\n\n * * Redistributions of 
source code must retain the above copyright notice, this\n\n * list of conditions and the following disclaimer.\n\n *\n\n * * Redistributions in binary form must reproduce the above copyright notice,\n\n * this list of conditions and the following disclaimer in the documentation\n\n * and/or other materials provided with the distribution.\n\n *\n\n * * Neither the name of the copyright holder nor the names of its\n\n * contributors may be used to endorse or promote products derived from\n\n * this software without specific prior written permission.\n\n *\n", "file_path": "src/mem/tests.rs", "rank": 79, "score": 29081.108822865375 }, { "content": "/*\n\n * BSD 3-Clause License\n\n *\n\n * Copyright (c) 2019-2020, InterlockLedger Network\n\n * All rights reserved.\n\n *\n\n * Redistribution and use in source and binary forms, with or without\n\n * modification, are permitted provided that the following conditions are met:\n\n *\n\n * * Redistributions of source code must retain the above copyright notice, this\n\n * list of conditions and the following disclaimer.\n\n *\n\n * * Redistributions in binary form must reproduce the above copyright notice,\n\n * this list of conditions and the following disclaimer in the documentation\n\n * and/or other materials provided with the distribution.\n\n *\n\n * * Neither the name of the copyright holder nor the names of its\n\n * contributors may be used to endorse or promote products derived from\n\n * this software without specific prior written permission.\n\n *\n", "file_path": "src/time/tests.rs", "rank": 80, "score": 29081.108822865375 }, { "content": " t1.join().unwrap();\n\n t2.join().unwrap();\n\n\n\n // Test the counters\n\n for key in 0..10 as u64 {\n\n let counter = c.engine.read().unwrap().map.get(&key).unwrap().counter;\n\n print!(\"{:?} \", counter);\n\n assert!(counter >= 10);\n\n }\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 82, "score": 28998.177276340324 }, { "content": " if 
SecretBytes::lock_supported() {\n\n assert!(s.locked());\n\n } else {\n\n assert!(!s.locked());\n\n }\n\n\n\n // Unlocked\n\n let mut src = SecretBytes::with_value(&exp, false);\n\n let s = src.clone();\n\n assert_eq!(s.len(), s.len());\n\n assert_eq!(s.buffer_len(), s.len());\n\n assert_eq!(s.value(), &exp);\n\n assert!(!s.locked());\n\n\n\n // Unlocked alt len\n\n src.set_len(6);\n\n let s = src.clone();\n\n assert_eq!(s.len(), 6);\n\n assert_eq!(s.buffer_len(), exp.len());\n\n assert_eq!(s.value(), &exp[..6]);\n\n assert!(!s.locked());\n\n}\n\n\n\n//=============================================================================\n\n// ByteMaskGenerator\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/mem/tests.rs", "rank": 83, "score": 28997.39651761819 }, { "content": " let old_counter = e.map.get(&key).unwrap().counter;\n\n let v = e.get(&key).unwrap();\n\n assert_eq!(*v, key + 100);\n\n let new_counter = e.map.get(&key).unwrap().counter;\n\n assert!(old_counter < new_counter);\n\n }\n\n\n\n let key = 10 as u64;\n\n assert!(e.get(&key).is_none());\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 84, "score": 28995.50817209922 }, { "content": " e.insert(key, &value);\n\n assert_eq!(e.len(), 10);\n\n let entry = e.map.get(&key).unwrap();\n\n assert_eq!(entry.counter, curr_counter);\n\n assert_eq!(*entry.value, *value);\n\n }\n\n\n\n // Adding 10 new entries\n\n for key in 10..20 as u64 {\n\n assert_eq!(e.len(), 10);\n\n let value = Arc::new(key + 1000);\n\n let curr_counter = e.counter;\n\n e.insert(key, &value);\n\n assert_eq!(e.len(), 10);\n\n let entry = e.map.get(&key).unwrap();\n\n assert_eq!(entry.counter, curr_counter);\n\n assert_eq!(*entry.value, *value);\n\n\n\n // The older key will always be the one with the smallest key\n\n let removed = key - 10;\n\n assert!(e.map.get(&removed).is_none());\n\n }\n\n}\n\n\n", "file_path": "src/cache/tests.rs", "rank": 85, "score": 28995.50817209922 }, { 
"content": "#[allow(clippy::not_unsafe_ptr_arg_deref)]\n\n#[inline]\n\npub fn unlock_mem_core(ptr: *const c_void, size: usize) -> bool {\n\n unsafe { matches!(munlock(ptr, size), 0) }\n\n}\n\n\n", "file_path": "src/mem/impl_linux/mod.rs", "rank": 86, "score": 28532.872796558266 }, { "content": "#[inline]\n\npub fn lock_mem_core(ptr: *const c_void, size: usize) -> bool {\n\n unsafe { VirtualLock(ptr as *mut c_void, size).as_bool() }\n\n}\n\n\n", "file_path": "src/mem/impl_win32/mod.rs", "rank": 87, "score": 28532.872796558266 }, { "content": "#[inline]\n\npub fn unlock_mem_core(ptr: *const c_void, size: usize) -> bool {\n\n unsafe { VirtualUnlock(ptr as *mut c_void, size).as_bool() }\n\n}\n\n\n", "file_path": "src/mem/impl_win32/mod.rs", "rank": 88, "score": 28532.872796558266 }, { "content": "#[allow(clippy::not_unsafe_ptr_arg_deref)]\n\n#[inline]\n\npub fn lock_mem_core(ptr: *const c_void, size: usize) -> bool {\n\n unsafe { matches!(mlock(ptr, size), 0) }\n\n}\n\n\n", "file_path": "src/mem/impl_linux/mod.rs", "rank": 89, "score": 28532.872796558266 }, { "content": "#[inline]\n\npub fn unlock_mem_core(ptr: *const c_void, size: usize) -> bool {\n\n false\n\n}\n\n\n", "file_path": "src/mem/impl_default/mod.rs", "rank": 90, "score": 28532.872796558266 }, { "content": "#[inline]\n\npub fn lock_mem_core(ptr: *const c_void, size: usize) -> bool {\n\n false\n\n}\n\n\n", "file_path": "src/mem/impl_default/mod.rs", "rank": 91, "score": 28532.872796558266 }, { "content": "/// Try to lock the memory segment into memory, preventing it from\n\n/// being moved to the disk. 
All calls to this function must be\n\n/// followed by a call to [`unlock_mem()`].\n\n///\n\n/// Use this method with extreme care because it interferes with tne\n\n/// OS ability to manage virtual memory.\n\n///\n\n/// Arguments:\n\n/// - `ptr`: The pointer to the memory segment;\n\n/// - `size`: The size of the ptr in units;\n\n///\n\n/// Retunrs true on success or false otherwise.\n\npub fn lock_mem<T: Sized>(ptr: *const T, size: usize) -> bool {\n\n if size > 0 {\n\n lock_mem_core(ptr as *const c_void, size * size_of::<T>())\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/mem/mod.rs", "rank": 92, "score": 28109.679748089016 }, { "content": "/// Unlocks the memory segment. It reverts the effects of [`lock_mem()`].\n\n///\n\n/// Arguments:\n\n/// - `ptr`: The pointer to the memory segment;\n\n/// - `size`: The size of the ptr in units;\n\n///\n\n/// Retunrs true on success or false otherwise.\n\npub fn unlock_mem<T: Sized>(ptr: *const T, size: usize) -> bool {\n\n if size > 0 {\n\n unlock_mem_core(ptr as *const c_void, size * size_of::<T>())\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/mem/mod.rs", "rank": 93, "score": 28102.775910877022 }, { "content": " * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n */\n\nuse super::*;\n\n\n\n//=============================================================================\n\n// Test error remaping\n\n//-----------------------------------------------------------------------------\n\n#[derive(Debug)]\n", "file_path": "src/simple_serialization/tests.rs", "rank": 94, "score": 27866.80674614714 }, { "content": "/*\n\n * BSD 3-Clause License\n\n *\n\n * Copyright (c) 2019-2020, InterlockLedger Network\n\n * All rights reserved.\n\n *\n\n * Redistribution and use in source and binary forms, with or without\n\n * modification, are permitted provided that the following conditions are met:\n\n *\n\n * * Redistributions of source code must retain the above copyright notice, this\n\n * list of conditions and the following disclaimer.\n\n *\n\n * * Redistributions in binary form must reproduce the above copyright notice,\n\n * this list of conditions and the following disclaimer in the documentation\n\n * and/or other materials provided with the distribution.\n\n *\n\n * * Neither the name of the copyright holder nor the names of its\n\n * contributors may be used to endorse or promote products derived from\n\n * this software without specific prior written permission.\n\n *\n", "file_path": "src/fs/shared/tests.rs", "rank": 95, "score": 27855.868771941066 }, { "content": "/*\n\n * BSD 3-Clause License\n\n *\n\n * Copyright (c) 2019-2020, InterlockLedger Network\n\n * All rights reserved.\n\n 
*\n\n * Redistribution and use in source and binary forms, with or without\n\n * modification, are permitted provided that the following conditions are met:\n\n *\n\n * * Redistributions of source code must retain the above copyright notice, this\n\n * list of conditions and the following disclaimer.\n\n *\n\n * * Redistributions in binary form must reproduce the above copyright notice,\n\n * this list of conditions and the following disclaimer in the documentation\n\n * and/or other materials provided with the distribution.\n\n *\n\n * * Neither the name of the copyright holder nor the names of its\n\n * contributors may be used to endorse or promote products derived from\n\n * this software without specific prior written permission.\n\n *\n", "file_path": "src/simple_serialization/tests.rs", "rank": 96, "score": 27855.868771941066 }, { "content": " * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n */\n\nuse super::*;\n\nuse il2_test_utils::testdir::TestDirUtils;\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::fs::{File, OpenOptions};\n\nuse std::path::Path;\n\n\n\n//=============================================================================\n\n// SharedFileLockNameBuilder\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/fs/shared/tests.rs", "rank": 97, "score": 27853.57658273543 }, { "content": " ));\n\n\n\n let mut data: [u8; 7] = [0; 7];\n\n let mut v = SimpleSliceSerializer::new(&mut data);\n\n assert!(matches!(\n\n v.write_u64(SAMPLE04),\n\n Err(ErrorKind::UnableToWrite)\n\n ));\n\n assert!(matches!(\n\n v.write_i64(SAMPLE08),\n\n Err(ErrorKind::UnableToWrite)\n\n ));\n\n assert!(matches!(\n\n v.write_f64(SAMPLE10),\n\n Err(ErrorKind::UnableToWrite)\n\n ));\n\n\n\n let mut data: [u8; 4] = [0; 4];\n\n let mut v = SimpleSliceSerializer::new(&mut data);\n\n assert!(matches!(\n\n v.write_byte_array(SAMPLE11),\n\n Err(ErrorKind::UnableToWrite)\n\n ));\n\n}\n\n\n\n//=============================================================================\n\n// SimpleSliceDeserializer\n\n//-----------------------------------------------------------------------------\n", "file_path": "src/simple_serialization/tests.rs", "rank": 98, "score": 27780.49655505263 }, { "content": " * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\n * AND ANY 
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n */\n\nuse super::*;\n\n\n\n#[test]\n", "file_path": "src/mem/impl_win32/tests.rs", "rank": 99, "score": 97.6140331096365 } ]
Rust
src/lib.rs
arthurhenrique/rusti-cal
5d481a8afb72827f70712caf8f7d88ddad20847a
mod locale; const REFORM_YEAR: u32 = 1099; const MONTHS: usize = 12; const WEEKDAYS: u32 = 7; const COLUMN: usize = 3; const ROWS: usize = 4; const ROW_SIZE: usize = 7; static TOKEN: &str = "\n"; fn is_leap_year(year: u32) -> bool { if year <= REFORM_YEAR { return year % 4 == 0; } (year % 4 == 0) ^ (year % 100 == 0) ^ (year % 400 == 0) } fn days_by_year(mut year: u32) -> u32 { let mut count: u32 = 0; while year > 1 { year -= 1; if is_leap_year(year) { count += 366 } else { count += 365 } } count } fn days_by_month(year: u32) -> Vec<u32> { let mut feb_day: u32 = 28; if is_leap_year(year) { feb_day = 29; } vec![0, 31, feb_day, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] } fn days_by_date( day: u32, month: usize, year: u32, months_memoized: Vec<u32>, year_memoized: u32, ) -> u32 { let mut count = 0; count += day; if month > 1 { count += months_memoized[month - 1] } if year > 1 { count += year_memoized } count } fn get_days_accumulated_by_month(year: u32) -> (Vec<u32>, Vec<u32>) { let mut count = 0; let mut accum = Vec::new(); let days: Vec<u32> = days_by_month(year); (0..MONTHS + 1).for_each(|i| { count += days[i]; accum.push(count); }); (accum, days) } fn first_day_printable(day_year: u32, starting_day: u32) -> String { let mut spaces: String = "".to_string(); let mut printable = format!(""); if (day_year - starting_day) % WEEKDAYS == 0 { printable.push_str(" "); } for i in 2..WEEKDAYS { spaces += &" ".to_string(); if (day_year - starting_day) % WEEKDAYS == i { printable.push_str(spaces.as_str()); break; } } printable } fn remain_day_printable(day: u32, day_year: u32, starting_day: u32) -> String { let base = if ((day_year - starting_day) % WEEKDAYS) == 0 { format!("{:3}{}", day, TOKEN) } else { String::default() }; let complement = (1..WEEKDAYS) .find_map(|i| ((day_year - starting_day) % WEEKDAYS == i).then(|| format!("{:3}", day))) .unwrap_or_default(); format!("{}{}", base, complement) } fn body_printable( year: u32, month: usize, days: u32, months_memoized: Vec<u32>, 
year_memoized: u32, starting_day: u32, ) -> Vec<String> { let mut result = Vec::<String>::new(); let mut result_days = format!(""); (1..days + 1).for_each(|day| { if day == 1 { let first_day = days_by_date(1, month, year, months_memoized.clone(), year_memoized); result_days.push_str(&first_day_printable(first_day, starting_day)) } let day_year = days_by_date(day, month, year, months_memoized.clone(), year_memoized); result_days.push_str(&remain_day_printable(day, day_year, starting_day)) }); result_days .split(TOKEN) .collect::<Vec<&str>>() .into_iter() .for_each(|i| result.push(i.to_string())); let len = result.len(); if len <= 6 { let spaces = 21 - result[len - 1].len(); if result[len - 1].len() < 20 { for _i in 0..spaces { result[len - 1] += " " } } result.push(" ".to_string()) } result } fn month_printable( year: u32, month: usize, days: u32, months_memoized: Vec<u32>, year_memoized: u32, starting_day: u32, month_names: Vec<String>, week_names: Vec<String>, ) -> Vec<String> { let mut result = Vec::<String>::new(); let body = body_printable( year, month, days, months_memoized, year_memoized, starting_day, ); let month_name = &month_names[month - 1]; result.push(format!(" {:^20}", month_name)); let header = circular_week_name(week_names, starting_day as usize); result.push(header); body.into_iter().for_each(|item| { result.push(item); }); result } fn circular_week_name(week_name: Vec<String>, idx: usize) -> String { let mut s = " ".to_string(); let mut i = idx; while i < ROW_SIZE + idx { if i == (ROW_SIZE - 1) + idx { s.push_str(week_name[i % ROW_SIZE].as_str()); } else { s.push_str(&format!("{} ", week_name[i % ROW_SIZE])); } i += 1 } s.to_string() } pub fn calendar(year: u32, locale_str: &str, starting_day: u32) -> Vec<Vec<Vec<String>>> { let mut rows: Vec<Vec<Vec<String>>> = vec![vec![vec![String::from("")]; COLUMN]; ROWS]; let mut row_counter = 0; let mut column_counter = 0; let (months_memoized, months) = get_days_accumulated_by_month(year); let 
year_memoized = days_by_year(year); let locale_info = locale::LocaleInfo::new(locale_str); (1..MONTHS + 1).for_each(|month| { rows[row_counter][column_counter] = month_printable( year, month, months[month], months_memoized.clone(), year_memoized, starting_day, locale_info.month_names(), locale_info.week_day_names(), ); column_counter = month % COLUMN; if column_counter == 0 { row_counter += 1; } }); rows } pub fn display(year: u32, locale_str: &str, starting_day: u32) { let rows = calendar(year, locale_str, starting_day); println!(" {:^63}", year); for row in rows { for i in 0..8 { for j in 0..3 { print!("{} ", &row[j][i]); } println!(); } } } #[test] fn test_circular_week_name() { let locale_str = "en_US"; let locale_info = locale::LocaleInfo::new(locale_str); let week_name = locale_info.week_day_names(); assert_eq!(circular_week_name(week_name, 0), " Su Mo Tu We Th Fr Sa"); } #[test] fn test_circular_week_name_pt_br() { let locale_str = "pt_BR"; let locale_info = locale::LocaleInfo::new(locale_str); let week_name = locale_info.week_day_names(); assert_eq!(circular_week_name(week_name, 0), " Do Se Te Qu Qu Se Sá"); }
mod locale; const REFORM_YEAR: u32 = 1099; const MONTHS: usize = 12; const WEEKDAYS: u32 = 7; const COLUMN: usize = 3; const ROWS: usize = 4; const ROW_SIZE: usize = 7; static TOKEN: &str = "\n"; fn is_leap_year(year: u32) -> bool { if year <= REFORM_YEAR { return year % 4 == 0; } (year % 4 == 0) ^ (year % 100 == 0) ^ (year % 400 == 0) } fn days_by_year(mut year: u32) -> u32 { let mut count: u32 = 0; while year > 1 { year -= 1; if is_leap_year(year) { count += 366 } else { count += 365 } } count } fn days_by_month(year: u32) -> Vec<u32> { let mut feb_day: u32 = 28; if is_leap_year(year) { feb_day = 29; } vec![0, 31, feb_day, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] } fn days_by_date( day: u32, month: usize, year: u32, months_memoized: Vec<u32>, year_memoized: u32, ) -> u32 { let mut count = 0; count += day; if month > 1 { count += months_memoized[month - 1] } if year > 1 { count += year_memoized } count } fn get_days_accumulated_by_month(year: u32) -> (Vec<u32>, Vec<u32>) { let mut count = 0; let mut accum = Vec::new(); let days: Vec<u32> = days_by_month(year); (0..MONTHS + 1).for_each(|i| { count += days[i]; accum.push(count); }); (accum, days) } fn first_day_printable(day_year: u32, starting_day: u32) -> String { let mut spaces: String = "".to_string(); let mut printable = format!(""); if (day_year - starting_day) % WEEKDAYS == 0 { printable.push_str(" "); } for i in 2..WEEKDAYS { spaces += &" ".to_string(); if (day_year - starting_day) % WEEKDAYS == i { printable.push_str(spaces.as_str()); break; } } printable } fn remain_day_printable(day: u32, day_year: u32, starting_day: u32) -> String { let base = if ((day_year - starting_day) % WEEKDAYS) == 0 { format!("{:3}{}", day, TOKEN) } else { String::default() }; let complement = (1..WEEKDAYS) .find_map(|i| ((day_year - starting_day) % WEEKDAYS == i).then(|| format!("{:3}", day))) .unwrap_or_default(); format!("{}{}", base, complement) } fn body_printable( year: u32, month: usize, days: u32, months_memoized: Vec<u32>, 
year_memoized: u32, starting_day: u32, ) -> Vec<String> { let mut result = Vec::<String>::new(); let mut result_days = format!(""); (1..days + 1).for_each(|day| {
let day_year = days_by_date(day, month, year, months_memoized.clone(), year_memoized); result_days.push_str(&remain_day_printable(day, day_year, starting_day)) }); result_days .split(TOKEN) .collect::<Vec<&str>>() .into_iter() .for_each(|i| result.push(i.to_string())); let len = result.len(); if len <= 6 { let spaces = 21 - result[len - 1].len(); if result[len - 1].len() < 20 { for _i in 0..spaces { result[len - 1] += " " } } result.push(" ".to_string()) } result } fn month_printable( year: u32, month: usize, days: u32, months_memoized: Vec<u32>, year_memoized: u32, starting_day: u32, month_names: Vec<String>, week_names: Vec<String>, ) -> Vec<String> { let mut result = Vec::<String>::new(); let body = body_printable( year, month, days, months_memoized, year_memoized, starting_day, ); let month_name = &month_names[month - 1]; result.push(format!(" {:^20}", month_name)); let header = circular_week_name(week_names, starting_day as usize); result.push(header); body.into_iter().for_each(|item| { result.push(item); }); result } fn circular_week_name(week_name: Vec<String>, idx: usize) -> String { let mut s = " ".to_string(); let mut i = idx; while i < ROW_SIZE + idx { if i == (ROW_SIZE - 1) + idx { s.push_str(week_name[i % ROW_SIZE].as_str()); } else { s.push_str(&format!("{} ", week_name[i % ROW_SIZE])); } i += 1 } s.to_string() } pub fn calendar(year: u32, locale_str: &str, starting_day: u32) -> Vec<Vec<Vec<String>>> { let mut rows: Vec<Vec<Vec<String>>> = vec![vec![vec![String::from("")]; COLUMN]; ROWS]; let mut row_counter = 0; let mut column_counter = 0; let (months_memoized, months) = get_days_accumulated_by_month(year); let year_memoized = days_by_year(year); let locale_info = locale::LocaleInfo::new(locale_str); (1..MONTHS + 1).for_each(|month| { rows[row_counter][column_counter] = month_printable( year, month, months[month], months_memoized.clone(), year_memoized, starting_day, locale_info.month_names(), locale_info.week_day_names(), ); column_counter = month % 
COLUMN; if column_counter == 0 { row_counter += 1; } }); rows } pub fn display(year: u32, locale_str: &str, starting_day: u32) { let rows = calendar(year, locale_str, starting_day); println!(" {:^63}", year); for row in rows { for i in 0..8 { for j in 0..3 { print!("{} ", &row[j][i]); } println!(); } } } #[test] fn test_circular_week_name() { let locale_str = "en_US"; let locale_info = locale::LocaleInfo::new(locale_str); let week_name = locale_info.week_day_names(); assert_eq!(circular_week_name(week_name, 0), " Su Mo Tu We Th Fr Sa"); } #[test] fn test_circular_week_name_pt_br() { let locale_str = "pt_BR"; let locale_info = locale::LocaleInfo::new(locale_str); let week_name = locale_info.week_day_names(); assert_eq!(circular_week_name(week_name, 0), " Do Se Te Qu Qu Se Sá"); }
if day == 1 { let first_day = days_by_date(1, month, year, months_memoized.clone(), year_memoized); result_days.push_str(&first_day_printable(first_day, starting_day)) }
if_condition
[ { "content": "fn to_titlecase(str: &str) -> String {\n\n str.chars()\n\n .enumerate()\n\n .map(|(pos, c)| {\n\n if pos == 0 {\n\n c.to_uppercase().to_string()\n\n } else {\n\n c.to_string()\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/locale.rs", "rank": 7, "score": 100154.44718854193 }, { "content": "fn locale() -> String {\n\n let locale = Locale::user_default();\n\n locale\n\n .tags()\n\n .next()\n\n .map(|(_, x)| x.to_string().replace(\"-\", \"_\"))\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 67101.15217022695 }, { "content": "fn default_year() -> u32 {\n\n let now = Local::now();\n\n let (_, year) = now.year_ce();\n\n year\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 63893.529313676656 }, { "content": "#[test]\n\nfn parse_invalid_locale() {\n\n assert_eq!(LocaleInfo::new(\" \").locale, Locale::POSIX);\n\n assert_eq!(LocaleInfo::new(\"bogus\").locale, Locale::POSIX);\n\n}\n\n\n", "file_path": "src/locale.rs", "rank": 13, "score": 38307.62080071853 }, { "content": "#[test]\n\nfn parse_default_locale() {\n\n let res = LocaleInfo::new(\"\");\n\n assert_eq!(res.locale, Locale::POSIX);\n\n\n\n let months = res.month_names();\n\n assert_eq!(months[0], \"January\");\n\n assert_eq!(months[1], \"February\");\n\n assert_eq!(months[2], \"March\");\n\n assert_eq!(months[3], \"April\");\n\n assert_eq!(months[4], \"May\");\n\n assert_eq!(months[5], \"June\");\n\n assert_eq!(months[6], \"July\");\n\n assert_eq!(months[7], \"August\");\n\n assert_eq!(months[8], \"September\");\n\n assert_eq!(months[9], \"October\");\n\n assert_eq!(months[10], \"November\");\n\n assert_eq!(months[11], \"December\");\n\n\n\n let days = res.week_day_names();\n\n assert_eq!(days, [\"Su\", \"Mo\", \"Tu\", \"We\", \"Th\", \"Fr\", \"Sa\"]);\n\n}\n\n\n", "file_path": "src/locale.rs", "rank": 14, "score": 38307.62080071853 }, { "content": "#[test]\n\nfn parse_english_locale() {\n\n let res = LocaleInfo::new(\"en_AU\");\n\n 
assert_eq!(res.locale, Locale::en_AU);\n\n\n\n let months = res.month_names();\n\n assert_eq!(months[0], \"January\");\n\n assert_eq!(months[1], \"February\");\n\n assert_eq!(months[2], \"March\");\n\n assert_eq!(months[3], \"April\");\n\n assert_eq!(months[4], \"May\");\n\n assert_eq!(months[5], \"June\");\n\n assert_eq!(months[6], \"July\");\n\n assert_eq!(months[7], \"August\");\n\n assert_eq!(months[8], \"September\");\n\n assert_eq!(months[9], \"October\");\n\n assert_eq!(months[10], \"November\");\n\n assert_eq!(months[11], \"December\");\n\n\n\n let days = res.week_day_names();\n\n assert_eq!(days, [\"Su\", \"Mo\", \"Tu\", \"We\", \"Th\", \"Fr\", \"Sa\"]);\n\n}\n\n\n", "file_path": "src/locale.rs", "rank": 15, "score": 38307.62080071853 }, { "content": "#[test]\n\nfn parse_non_english_locale() {\n\n let res = LocaleInfo::new(\"hu_HU\");\n\n assert_eq!(res.locale, Locale::hu_HU);\n\n\n\n let months = res.month_names();\n\n assert_eq!(months[0], \"Január\");\n\n assert_eq!(months[1], \"Február\");\n\n assert_eq!(months[2], \"Március\");\n\n assert_eq!(months[3], \"Április\");\n\n assert_eq!(months[4], \"Május\");\n\n assert_eq!(months[5], \"Június\");\n\n assert_eq!(months[6], \"Július\");\n\n assert_eq!(months[7], \"Augusztus\");\n\n assert_eq!(months[8], \"Szeptember\");\n\n assert_eq!(months[9], \"Október\");\n\n assert_eq!(months[10], \"November\");\n\n assert_eq!(months[11], \"December\");\n\n\n\n let days = res.week_day_names();\n\n assert_eq!(days, [\"V \", \"H \", \"K \", \"Sz\", \"Cs\", \"P \", \"Sz\"]);\n\n}\n\n\n", "file_path": "src/locale.rs", "rank": 16, "score": 37158.88821328625 }, { "content": "#[test]\n\nfn test_titlecase() {\n\n assert_eq!(to_titlecase(\"January\"), \"January\");\n\n assert_eq!(to_titlecase(\"április\"), \"Április\");\n\n}\n", "file_path": "src/locale.rs", "rank": 19, "score": 34619.61667531515 }, { "content": "fn main() {\n\n let arg = argh::from_env::<WithPositional>();\n\n display(arg.year, &locale(), 
arg.starting_day);\n\n}\n", "file_path": "src/main.rs", "rank": 20, "score": 18898.567709755673 }, { "content": "use pure_rust_locales::Locale;\n\nuse std::convert::TryInto;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct LocaleInfo {\n\n locale: Locale,\n\n}\n\n\n\nimpl LocaleInfo {\n\n pub fn new(locale_str: &str) -> LocaleInfo {\n\n let locale: Locale = match locale_str.try_into() {\n\n Ok(l) => l,\n\n _ => \"POSIX\".try_into().unwrap(),\n\n };\n\n LocaleInfo { locale }\n\n }\n\n\n\n pub fn month_names(&self) -> Vec<String> {\n\n let months = pure_rust_locales::locale_match!(self.locale => LC_TIME::MON);\n\n months.iter().map(|month| to_titlecase(month)).collect()\n", "file_path": "src/locale.rs", "rank": 21, "score": 18185.302635944496 }, { "content": " }\n\n\n\n pub fn week_day_names(&self) -> Vec<String> {\n\n let abbreviated_days = pure_rust_locales::locale_match!(self.locale => LC_TIME::ABDAY);\n\n abbreviated_days\n\n .iter()\n\n .map(|day| to_titlecase(day))\n\n .map(|day| match day.chars().count() {\n\n 1 => format!(\"{} \", day),\n\n _ => day.chars().take(2).collect(),\n\n })\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "src/locale.rs", "rank": 22, "score": 18185.094785249745 }, { "content": "<p align=\"center\">\n\n <img src=\"https://user-images.githubusercontent.com/3329711/116007073-5c9f4380-a5e4-11eb-9d79-aa0aab50c14d.png\">\n\n</p>\n\n\n\n⚡️ Lightning-fast and minimal calendar command line. 
It's similar to `cal`.\n\nWritten in Rust 🦀\n\n\n\n## How Can Install?\n\n\n\n```sh\n\n$ cargo install rusti-cal\n\n```\n\n\n\n## Usage\n\n\n\n```sh\n\n$ rusti-cal <year>\n\n```\n\n\n\n## Hands On\n\n\n\n![hands-on](./doc/rusti-cal.gif)\n\n\n\n## Optional Usage\n\n\n\n+ Start Week With Sunday (Default)\n\n\n\n```sh\n\n$ rusti-cal <year> --starting-day 0\n\n```\n\n\n\n+ Start Week With Mondays\n\n\n\n```sh\n\n$ rusti-cal <year> --starting-day 1\n\n```\n\n\n\n+ Start Week With Tuesday\n\n\n\n```sh\n\n$ rusti-cal <year> --starting-day 2\n\n```\n\n\n\n+ Start Week With Wednesday\n\n\n\n```sh\n\n$ rusti-cal <year> --starting-day 3\n\n```\n\n\n\n+ Start Week With Thursday\n\n\n\n```sh\n\n$ rusti-cal <year> --starting-day 4\n\n```\n\n\n\n+ Start Week With Friday\n\n\n\n```sh\n\n$ rusti-cal <year> --starting-day 5\n\n```\n\n\n\n+ Start Week With Saturday\n\n\n\n```sh\n\n$ rusti-cal <year> --starting-day 6\n\n```\n\n\n\n## Locale based names\n\n\n\nThe current locale can be overwritten with the `LANG` environment variable to display the calendar in another language.\n\n\n\n```sh\n\n$ LANG=fr_FR rusti-cal <year>\n\n```\n", "file_path": "README.md", "rank": 27, "score": 3.2558384584354045 }, { "content": "use argh::FromArgs;\n\nuse chrono::prelude::*;\n\nuse locale_config::Locale;\n\n\n\nuse rusti_cal::display;\n\n\n\n#[derive(FromArgs, PartialEq, Debug)]\n\n/// A command with positional arguments.\n", "file_path": "src/main.rs", "rank": 28, "score": 1.300276991206209 } ]
Rust
rust/game/src/schemas.rs
sisso/test-unity3d-rust
883ad1eba80a6fad2b566c170a8597dc2a7600ad
mod packages_generated; mod requests_generated; mod responses_generated; pub use requests_generated::ffi_requests; pub use responses_generated::ffi_responses; use crate::{Error, GameEvent, Request, Result}; use flatbuffers::FlatBufferBuilder; pub type RawMsg = [u8]; pub type RawMsgBuffer = Vec<u8>; pub type PackageKind = u16; pub fn parse_game_requests(kind: PackageKind, requests: &RawMsg) -> Result<Vec<Request>> { if kind != packages_generated::ffi_packages::PackageKind::Request as u16 { eprintln!("receive unknown kind {:?}", kind); return Err(Error::Unknown(format!("Unknown kind {}", kind))); } let root = flatbuffers::get_root::<ffi_requests::Requests>(requests); let total_requests = root.total_messages() as usize; let mut index: Vec<Option<Request>> = Vec::with_capacity(total_requests); for i in 0..total_requests { index.push(None); } for package in root.empty_packages().unwrap_or_default().iter() { match package.kind() { ffi_requests::RequestKind::StartGame => { index[package.ordering() as usize] = Some(Request::StartGame) } ffi_requests::RequestKind::GameStatus => { index[package.ordering() as usize] = Some(Request::GameStatus) } ffi_requests::RequestKind::GetAll => { index[package.ordering() as usize] = Some(Request::GetAll) } other => return Err(Error::Unknown(format!("Invalid kind {:?}", other))), } } let result: Vec<_> = index.into_iter().flatten().collect(); if result.len() != total_requests { Err(format!("invalid result {:?}", result).into()) } else { Ok(result) } } pub fn serialize_game_events( game_responses: Vec<GameEvent>, ) -> Result<(PackageKind, RawMsgBuffer)> { let mut fb = FlatBufferBuilder::new(); macro_rules! 
create_vector { ($field:expr) => { if $field.is_empty() { None } else { Some(fb.create_vector($field.as_ref())) } }; } let mut total = 0u32; let mut empty_packages = vec![]; let mut create_packages = vec![]; let mut pos_packages = vec![]; let total_game_responses = game_responses.len(); for responses in game_responses { let ordering = total; total += 1; match responses { GameEvent::CreateObj { id, x, y } => { create_packages.push(ffi_responses::CreatePackage::new( ffi_responses::ResponseKind::CreateObj, ordering, id, ffi_responses::PrefabKind::Player, x, y, )); } GameEvent::MoveObj { obj_id, x, y } => { pos_packages.push(ffi_responses::PosPackage::new( ffi_responses::ResponseKind::MoveObj, ordering, obj_id, x, y, )); } GameEvent::GameStarted => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::GameStarted, ordering, )), GameEvent::GameStatusRunning => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::GameStatusRunning, ordering, )), GameEvent::GameStatusIdle => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::GameStatusIdle, ordering, )), GameEvent::FullStateResponse => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::FullStateResponse, ordering, )), } } if total != total_game_responses as u32 { return Err(format!( "invalid response count {:?}, expected {:?}", total, total_game_responses ) .into()); } let args = ffi_responses::ResponsesArgs { total_messages: total, empty_packages: create_vector!(empty_packages), create_packages: create_vector!(create_packages), pos_packages: create_vector!(pos_packages), string_packages: None, }; let out = ffi_responses::Responses::create(&mut fb, &args); fb.finish_minimal(out); Ok(( packages_generated::ffi_packages::PackageKind::Response as u16, fb.finished_data().to_vec(), )) }
mod packages_generated; mod requests_generated; mod responses_generated; pub use requests_generated::ffi_requests; pub use responses_generated::ffi_responses; use crate::{Error, GameEvent, Request, Result}; use flatbuffers::FlatBufferBuilder; pub type RawMsg = [u8]; pub type RawMsgBuffer = Vec<u8>; pub type PackageKind = u16; pub fn parse_game_requests(kind: PackageKind, requests: &RawMsg) -> Result<Vec<Request>> { if kind != packages_generated::ffi_packages::PackageKind::Request as u16 { eprintln!("receive unknown kind {:?}", kind); return Err(Error::Unknown(format!("Unknown kind {}", kind))); } let root = flatbuffers::get_root::<ffi_requests::Requests>(requests); let total_requests = root.total_messages() as usize; let mut index: Vec<Option<Request>> = Vec::with_capacity(total_requests); for i in 0..total_requests { index.push(None); } for package in root.empty_packages().unwrap_or_default().iter() { match package.kind() { ffi_requests::RequestKind::StartGame => { index[package.ordering() as usize] = Some(Request::StartGame) } ffi_requests::RequestKind::GameStatus => { index[package.ordering() as usize] = Some(Request::GameStatus) } ffi_requests::RequestKind::GetAll => { index[package.ordering() as usize] = Some(Request::GetAll) } other => return Err(Error::Unknown(format!("Invalid kind {:?}", other))), } } let result: Vec<_> = index.into_iter().flatten().collect(); if result.len() != total_requests { Err(format!("invalid result {:?}", result).into()) } else { Ok(result) } } pub fn serialize_game_events( game_responses: Vec<GameEvent>, ) -> Result<(PackageKind, RawMsgBuffer)> { let mut fb = FlatBufferBuilder::new(); macro_rules! 
create_vector { ($field:expr) => { if $field.is_empty() { None } else { Some(fb.create_vector($field.as_ref())) } }; } let mut total = 0u32; let mut empty_packages = vec![]; let mut create_packages = vec![]; let mut pos_packages = vec![]; let total_game_responses = game_responses.len(); for responses in game_responses { let ordering = total; total += 1; match responses { GameEvent::CreateObj { id, x, y } => { create_packages.push(ffi_responses::CreatePackage::new( ffi_responses::ResponseKind::CreateObj, ordering, id, ffi_responses::PrefabKind::Player, x, y, )); } GameEvent::MoveObj { obj_id, x, y } => { pos_packages.push(ffi_responses::PosPackage::new( ffi_responses::ResponseKind::MoveObj, orderin
g, obj_id, x, y, )); } GameEvent::GameStarted => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::GameStarted, ordering, )), GameEvent::GameStatusRunning => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::GameStatusRunning, ordering, )), GameEvent::GameStatusIdle => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::GameStatusIdle, ordering, )), GameEvent::FullStateResponse => empty_packages.push(ffi_responses::EmptyPackage::new( ffi_responses::ResponseKind::FullStateResponse, ordering, )), } } if total != total_game_responses as u32 { return Err(format!( "invalid response count {:?}, expected {:?}", total, total_game_responses ) .into()); } let args = ffi_responses::ResponsesArgs { total_messages: total, empty_packages: create_vector!(empty_packages), create_packages: create_vector!(create_packages), pos_packages: create_vector!(pos_packages), string_packages: None, }; let out = ffi_responses::Responses::create(&mut fb, &args); fb.finish_minimal(out); Ok(( packages_generated::ffi_packages::PackageKind::Response as u16, fb.finished_data().to_vec(), )) }
function_block-function_prefixed
[ { "content": "pub fn enum_name_package_kind(e: PackageKind) -> &'static str {\n\n let index = e as u16;\n\n ENUM_NAMES_PACKAGE_KIND[index as usize]\n\n}\n\n\n\n} // pub mod FfiPackages\n\n\n", "file_path": "rust/game/src/schemas/packages_generated.rs", "rank": 1, "score": 163941.63861508263 }, { "content": "pub fn enum_name_request_kind(e: RequestKind) -> &'static str {\n\n let index = e as u16;\n\n ENUM_NAMES_REQUEST_KIND[index as usize]\n\n}\n\n\n\n// struct EmptyPackage, aligned to 4\n\n#[repr(C, align(4))]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct EmptyPackage {\n\n kind_: RequestKind,\n\n padding0__: u16,\n\n ordering_: u32,\n\n} // pub struct EmptyPackage\n\nimpl flatbuffers::SafeSliceAccess for EmptyPackage {}\n\nimpl<'a> flatbuffers::Follow<'a> for EmptyPackage {\n\n type Inner = &'a EmptyPackage;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n <&'a EmptyPackage>::follow(buf, loc)\n\n }\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 2, "score": 152919.33198183024 }, { "content": "/// Parse a network compact format into a game event\n\npub fn parse(package: Package) -> GameEvent {\n\n unimplemented!()\n\n}", "file_path": "rust/game/src/packages/package_serialization.rs", "rank": 3, "score": 124837.53468320234 }, { "content": "/// Serialize a game event to be transfer in network compact format\n\npub fn serialize(e: GameEvent) -> Package {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "rust/game/src/packages/package_serialization.rs", "rank": 4, "score": 123954.36781722636 }, { "content": "pub fn to_cstr(value: &str) -> *mut c_char {\n\n CString::new(value).unwrap().into_raw()\n\n}\n\n\n", "file_path": "rust/ffi/src/ffi/ffi_utils.rs", "rank": 5, "score": 117322.43628223336 }, { "content": "pub fn enum_name_response_kind(e: ResponseKind) -> &'static str {\n\n let index = e as u16;\n\n ENUM_NAMES_RESPONSE_KIND[index as 
usize]\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[repr(u16)]\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n\npub enum PrefabKind {\n\n Player = 0,\n\n Monster = 1,\n\n\n\n}\n\n\n\nconst ENUM_MIN_PREFAB_KIND: u16 = 0;\n\nconst ENUM_MAX_PREFAB_KIND: u16 = 1;\n\n\n\nimpl<'a> flatbuffers::Follow<'a> for PrefabKind {\n\n type Inner = Self;\n\n #[inline]\n", "file_path": "rust/game/src/schemas/responses_generated.rs", "rank": 6, "score": 109765.53355969212 }, { "content": "pub fn enum_name_prefab_kind(e: PrefabKind) -> &'static str {\n\n let index = e as u16;\n\n ENUM_NAMES_PREFAB_KIND[index as usize]\n\n}\n\n\n\n// struct EmptyPackage, aligned to 4\n\n#[repr(C, align(4))]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct EmptyPackage {\n\n kind_: ResponseKind,\n\n padding0__: u16,\n\n ordering_: u32,\n\n} // pub struct EmptyPackage\n\nimpl flatbuffers::SafeSliceAccess for EmptyPackage {}\n\nimpl<'a> flatbuffers::Follow<'a> for EmptyPackage {\n\n type Inner = &'a EmptyPackage;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n <&'a EmptyPackage>::follow(buf, loc)\n\n }\n", "file_path": "rust/game/src/schemas/responses_generated.rs", "rank": 7, "score": 109765.53355969212 }, { "content": "pub trait Server {\n\n fn run(&mut self) -> ServerChanges;\n\n fn output(&mut self, connection_id: ConnectionId, msg: RawMsgBuffer);\n\n fn disconnect(&mut self, connection_id: ConnectionId);\n\n}\n", "file_path": "rust/server/src/server/mod.rs", "rank": 9, "score": 81513.1432681536 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiRequests\n\n{\n\n\n\npublic enum RequestKind : ushort\n\n{\n\n GameStatus = 0,\n\n StartGame = 1,\n\n GetAll = 2,\n\n SetInputAxis = 3,\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/RequestKind.cs", "rank": 10, "score": 76076.36074986146 }, { "content": "// 
<auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiPackages\n\n{\n\n\n\npublic enum PackageKind : ushort\n\n{\n\n Request = 0,\n\n Response = 1,\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiPackages/PackageKind.cs", "rank": 11, "score": 75887.23186513952 }, { "content": "pub fn from_cstr(ptr: *const c_char) -> String {\n\n let c_str = unsafe {\n\n assert!(!ptr.is_null());\n\n CStr::from_ptr(ptr)\n\n };\n\n\n\n c_str.to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "rust/ffi/src/ffi/ffi_utils.rs", "rank": 12, "score": 75865.36003698647 }, { "content": "#[test]\n\nfn test_flatbuffer_non_root_element() {\n\n let bytes: [u8; 2] = [0, 0];\n\n let kind = flatbuffers::follow_cast_ref::<ResponseKind>(&bytes, 0);\n\n assert_eq!(*kind, ResponseKind::GameStarted);\n\n}\n\n\n", "file_path": "rust/ffi/tests/serialization_tests.rs", "rank": 13, "score": 74518.39637520509 }, { "content": "pub mod package_buffer;\n\npub mod package_serialization;", "file_path": "rust/game/src/packages/mod.rs", "rank": 14, "score": 72429.62635820305 }, { "content": "pub fn to_slice<'a, T>(buffer: *const T, length: u32) -> &'a [T] {\n\n unsafe { std::slice::from_raw_parts(buffer, length as usize) }\n\n}\n", "file_path": "rust/ffi/src/ffi/ffi_utils.rs", "rank": 15, "score": 67190.45485962286 }, { "content": "\n\n public static Offset<FfiResponses.IdPackage> CreateIdPackage(FlatBufferBuilder builder, FfiResponses.ResponseKind Kind, uint Ordering, uint Id) {\n\n builder.Prep(4, 12);\n\n builder.PutUint(Id);\n\n builder.PutUint(Ordering);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Kind);\n\n return new Offset<FfiResponses.IdPackage>(builder.Offset);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/IdPackage.cs", "rank": 16, "score": 66416.48877202293 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not 
modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct IdPackage : IFlatbufferObject\n\n{\n\n private Struct __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Struct(_i, _bb); }\n\n public IdPackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n\n public FfiResponses.ResponseKind Kind { get { return (FfiResponses.ResponseKind)__p.bb.GetUshort(__p.bb_pos + 0); } }\n\n public uint Ordering { get { return __p.bb.GetUint(__p.bb_pos + 4); } }\n\n public uint Id { get { return __p.bb.GetUint(__p.bb_pos + 8); } }\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/IdPackage.cs", "rank": 17, "score": 66415.04464750536 }, { "content": " public float Y { get { return __p.bb.GetFloat(__p.bb_pos + 12); } }\n\n\n\n public static Offset<FfiRequests.V2Package> CreateV2Package(FlatBufferBuilder builder, FfiRequests.RequestKind Kind, uint Ordering, float X, float Y) {\n\n builder.Prep(4, 16);\n\n builder.PutFloat(Y);\n\n builder.PutFloat(X);\n\n builder.PutUint(Ordering);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Kind);\n\n return new Offset<FfiRequests.V2Package>(builder.Offset);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/V2Package.cs", "rank": 18, "score": 66015.63161542267 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiRequests\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct V2Package : IFlatbufferObject\n\n{\n\n private Struct __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Struct(_i, _bb); }\n\n public V2Package __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n\n public FfiRequests.RequestKind Kind { 
get { return (FfiRequests.RequestKind)__p.bb.GetUshort(__p.bb_pos + 0); } }\n\n public uint Ordering { get { return __p.bb.GetUint(__p.bb_pos + 4); } }\n\n public float X { get { return __p.bb.GetFloat(__p.bb_pos + 8); } }\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/V2Package.cs", "rank": 19, "score": 66014.77622053897 }, { "content": " public static Offset<FfiRequests.EmptyPackage> CreateEmptyPackage(FlatBufferBuilder builder, FfiRequests.RequestKind Kind, uint Ordering) {\n\n builder.Prep(4, 8);\n\n builder.PutUint(Ordering);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Kind);\n\n return new Offset<FfiRequests.EmptyPackage>(builder.Offset);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/EmptyPackage.cs", "rank": 20, "score": 66013.74708031293 }, { "content": " public FfiRequests.RequestKind Kind { get { int o = __p.__offset(4); return o != 0 ? (FfiRequests.RequestKind)__p.bb.GetUshort(o + __p.bb_pos) : FfiRequests.RequestKind.GameStatus; } }\n\n public uint Ordering { get { int o = __p.__offset(6); return o != 0 ? __p.bb.GetUint(o + __p.bb_pos) : (uint)0; } }\n\n public string Buffer { get { int o = __p.__offset(8); return o != 0 ? __p.__string(o + __p.bb_pos) : null; } }\n\n#if ENABLE_SPAN_T\n\n public Span<byte> GetBufferBytes() { return __p.__vector_as_span<byte>(8, 1); }\n\n#else\n\n public ArraySegment<byte>? 
GetBufferBytes() { return __p.__vector_as_arraysegment(8); }\n\n#endif\n\n public byte[] GetBufferArray() { return __p.__vector_as_array<byte>(8); }\n\n\n\n public static Offset<FfiRequests.StringPackage> CreateStringPackage(FlatBufferBuilder builder,\n\n FfiRequests.RequestKind kind = FfiRequests.RequestKind.GameStatus,\n\n uint ordering = 0,\n\n StringOffset bufferOffset = default(StringOffset)) {\n\n builder.StartTable(3);\n\n StringPackage.AddBuffer(builder, bufferOffset);\n\n StringPackage.AddOrdering(builder, ordering);\n\n StringPackage.AddKind(builder, kind);\n\n return StringPackage.EndStringPackage(builder);\n\n }\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/StringPackage.cs", "rank": 21, "score": 66013.47012008145 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiRequests\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct EmptyPackage : IFlatbufferObject\n\n{\n\n private Struct __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Struct(_i, _bb); }\n\n public EmptyPackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n\n public FfiRequests.RequestKind Kind { get { return (FfiRequests.RequestKind)__p.bb.GetUshort(__p.bb_pos + 0); } }\n\n public uint Ordering { get { return __p.bb.GetUint(__p.bb_pos + 4); } }\n\n\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/EmptyPackage.cs", "rank": 22, "score": 66013.31383442078 }, { "content": "\n\n public static void StartStringPackage(FlatBufferBuilder builder) { builder.StartTable(3); }\n\n public static void AddKind(FlatBufferBuilder builder, FfiRequests.RequestKind kind) { builder.AddUshort(0, (ushort)kind, 0); }\n\n public static void AddOrdering(FlatBufferBuilder builder, uint ordering) { builder.AddUint(1, ordering, 0); }\n\n public static void 
AddBuffer(FlatBufferBuilder builder, StringOffset bufferOffset) { builder.AddOffset(2, bufferOffset.Value, 0); }\n\n public static Offset<FfiRequests.StringPackage> EndStringPackage(FlatBufferBuilder builder) {\n\n int o = builder.EndTable();\n\n return new Offset<FfiRequests.StringPackage>(o);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/StringPackage.cs", "rank": 23, "score": 66012.09369218833 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiRequests\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct StringPackage : IFlatbufferObject\n\n{\n\n private Table __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public static void ValidateVersion() { FlatBufferConstants.FLATBUFFERS_1_11_1(); }\n\n public static StringPackage GetRootAsStringPackage(ByteBuffer _bb) { return GetRootAsStringPackage(_bb, new StringPackage()); }\n\n public static StringPackage GetRootAsStringPackage(ByteBuffer _bb, StringPackage obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Table(_i, _bb); }\n\n public StringPackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/StringPackage.cs", "rank": 24, "score": 66009.52686480348 }, { "content": "fn main() {\n\n env_logger::init();\n\n compile_dotnet();\n\n}\n\n\n", "file_path": "rust/ffi-flap/build.rs", "rank": 25, "score": 47748.9102518749 }, { "content": "fn main() {\n\n let port = 3333;\n\n let mut server = server::server_socket::SocketServer::new(port);\n\n let mut tick: u64 = 0;\n\n let mut game = Game::new();\n\n let mut users = HashMap::new();\n\n\n\n loop {\n\n let changes = server.run();\n\n\n\n for connection_id in changes.connects {\n\n println!(\"{:?} connects\", connection_id);\n\n 
let user_id = game.connect();\n\n users.insert(connection_id, user_id);\n\n }\n\n\n\n for connection_id in changes.disconnects {\n\n println!(\"{:?} disconnects\", connection_id);\n\n users.remove(&connection_id);\n\n }\n", "file_path": "rust/server/src/main.rs", "rank": 26, "score": 47748.9102518749 }, { "content": "fn compile_dotnet() {\n\n fn flapigen_expand(from: &Path, out: &Path) {\n\n println!(\"Run flapigen_expand\");\n\n let config = DotNetConfig::new(\"ffi_domain_2\".to_owned(), \"generated_dotnet\".into());\n\n let mut swig_gen = flapigen::Generator::new(LanguageConfig::DotNetConfig(config));\n\n swig_gen = swig_gen.rustfmt_bindings(true);\n\n swig_gen.expand(\"ffi_domain_2\", from, out);\n\n }\n\n\n\n let now = Instant::now();\n\n\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n flapigen_expand(\n\n Path::new(\"src/glue.rs.in\"),\n\n &Path::new(&out_dir).join(\"glue.rs\"),\n\n );\n\n let expand_time = now.elapsed();\n\n println!(\n\n \"flapigen expand time: {}\",\n\n expand_time.as_secs() as f64 + (expand_time.subsec_nanos() as f64) / 1_000_000_000.\n\n );\n\n println!(\"cargo:rerun-if-changed=src/glue.rs.in\");\n\n println!(\"cargo:rerun-if-changed=src/lib.rs\");\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "rust/ffi-flap/build.rs", "rank": 27, "score": 46647.41313701664 }, { "content": "fn main() {\n\n let mut ffi_context = FfiContext::new(Some(\"localhost:3333\"));\n\n\n\n loop {\n\n match ffi_context.take() {\n\n Ok(Some((kind, bytes))) => {\n\n let bytes_b64 = base64::encode(bytes);\n\n println!(\"receive {:?}: {:?}\", kind, bytes_b64);\n\n }\n\n\n\n Ok(None) => {\n\n println!(\"receive none\");\n\n }\n\n\n\n Err(e) => {\n\n eprintln!(\"receive error {:?}\", e);\n\n }\n\n }\n\n\n\n std::thread::sleep(Duration::from_millis(90));\n\n }\n\n}\n", "file_path": "rust/ffi/examples/ffi_context_connect_test.rs", "rank": 28, "score": 44633.00047228331 }, { "content": "use std::env::current_exe;\n\n\n\n/// Kind and the bytes 
that represent a single message\n\npub type Package = (u16, Vec<u8>);\n\n/// Bytes that represent one. multiples or partial messages\n\npub type RawBytes = Vec<u8>;\n\n\n\n/// This class buffer incoming RawBytes and chunk into Package\n\n#[derive(Debug)]\n\npub struct PackageBuffer {\n\n kind: Option<u16>,\n\n size: Option<usize>,\n\n buffer: Vec<u8>,\n\n}\n\n\n\nimpl PackageBuffer {\n\n pub fn new() -> Self {\n\n PackageBuffer {\n\n kind: None,\n\n size: None,\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 29, "score": 44146.06713978771 }, { "content": "\n\n pub fn encode_len(len: usize) -> RawBytes {\n\n (len as u32).to_be_bytes().to_vec()\n\n }\n\n\n\n pub fn encode_kind(kind: u16) -> RawBytes {\n\n kind.to_be_bytes().to_vec()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use rand::rngs::StdRng;\n\n use rand::{Rng, SeedableRng};\n\n\n\n #[test]\n\n fn test_protocol() {\n\n let mut package_buffer = PackageBuffer::new();\n\n\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 30, "score": 44142.86132795151 }, { "content": " }\n\n\n\n fn get_frame(&mut self) -> Option<Package> {\n\n let kind = match self.kind {\n\n None if self.buffer.len() >= 2 => {\n\n let bytes = [self.buffer[0], self.buffer[1]];\n\n\n\n self.buffer.drain(0..2);\n\n\n\n let kind = u16::from_be_bytes(bytes);\n\n\n\n self.kind = Some(kind);\n\n kind\n\n }\n\n None => return None,\n\n Some(kind) => kind,\n\n };\n\n\n\n let size = match self.size {\n\n None if self.buffer.len() >= 4 => {\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 31, "score": 44136.78158048089 }, { "content": " buffer: vec![],\n\n }\n\n }\n\n\n\n pub fn push(&mut self, bytes: RawBytes) -> Vec<Package> {\n\n // println!(\"push {:?}\", bytes);\n\n self.buffer.extend(bytes);\n\n // println!(\"self {:?}\", self);\n\n self.get_frame_to_buffer(vec![])\n\n }\n\n\n\n fn get_frame_to_buffer(&mut self, mut buffer: Vec<Package>) -> Vec<Package> {\n\n match 
self.get_frame() {\n\n Some(frame) => {\n\n buffer.push(frame);\n\n self.get_frame_to_buffer(buffer)\n\n }\n\n\n\n None => buffer,\n\n }\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 32, "score": 44135.13654274512 }, { "content": " // println!(\"check buffer {:?}/{:?}\", self.buffer.len(), size);\n\n\n\n if self.buffer.len() >= size {\n\n let bytes = self.buffer[0..size].to_vec();\n\n self.buffer.drain(0..size);\n\n self.kind = None;\n\n self.size = None;\n\n Some((kind, bytes))\n\n } else {\n\n return None;\n\n }\n\n }\n\n\n\n pub fn pack((kind, frame): Package) -> RawBytes {\n\n let mut raw_bytes = vec![];\n\n raw_bytes.extend(PackageBuffer::encode_kind(kind));\n\n raw_bytes.extend(PackageBuffer::encode_len(frame.len()));\n\n raw_bytes.extend(frame);\n\n raw_bytes\n\n }\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 33, "score": 44134.44820615755 }, { "content": " value if value.len() == 1 => {\n\n let messages = msg_1.to_vec();\n\n assert_eq!(value[0].0, 38);\n\n assert_eq!(value[0].1, messages);\n\n }\n\n other => {\n\n panic!(\"unexpected result {:?}\", other);\n\n }\n\n }\n\n\n\n let msg_2_kind_len_plus_msg: [u8; 10] = [0, 3, 0, 0, 0, 4, 9, 8, 7, 6];\n\n\n\n match package_buffer.push(msg_2_kind_len_plus_msg.to_vec()) {\n\n value if value.len() == 1 => {\n\n assert_eq!(value[0].0, 3);\n\n assert_eq!(value[0].1, vec![9, 8, 7, 6]);\n\n }\n\n other => {\n\n panic!(\"unexpected result {:?}\", other);\n\n }\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 34, "score": 44130.93643155433 }, { "content": " let msg_1: [u8; 16] = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];\n\n let msg_1_kind_bytes = PackageBuffer::encode_kind(38);\n\n let msg_1_len_bytes = PackageBuffer::encode_len(msg_1.len());\n\n\n\n assert!(package_buffer\n\n .push(msg_1_kind_bytes[0..1].to_vec())\n\n .is_empty());\n\n assert!(package_buffer\n\n .push(msg_1_kind_bytes[1..2].to_vec())\n\n .is_empty());\n\n 
assert!(package_buffer\n\n .push(msg_1_len_bytes[0..1].to_vec())\n\n .is_empty());\n\n assert!(package_buffer\n\n .push(msg_1_len_bytes[1..4].to_vec())\n\n .is_empty());\n\n assert!(package_buffer.push(msg_1[0..9].to_vec()).is_empty());\n\n assert!(package_buffer.push(msg_1[9..13].to_vec()).is_empty());\n\n\n\n match package_buffer.push(msg_1[13..].to_vec()) {\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 35, "score": 44130.42862388772 }, { "content": " }\n\n }\n\n\n\n #[test]\n\n fn test_protocol_upload() {\n\n let mut pack_buffer = PackageBuffer::new();\n\n\n\n let mut buffer = vec![];\n\n\n\n let expected_frames = vec![\n\n (32, vec![0, 1, 2, 3]),\n\n (32, vec![4, 5, 6]),\n\n (32, vec![7, 8, 9, 10, 11]),\n\n (32, vec![12]),\n\n ];\n\n\n\n for frame in &expected_frames {\n\n buffer.extend(PackageBuffer::pack(frame.clone()));\n\n }\n\n\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 36, "score": 44127.07307989335 }, { "content": "use crate::GameEvent;\n\nuse crate::packages::package_buffer::Package;\n\n\n\n/// Serialize a game event to be transfer in network compact format\n", "file_path": "rust/game/src/packages/package_serialization.rs", "rank": 37, "score": 44121.988179877095 }, { "content": " let mut rng = rand::thread_rng();\n\n let mut current = 0;\n\n let mut frames = vec![];\n\n\n\n while current < buffer.len() {\n\n let next = (current + rng.gen_range(1, buffer.len())).min(buffer.len());\n\n let bytes = buffer[current..next].to_vec();\n\n current = next;\n\n frames.extend(pack_buffer.push(bytes));\n\n }\n\n\n\n assert_eq!(frames, expected_frames);\n\n }\n\n}\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 38, "score": 44121.95531935203 }, { "content": " let bytes = [\n\n self.buffer[0],\n\n self.buffer[1],\n\n self.buffer[2],\n\n self.buffer[3],\n\n ];\n\n\n\n self.buffer.drain(0..4);\n\n\n\n let size = u32::from_be_bytes(bytes) as usize;\n\n\n\n // println!(\"set size {:?}\", size);\n\n\n\n 
self.size = Some(size);\n\n size\n\n }\n\n None => return None,\n\n Some(size) => size,\n\n };\n\n\n", "file_path": "rust/game/src/packages/package_buffer.rs", "rank": 39, "score": 44121.58659484597 }, { "content": "#[test]\n\nfn test_flatbuffer_schema_serialization() {\n\n let bytes = {\n\n let mut bd = FlatBufferBuilder::new();\n\n\n\n let v = bd.create_vector(&[PosPackage::new(ResponseKind::MoveObj, 0, 1, 0.2, 3.0)]);\n\n\n\n let package = Responses::create(\n\n &mut bd,\n\n &ResponsesArgs {\n\n total_messages: 1,\n\n empty_packages: None,\n\n create_packages: None,\n\n pos_packages: Some(v),\n\n string_packages: None,\n\n },\n\n );\n\n\n\n bd.finish_minimal(package);\n\n bd.finished_data().to_vec()\n\n };\n", "file_path": "rust/ffi/tests/serialization_tests.rs", "rank": 40, "score": 43709.62146658143 }, { "content": " public uint TotalMessages { get { int o = __p.__offset(4); return o != 0 ? __p.bb.GetUint(o + __p.bb_pos) : (uint)0; } }\n\n public FfiRequests.EmptyPackage? EmptyPackages(int j) { int o = __p.__offset(6); return o != 0 ? (FfiRequests.EmptyPackage?)(new FfiRequests.EmptyPackage()).__assign(__p.__vector(o) + j * 8, __p.bb) : null; }\n\n public int EmptyPackagesLength { get { int o = __p.__offset(6); return o != 0 ? __p.__vector_len(o) : 0; } }\n\n public FfiRequests.V2Package? V2Packages(int j) { int o = __p.__offset(8); return o != 0 ? (FfiRequests.V2Package?)(new FfiRequests.V2Package()).__assign(__p.__vector(o) + j * 16, __p.bb) : null; }\n\n public int V2PackagesLength { get { int o = __p.__offset(8); return o != 0 ? 
__p.__vector_len(o) : 0; } }\n\n\n\n public static Offset<FfiRequests.Requests> CreateRequests(FlatBufferBuilder builder,\n\n uint total_messages = 0,\n\n VectorOffset empty_packagesOffset = default(VectorOffset),\n\n VectorOffset v2_packagesOffset = default(VectorOffset)) {\n\n builder.StartTable(3);\n\n Requests.AddV2Packages(builder, v2_packagesOffset);\n\n Requests.AddEmptyPackages(builder, empty_packagesOffset);\n\n Requests.AddTotalMessages(builder, total_messages);\n\n return Requests.EndRequests(builder);\n\n }\n\n\n\n public static void StartRequests(FlatBufferBuilder builder) { builder.StartTable(3); }\n\n public static void AddTotalMessages(FlatBufferBuilder builder, uint totalMessages) { builder.AddUint(0, totalMessages, 0); }\n\n public static void AddEmptyPackages(FlatBufferBuilder builder, VectorOffset emptyPackagesOffset) { builder.AddOffset(1, emptyPackagesOffset.Value, 0); }\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/Requests.cs", "rank": 41, "score": 43496.28986246056 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiRequests\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct Requests : IFlatbufferObject\n\n{\n\n private Table __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public static void ValidateVersion() { FlatBufferConstants.FLATBUFFERS_1_11_1(); }\n\n public static Requests GetRootAsRequests(ByteBuffer _bb) { return GetRootAsRequests(_bb, new Requests()); }\n\n public static Requests GetRootAsRequests(ByteBuffer _bb, Requests obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Table(_i, _bb); }\n\n public Requests __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/Requests.cs", "rank": 42, "score": 
43494.315151355615 }, { "content": " public static void StartEmptyPackagesVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(8, numElems, 4); }\n\n public static void AddV2Packages(FlatBufferBuilder builder, VectorOffset v2PackagesOffset) { builder.AddOffset(2, v2PackagesOffset.Value, 0); }\n\n public static void StartV2PackagesVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(16, numElems, 4); }\n\n public static Offset<FfiRequests.Requests> EndRequests(FlatBufferBuilder builder) {\n\n int o = builder.EndTable();\n\n return new Offset<FfiRequests.Requests>(o);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiRequests/Requests.cs", "rank": 43, "score": 43491.55375281253 }, { "content": "\n\n FfiContext { mode }\n\n }\n\n\n\n pub fn push(&mut self, package_kind: PackageKind, bytes: &RawMsg) -> Result<()> {\n\n match &mut self.mode {\n\n RunMode::Embedded { game } => {\n\n let requests = game::schemas::parse_game_requests(package_kind, bytes)?;\n\n debug!(\"ffi receive requests: {:?}\", requests);\n\n game.handle_requests(requests)\n\n }\n\n\n\n RunMode::Server { client } => Err(Error::Unknown(\"Not implemented\".to_string())),\n\n }\n\n }\n\n\n\n // TODO: receive a closure?\n\n pub fn take(&mut self) -> Result<Option<(PackageKind, RawMsgBuffer)>> {\n\n match &mut self.mode {\n\n RunMode::Embedded { game } => {\n", "file_path": "rust/ffi/src/ffi/mod.rs", "rank": 44, "score": 36442.02922156021 }, { "content": "pub mod ffi_utils;\n\n\n\nuse crate::client::Client;\n\nuse crate::game::GameEvent::CreateObj;\n\nuse crate::game::{Game, GameEvent, Result, UserId};\n\nuse flatbuffers::FlatBufferBuilder;\n\nuse game::packages::package_buffer::Package;\n\nuse game::schemas::{ffi_requests, ffi_responses, PackageKind, RawMsg, RawMsgBuffer};\n\nuse game::Error;\n\n\n\n#[derive(Debug)]\n", "file_path": "rust/ffi/src/ffi/mod.rs", "rank": 45, "score": 36437.29486541909 }, { "content": "\n\n pub fn take(&mut self) -> 
Option<Vec<u8>> {\n\n if self.input_buffer.is_empty() {\n\n None\n\n } else {\n\n Some(std::mem::replace(&mut self.input_buffer, Vec::new()))\n\n }\n\n }\n\n\n\n pub fn close(&mut self) -> std::io::Result<()> {\n\n self.stream.shutdown(Shutdown::Both)\n\n }\n\n}\n\n\n", "file_path": "rust/game/src/client/mod.rs", "rank": 46, "score": 36436.43870525202 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n pub fn take_responses(&mut self) -> Result<Option<(u16, RawMsgBuffer)>> {\n\n self.check_connection()?;\n\n\n\n match &mut self.state {\n\n State::Connected {\n\n buffer,\n\n socket,\n\n queue,\n\n } => match socket.tick() {\n\n Ok(()) => {\n\n if let Some(vec) = socket.take() {\n\n let packages = buffer.push(vec);\n\n queue.extend(packages);\n\n }\n\n\n\n if queue.is_empty() {\n", "file_path": "rust/ffi/src/client/mod.rs", "rank": 47, "score": 36434.56916896375 }, { "content": " let mut buffer = Vec::with_capacity(1024);\n\n for _ in 0..1024 {\n\n buffer.push(0);\n\n }\n\n\n\n Ok(SocketClient {\n\n stream,\n\n output_buffer: vec![],\n\n input_buffer: vec![],\n\n tmp_input_buffer: buffer,\n\n })\n\n }\n\n\n\n pub fn push(&mut self, bytes: Vec<u8>) {\n\n self.output_buffer.extend(bytes);\n\n }\n\n\n\n /// returns ErrorKind::BrokenPipe if connection fail\n\n pub fn tick(&mut self) -> std::io::Result<()> {\n\n if !self.output_buffer.is_empty() {\n", "file_path": "rust/game/src/client/mod.rs", "rank": 48, "score": 36434.27664643963 }, { "content": "use std::net::{TcpStream, Shutdown};\n\nuse std::io::{Write, Read, Error, ErrorKind};\n\n\n\n/// Read and write from a socket in non blocking mode with buffers\n\n#[derive(Debug)]\n\npub struct SocketClient {\n\n stream: TcpStream,\n\n /// bytes to be send\n\n output_buffer: Vec<u8>,\n\n /// bytes to be taken\n\n input_buffer: Vec<u8>,\n\n /// buffer to read bytes\n\n tmp_input_buffer: Vec<u8>,\n\n}\n\n\n\nimpl SocketClient {\n\n pub fn connect(address: &str) -> std::io::Result<Self> {\n\n let mut stream = 
TcpStream::connect(address)?;\n\n stream.set_nonblocking(true)?;\n\n\n", "file_path": "rust/game/src/client/mod.rs", "rank": 49, "score": 36434.22560726563 }, { "content": "use game::schemas::RawMsgBuffer;\n\n\n\npub mod server_socket;\n\n\n\npub type ConnectionId = u32;\n\n\n\n/// From the server to the connection\n\n#[derive(Debug, Clone)]\n\npub struct ServerOutput {\n\n pub connection_id: ConnectionId,\n\n pub msg: RawMsgBuffer,\n\n}\n\n\n\n/// From the connection into the server\n\n#[derive(Debug, Clone)]\n\npub struct ServerInput {\n\n pub connection_id: ConnectionId,\n\n pub msg: RawMsgBuffer,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ServerChanges {\n\n pub connects: Vec<ConnectionId>,\n\n pub disconnects: Vec<ConnectionId>,\n\n pub inputs: Vec<ServerInput>,\n\n}\n\n\n", "file_path": "rust/server/src/server/mod.rs", "rank": 50, "score": 36431.886835153535 }, { "content": " }\n\n }\n\n\n\n fn check_connection(&mut self) -> Result<()> {\n\n match self.state {\n\n State::NotConnected => {\n\n // println!(\"connecting\");\n\n // TODO: this looks bad, what if connection hangout?\n\n let socket = SocketClient::connect(&self.address)?;\n\n\n\n self.state = State::Connected {\n\n buffer: PackageBuffer::new(),\n\n socket,\n\n queue: vec![],\n\n };\n\n\n\n // println!(\"connected\");\n\n }\n\n _ => {}\n\n }\n", "file_path": "rust/ffi/src/client/mod.rs", "rank": 51, "score": 36429.06663106404 }, { "content": "use game::client::SocketClient;\n\nuse game::packages::package_buffer::PackageBuffer;\n\nuse game::{schemas::RawMsgBuffer, Error, Result};\n\nuse logs::*;\n\nuse std::io::ErrorKind;\n\n\n\n#[derive(Debug)]\n", "file_path": "rust/ffi/src/client/mod.rs", "rank": 52, "score": 36428.388477906155 }, { "content": " let size = self.stream.write(&self.output_buffer)?;\n\n self.output_buffer.drain(0..size);\n\n self.stream.flush()?;\n\n }\n\n\n\n match self.stream.read(&mut self.tmp_input_buffer) {\n\n Ok(size) if size == 0 => {\n\n // println!(\"empty 
response!\");\n\n // Ok(())\n\n Err(Error::from(ErrorKind::BrokenPipe))\n\n },\n\n Ok(size) => {\n\n self.input_buffer.extend_from_slice(&self.tmp_input_buffer[0..size]);\n\n Ok(())\n\n },\n\n Err(ref err) if err.kind() == std::io::ErrorKind::WouldBlock => Ok(()),\n\n Err(e) =>\n\n Err(e),\n\n }\n\n }\n", "file_path": "rust/game/src/client/mod.rs", "rank": 53, "score": 36422.74029125544 }, { "content": " Ok(None)\n\n } else {\n\n Ok(Some(queue.remove(0)))\n\n }\n\n }\n\n\n\n Err(e) if e.kind() == std::io::ErrorKind::BrokenPipe => {\n\n info!(\"switching to not connected\");\n\n self.state = State::NotConnected;\n\n Err(Error::Disconnect)\n\n }\n\n\n\n Err(e) => Err(e.into()),\n\n },\n\n State::NotConnected => {\n\n // TODO: should we do something?\n\n Err(Error::Disconnect)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rust/ffi/src/client/mod.rs", "rank": 54, "score": 36420.94202763655 }, { "content": " let game_responses = game.take()?;\n\n debug!(\"ffi returning responses: {:?}\", game_responses);\n\n game::schemas::serialize_game_events(game_responses).map(Option::from)\n\n }\n\n\n\n RunMode::Server { client } => client.take_responses(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n}\n", "file_path": "rust/ffi/src/ffi/mod.rs", "rank": 55, "score": 36420.01239743199 }, { "content": "#[derive(Debug)]\n\nenum State {\n\n NotConnected,\n\n Connected {\n\n buffer: PackageBuffer,\n\n socket: SocketClient,\n\n queue: Vec<(u16, RawMsgBuffer)>,\n\n },\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Client {\n\n address: String,\n\n state: State,\n\n}\n\n\n\nimpl Client {\n\n pub fn new(address: &str) -> Self {\n\n Client {\n\n address: address.to_string(),\n\n state: State::NotConnected,\n", "file_path": "rust/ffi/src/client/mod.rs", "rank": 56, "score": 35311.995307442034 }, { "content": " #[inline]\n\n fn default() -> Self {\n\n StringPackageArgs {\n\n kind: RequestKind::GameStatus,\n\n ordering: 0,\n\n buffer: None,\n\n }\n\n }\n\n}\n\npub struct 
StringPackageBuilder<'a: 'b, 'b> {\n\n fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,\n\n start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,\n\n}\n\nimpl<'a: 'b, 'b> StringPackageBuilder<'a, 'b> {\n\n #[inline]\n\n pub fn add_kind(&mut self, kind: RequestKind) {\n\n self.fbb_.push_slot::<RequestKind>(StringPackage::VT_KIND, kind, RequestKind::GameStatus);\n\n }\n\n #[inline]\n\n pub fn add_ordering(&mut self, ordering: u32) {\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 57, "score": 35101.73074803082 }, { "content": "\n\n #[inline]\n\n fn push(&self, dst: &mut [u8], _rest: &[u8]) {\n\n let src = unsafe {\n\n ::std::slice::from_raw_parts(*self as *const EmptyPackage as *const u8, Self::size())\n\n };\n\n dst.copy_from_slice(src);\n\n }\n\n}\n\n\n\n\n\nimpl EmptyPackage {\n\n pub fn new<'a>(_kind: RequestKind, _ordering: u32) -> Self {\n\n EmptyPackage {\n\n kind_: _kind.to_little_endian(),\n\n ordering_: _ordering.to_little_endian(),\n\n\n\n padding0__: 0,\n\n }\n\n }\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 58, "score": 35100.01205688346 }, { "content": " #[inline]\n\n fn default() -> Self {\n\n RequestsArgs {\n\n total_messages: 0,\n\n empty_packages: None,\n\n v2_packages: None,\n\n }\n\n }\n\n}\n\npub struct RequestsBuilder<'a: 'b, 'b> {\n\n fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,\n\n start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,\n\n}\n\nimpl<'a: 'b, 'b> RequestsBuilder<'a, 'b> {\n\n #[inline]\n\n pub fn add_total_messages(&mut self, total_messages: u32) {\n\n self.fbb_.push_slot::<u32>(Requests::VT_TOTAL_MESSAGES, total_messages, 0);\n\n }\n\n #[inline]\n\n pub fn add_empty_packages(&mut self, empty_packages: flatbuffers::WIPOffset<flatbuffers::Vector<'b , EmptyPackage>>) {\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 59, "score": 35098.6494809947 }, { "content": " #[inline]\n\n pub fn kind(&self) -> RequestKind {\n\n 
self._tab.get::<RequestKind>(StringPackage::VT_KIND, Some(RequestKind::GameStatus)).unwrap()\n\n }\n\n #[inline]\n\n pub fn ordering(&self) -> u32 {\n\n self._tab.get::<u32>(StringPackage::VT_ORDERING, Some(0)).unwrap()\n\n }\n\n #[inline]\n\n pub fn buffer(&self) -> Option<&'a str> {\n\n self._tab.get::<flatbuffers::ForwardsUOffset<&str>>(StringPackage::VT_BUFFER, None)\n\n }\n\n}\n\n\n\npub struct StringPackageArgs<'a> {\n\n pub kind: RequestKind,\n\n pub ordering: u32,\n\n pub buffer: Option<flatbuffers::WIPOffset<&'a str>>,\n\n}\n\nimpl<'a> Default for StringPackageArgs<'a> {\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 60, "score": 35098.611468318755 }, { "content": " pub fn kind<'a>(&'a self) -> RequestKind {\n\n self.kind_.from_little_endian()\n\n }\n\n pub fn ordering<'a>(&'a self) -> u32 {\n\n self.ordering_.from_little_endian()\n\n }\n\n}\n\n\n\n// struct V2Package, aligned to 4\n\n#[repr(C, align(4))]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct V2Package {\n\n kind_: RequestKind,\n\n padding0__: u16,\n\n ordering_: u32,\n\n x_: f32,\n\n y_: f32,\n\n} // pub struct V2Package\n\nimpl flatbuffers::SafeSliceAccess for V2Package {}\n\nimpl<'a> flatbuffers::Follow<'a> for V2Package {\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 61, "score": 35098.47346906887 }, { "content": " dst.copy_from_slice(src);\n\n }\n\n}\n\nimpl<'b> flatbuffers::Push for &'b V2Package {\n\n type Output = V2Package;\n\n\n\n #[inline]\n\n fn push(&self, dst: &mut [u8], _rest: &[u8]) {\n\n let src = unsafe {\n\n ::std::slice::from_raw_parts(*self as *const V2Package as *const u8, Self::size())\n\n };\n\n dst.copy_from_slice(src);\n\n }\n\n}\n\n\n\n\n\nimpl V2Package {\n\n pub fn new<'a>(_kind: RequestKind, _ordering: u32, _x: f32, _y: f32) -> Self {\n\n V2Package {\n\n kind_: _kind.to_little_endian(),\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 62, "score": 35097.720167688654 }, { "content": 
"\n\nimpl flatbuffers::EndianScalar for RequestKind {\n\n #[inline]\n\n fn to_little_endian(self) -> Self {\n\n let n = u16::to_le(self as u16);\n\n let p = &n as *const u16 as *const RequestKind;\n\n unsafe { *p }\n\n }\n\n #[inline]\n\n fn from_little_endian(self) -> Self {\n\n let n = u16::from_le(self as u16);\n\n let p = &n as *const u16 as *const RequestKind;\n\n unsafe { *p }\n\n }\n\n}\n\n\n\nimpl flatbuffers::Push for RequestKind {\n\n type Output = RequestKind;\n\n #[inline]\n\n fn push(&self, dst: &mut [u8], _rest: &[u8]) {\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 63, "score": 35095.82010161195 }, { "content": "#[repr(u16)]\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n\npub enum RequestKind {\n\n GameStatus = 0,\n\n StartGame = 1,\n\n GetAll = 2,\n\n SetInputAxis = 3,\n\n\n\n}\n\n\n\nconst ENUM_MIN_REQUEST_KIND: u16 = 0;\n\nconst ENUM_MAX_REQUEST_KIND: u16 = 3;\n\n\n\nimpl<'a> flatbuffers::Follow<'a> for RequestKind {\n\n type Inner = Self;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n flatbuffers::read_scalar_at::<Self>(buf, loc)\n\n }\n\n}\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 64, "score": 35094.94725763536 }, { "content": " pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {\n\n Requests {\n\n _tab: table,\n\n }\n\n }\n\n #[allow(unused_mut)]\n\n pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(\n\n _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,\n\n args: &'args RequestsArgs<'args>) -> flatbuffers::WIPOffset<Requests<'bldr>> {\n\n let mut builder = RequestsBuilder::new(_fbb);\n\n if let Some(x) = args.v2_packages { builder.add_v2_packages(x); }\n\n if let Some(x) = args.empty_packages { builder.add_empty_packages(x); }\n\n builder.add_total_messages(args.total_messages);\n\n builder.finish()\n\n }\n\n\n\n pub const VT_TOTAL_MESSAGES: flatbuffers::VOffsetT = 4;\n\n pub const VT_EMPTY_PACKAGES: 
flatbuffers::VOffsetT = 6;\n\n pub const VT_V2_PACKAGES: flatbuffers::VOffsetT = 8;\n\n\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 65, "score": 35094.75502066149 }, { "content": " #[inline]\n\n pub fn total_messages(&self) -> u32 {\n\n self._tab.get::<u32>(Requests::VT_TOTAL_MESSAGES, Some(0)).unwrap()\n\n }\n\n #[inline]\n\n pub fn empty_packages(&self) -> Option<&'a [EmptyPackage]> {\n\n self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<EmptyPackage>>>(Requests::VT_EMPTY_PACKAGES, None).map(|v| v.safe_slice() )\n\n }\n\n #[inline]\n\n pub fn v2_packages(&self) -> Option<&'a [V2Package]> {\n\n self._tab.get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<V2Package>>>(Requests::VT_V2_PACKAGES, None).map(|v| v.safe_slice() )\n\n }\n\n}\n\n\n\npub struct RequestsArgs<'a> {\n\n pub total_messages: u32,\n\n pub empty_packages: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , EmptyPackage>>>,\n\n pub v2_packages: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a , V2Package>>>,\n\n}\n\nimpl<'a> Default for RequestsArgs<'a> {\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 66, "score": 35094.52689646119 }, { "content": " pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {\n\n StringPackage {\n\n _tab: table,\n\n }\n\n }\n\n #[allow(unused_mut)]\n\n pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(\n\n _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,\n\n args: &'args StringPackageArgs<'args>) -> flatbuffers::WIPOffset<StringPackage<'bldr>> {\n\n let mut builder = StringPackageBuilder::new(_fbb);\n\n if let Some(x) = args.buffer { builder.add_buffer(x); }\n\n builder.add_ordering(args.ordering);\n\n builder.add_kind(args.kind);\n\n builder.finish()\n\n }\n\n\n\n pub const VT_KIND: flatbuffers::VOffsetT = 4;\n\n pub const VT_ORDERING: flatbuffers::VOffsetT = 6;\n\n pub const VT_BUFFER: flatbuffers::VOffsetT = 8;\n\n\n", "file_path": 
"rust/game/src/schemas/requests_generated.rs", "rank": 67, "score": 35094.00141163153 }, { "content": "// automatically generated by the FlatBuffers compiler, do not modify\n\n\n\n\n\n\n\nuse std::mem;\n\nuse std::cmp::Ordering;\n\n\n\nextern crate flatbuffers;\n\nuse self::flatbuffers::EndianScalar;\n\n\n\n#[allow(unused_imports, dead_code)]\n\npub mod ffi_requests {\n\n\n\n use std::mem;\n\n use std::cmp::Ordering;\n\n\n\n extern crate flatbuffers;\n\n use self::flatbuffers::EndianScalar;\n\n\n\n#[allow(non_camel_case_types)]\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 68, "score": 35093.823654782194 }, { "content": " self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Requests::VT_EMPTY_PACKAGES, empty_packages);\n\n }\n\n #[inline]\n\n pub fn add_v2_packages(&mut self, v2_packages: flatbuffers::WIPOffset<flatbuffers::Vector<'b , V2Package>>) {\n\n self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(Requests::VT_V2_PACKAGES, v2_packages);\n\n }\n\n #[inline]\n\n pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> RequestsBuilder<'a, 'b> {\n\n let start = _fbb.start_table();\n\n RequestsBuilder {\n\n fbb_: _fbb,\n\n start_: start,\n\n }\n\n }\n\n #[inline]\n\n pub fn finish(self) -> flatbuffers::WIPOffset<Requests<'a>> {\n\n let o = self.fbb_.end_table(self.start_);\n\n flatbuffers::WIPOffset::new(o.value())\n\n }\n\n}\n\n\n\n} // pub mod FfiRequests\n\n\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 69, "score": 35092.85837642679 }, { "content": " ordering_: _ordering.to_little_endian(),\n\n x_: _x.to_little_endian(),\n\n y_: _y.to_little_endian(),\n\n\n\n padding0__: 0,\n\n }\n\n }\n\n pub fn kind<'a>(&'a self) -> RequestKind {\n\n self.kind_.from_little_endian()\n\n }\n\n pub fn ordering<'a>(&'a self) -> u32 {\n\n self.ordering_.from_little_endian()\n\n }\n\n pub fn x<'a>(&'a self) -> f32 {\n\n self.x_.from_little_endian()\n\n }\n\n pub fn y<'a>(&'a self) -> f32 {\n\n 
self.y_.from_little_endian()\n\n }\n\n}\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 70, "score": 35092.02553884247 }, { "content": " type Inner = &'a V2Package;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n <&'a V2Package>::follow(buf, loc)\n\n }\n\n}\n\nimpl<'a> flatbuffers::Follow<'a> for &'a V2Package {\n\n type Inner = &'a V2Package;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n flatbuffers::follow_cast_ref::<V2Package>(buf, loc)\n\n }\n\n}\n\nimpl<'b> flatbuffers::Push for V2Package {\n\n type Output = V2Package;\n\n #[inline]\n\n fn push(&self, dst: &mut [u8], _rest: &[u8]) {\n\n let src = unsafe {\n\n ::std::slice::from_raw_parts(self as *const V2Package as *const u8, Self::size())\n\n };\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 71, "score": 35089.72170000095 }, { "content": "\n\npub enum RequestsOffset {}\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\n\n\npub struct Requests<'a> {\n\n pub _tab: flatbuffers::Table<'a>,\n\n}\n\n\n\nimpl<'a> flatbuffers::Follow<'a> for Requests<'a> {\n\n type Inner = Requests<'a>;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n Self {\n\n _tab: flatbuffers::Table { buf: buf, loc: loc },\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Requests<'a> {\n\n #[inline]\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 72, "score": 35089.69373568154 }, { "content": " self.fbb_.push_slot::<u32>(StringPackage::VT_ORDERING, ordering, 0);\n\n }\n\n #[inline]\n\n pub fn add_buffer(&mut self, buffer: flatbuffers::WIPOffset<&'b str>) {\n\n self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(StringPackage::VT_BUFFER, buffer);\n\n }\n\n #[inline]\n\n pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> StringPackageBuilder<'a, 'b> {\n\n let start = _fbb.start_table();\n\n StringPackageBuilder {\n\n fbb_: _fbb,\n\n start_: start,\n\n }\n\n }\n\n #[inline]\n\n pub fn finish(self) 
-> flatbuffers::WIPOffset<StringPackage<'a>> {\n\n let o = self.fbb_.end_table(self.start_);\n\n flatbuffers::WIPOffset::new(o.value())\n\n }\n\n}\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 73, "score": 35089.24789624167 }, { "content": "\n\npub enum StringPackageOffset {}\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\n\n\npub struct StringPackage<'a> {\n\n pub _tab: flatbuffers::Table<'a>,\n\n}\n\n\n\nimpl<'a> flatbuffers::Follow<'a> for StringPackage<'a> {\n\n type Inner = StringPackage<'a>;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n Self {\n\n _tab: flatbuffers::Table { buf: buf, loc: loc },\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> StringPackage<'a> {\n\n #[inline]\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 74, "score": 35089.12654285833 }, { "content": "}\n\nimpl<'a> flatbuffers::Follow<'a> for &'a EmptyPackage {\n\n type Inner = &'a EmptyPackage;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n flatbuffers::follow_cast_ref::<EmptyPackage>(buf, loc)\n\n }\n\n}\n\nimpl<'b> flatbuffers::Push for EmptyPackage {\n\n type Output = EmptyPackage;\n\n #[inline]\n\n fn push(&self, dst: &mut [u8], _rest: &[u8]) {\n\n let src = unsafe {\n\n ::std::slice::from_raw_parts(self as *const EmptyPackage as *const u8, Self::size())\n\n };\n\n dst.copy_from_slice(src);\n\n }\n\n}\n\nimpl<'b> flatbuffers::Push for &'b EmptyPackage {\n\n type Output = EmptyPackage;\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 75, "score": 35088.6812615257 }, { "content": " flatbuffers::emplace_scalar::<RequestKind>(dst, *self);\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\nconst ENUM_VALUES_REQUEST_KIND:[RequestKind; 4] = [\n\n RequestKind::GameStatus,\n\n RequestKind::StartGame,\n\n RequestKind::GetAll,\n\n RequestKind::SetInputAxis\n\n];\n\n\n\n#[allow(non_camel_case_types)]\n\nconst ENUM_NAMES_REQUEST_KIND:[&'static str; 4] = [\n\n \"GameStatus\",\n\n 
\"StartGame\",\n\n \"GetAll\",\n\n \"SetInputAxis\"\n\n];\n\n\n", "file_path": "rust/game/src/schemas/requests_generated.rs", "rank": 76, "score": 35088.515835323306 }, { "content": "#[repr(u16)]\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]\n\npub enum PackageKind {\n\n Request = 0,\n\n Response = 1,\n\n\n\n}\n\n\n\nconst ENUM_MIN_PACKAGE_KIND: u16 = 0;\n\nconst ENUM_MAX_PACKAGE_KIND: u16 = 1;\n\n\n\nimpl<'a> flatbuffers::Follow<'a> for PackageKind {\n\n type Inner = Self;\n\n #[inline]\n\n fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {\n\n flatbuffers::read_scalar_at::<Self>(buf, loc)\n\n }\n\n}\n\n\n\nimpl flatbuffers::EndianScalar for PackageKind {\n", "file_path": "rust/game/src/schemas/packages_generated.rs", "rank": 77, "score": 34939.15094552958 }, { "content": " #[inline]\n\n fn to_little_endian(self) -> Self {\n\n let n = u16::to_le(self as u16);\n\n let p = &n as *const u16 as *const PackageKind;\n\n unsafe { *p }\n\n }\n\n #[inline]\n\n fn from_little_endian(self) -> Self {\n\n let n = u16::from_le(self as u16);\n\n let p = &n as *const u16 as *const PackageKind;\n\n unsafe { *p }\n\n }\n\n}\n\n\n\nimpl flatbuffers::Push for PackageKind {\n\n type Output = PackageKind;\n\n #[inline]\n\n fn push(&self, dst: &mut [u8], _rest: &[u8]) {\n\n flatbuffers::emplace_scalar::<PackageKind>(dst, *self);\n\n }\n", "file_path": "rust/game/src/schemas/packages_generated.rs", "rank": 78, "score": 34937.09298221613 }, { "content": "// automatically generated by the FlatBuffers compiler, do not modify\n\n\n\n\n\n\n\nuse std::mem;\n\nuse std::cmp::Ordering;\n\n\n\nextern crate flatbuffers;\n\nuse self::flatbuffers::EndianScalar;\n\n\n\n#[allow(unused_imports, dead_code)]\n\npub mod ffi_packages {\n\n\n\n use std::mem;\n\n use std::cmp::Ordering;\n\n\n\n extern crate flatbuffers;\n\n use self::flatbuffers::EndianScalar;\n\n\n\n#[allow(non_camel_case_types)]\n", "file_path": "rust/game/src/schemas/packages_generated.rs", "rank": 79, "score": 
34935.15400911133 }, { "content": "}\n\n\n\n#[allow(non_camel_case_types)]\n\nconst ENUM_VALUES_PACKAGE_KIND:[PackageKind; 2] = [\n\n PackageKind::Request,\n\n PackageKind::Response\n\n];\n\n\n\n#[allow(non_camel_case_types)]\n\nconst ENUM_NAMES_PACKAGE_KIND:[&'static str; 2] = [\n\n \"Request\",\n\n \"Response\"\n\n];\n\n\n", "file_path": "rust/game/src/schemas/packages_generated.rs", "rank": 80, "score": 34934.30335381882 }, { "content": "#[derive(Debug)]\n\nenum RunMode {\n\n Embedded { game: Game },\n\n Server { client: Client },\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct FfiContext {\n\n mode: RunMode,\n\n}\n\n\n\nimpl<'a> FfiContext {\n\n pub fn new(address: Option<&str>) -> Self {\n\n let mode = match address {\n\n Some(address) => {\n\n // TODO: should not throw errors here\n\n let client = Client::new(address);\n\n RunMode::Server { client }\n\n }\n\n None => RunMode::Embedded { game: Game::new() },\n\n };\n", "file_path": "rust/ffi/src/ffi/mod.rs", "rank": 81, "score": 34274.80289391954 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\npublic enum ResponseKind : ushort\n\n{\n\n GameStarted = 0,\n\n GameStatusIdle = 1,\n\n GameStatusRunning = 2,\n\n FullStateResponse = 3,\n\n CreateObj = 4,\n\n MoveObj = 5,\n\n InvalidRequest = 6,\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/ResponseKind.cs", "rank": 82, "score": 33392.39779630207 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\npublic enum PrefabKind : ushort\n\n{\n\n Player = 0,\n\n Monster = 1,\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/PrefabKind.cs", "rank": 83, "score": 33389.929647574034 }, { "content": " public float X { get { return __p.bb.GetFloat(__p.bb_pos + 12); } }\n\n public float 
Y { get { return __p.bb.GetFloat(__p.bb_pos + 16); } }\n\n\n\n public static Offset<FfiResponses.PosPackage> CreatePosPackage(FlatBufferBuilder builder, FfiResponses.ResponseKind Kind, uint Ordering, uint Id, float X, float Y) {\n\n builder.Prep(4, 20);\n\n builder.PutFloat(Y);\n\n builder.PutFloat(X);\n\n builder.PutUint(Id);\n\n builder.PutUint(Ordering);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Kind);\n\n return new Offset<FfiResponses.PosPackage>(builder.Offset);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/PosPackage.cs", "rank": 84, "score": 32940.83791046629 }, { "content": " public FfiResponses.PrefabKind Prefab { get { return (FfiResponses.PrefabKind)__p.bb.GetUshort(__p.bb_pos + 12); } }\n\n public float X { get { return __p.bb.GetFloat(__p.bb_pos + 16); } }\n\n public float Y { get { return __p.bb.GetFloat(__p.bb_pos + 20); } }\n\n\n\n public static Offset<FfiResponses.CreatePackage> CreateCreatePackage(FlatBufferBuilder builder, FfiResponses.ResponseKind Kind, uint Ordering, uint Id, FfiResponses.PrefabKind Prefab, float X, float Y) {\n\n builder.Prep(4, 24);\n\n builder.PutFloat(Y);\n\n builder.PutFloat(X);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Prefab);\n\n builder.PutUint(Id);\n\n builder.PutUint(Ordering);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Kind);\n\n return new Offset<FfiResponses.CreatePackage>(builder.Offset);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/CreatePackage.cs", "rank": 85, "score": 32939.85494798735 }, { "content": " public float Y { get { return __p.bb.GetFloat(__p.bb_pos + 12); } }\n\n\n\n public static Offset<FfiResponses.V2Package> CreateV2Package(FlatBufferBuilder builder, FfiResponses.ResponseKind Kind, uint Ordering, float X, float Y) {\n\n builder.Prep(4, 16);\n\n builder.PutFloat(Y);\n\n builder.PutFloat(X);\n\n builder.PutUint(Ordering);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Kind);\n\n return new 
Offset<FfiResponses.V2Package>(builder.Offset);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/V2Package.cs", "rank": 86, "score": 32938.08355810393 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct V2Package : IFlatbufferObject\n\n{\n\n private Struct __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Struct(_i, _bb); }\n\n public V2Package __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n\n public FfiResponses.ResponseKind Kind { get { return (FfiResponses.ResponseKind)__p.bb.GetUshort(__p.bb_pos + 0); } }\n\n public uint Ordering { get { return __p.bb.GetUint(__p.bb_pos + 4); } }\n\n public float X { get { return __p.bb.GetFloat(__p.bb_pos + 8); } }\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/V2Package.cs", "rank": 87, "score": 32937.2290919761 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct CreatePackage : IFlatbufferObject\n\n{\n\n private Struct __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Struct(_i, _bb); }\n\n public CreatePackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n\n public FfiResponses.ResponseKind Kind { get { return (FfiResponses.ResponseKind)__p.bb.GetUshort(__p.bb_pos + 0); } }\n\n public uint Ordering { get { return __p.bb.GetUint(__p.bb_pos + 4); } }\n\n public uint Id { get { return __p.bb.GetUint(__p.bb_pos + 8); } }\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/CreatePackage.cs", "rank": 
88, "score": 32937.19944663026 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct PosPackage : IFlatbufferObject\n\n{\n\n private Struct __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Struct(_i, _bb); }\n\n public PosPackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n\n public FfiResponses.ResponseKind Kind { get { return (FfiResponses.ResponseKind)__p.bb.GetUshort(__p.bb_pos + 0); } }\n\n public uint Ordering { get { return __p.bb.GetUint(__p.bb_pos + 4); } }\n\n public uint Id { get { return __p.bb.GetUint(__p.bb_pos + 8); } }\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/PosPackage.cs", "rank": 89, "score": 32937.19944663026 }, { "content": " public static Offset<FfiResponses.EmptyPackage> CreateEmptyPackage(FlatBufferBuilder builder, FfiResponses.ResponseKind Kind, uint Ordering) {\n\n builder.Prep(4, 8);\n\n builder.PutUint(Ordering);\n\n builder.Pad(2);\n\n builder.PutUshort((ushort)Kind);\n\n return new Offset<FfiResponses.EmptyPackage>(builder.Offset);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/EmptyPackage.cs", "rank": 90, "score": 32935.859201637504 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct EmptyPackage : IFlatbufferObject\n\n{\n\n private Struct __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Struct(_i, _bb); }\n\n public EmptyPackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n\n public 
FfiResponses.ResponseKind Kind { get { return (FfiResponses.ResponseKind)__p.bb.GetUshort(__p.bb_pos + 0); } }\n\n public uint Ordering { get { return __p.bb.GetUint(__p.bb_pos + 4); } }\n\n\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/EmptyPackage.cs", "rank": 91, "score": 32935.66358132974 }, { "content": " public FfiResponses.ResponseKind Kind { get { int o = __p.__offset(4); return o != 0 ? (FfiResponses.ResponseKind)__p.bb.GetUshort(o + __p.bb_pos) : FfiResponses.ResponseKind.GameStarted; } }\n\n public uint Ordering { get { int o = __p.__offset(6); return o != 0 ? __p.bb.GetUint(o + __p.bb_pos) : (uint)0; } }\n\n public string Buffer { get { int o = __p.__offset(8); return o != 0 ? __p.__string(o + __p.bb_pos) : null; } }\n\n#if ENABLE_SPAN_T\n\n public Span<byte> GetBufferBytes() { return __p.__vector_as_span<byte>(8, 1); }\n\n#else\n\n public ArraySegment<byte>? GetBufferBytes() { return __p.__vector_as_arraysegment(8); }\n\n#endif\n\n public byte[] GetBufferArray() { return __p.__vector_as_array<byte>(8); }\n\n\n\n public static Offset<FfiResponses.StringPackage> CreateStringPackage(FlatBufferBuilder builder,\n\n FfiResponses.ResponseKind kind = FfiResponses.ResponseKind.GameStarted,\n\n uint ordering = 0,\n\n StringOffset bufferOffset = default(StringOffset)) {\n\n builder.StartTable(3);\n\n StringPackage.AddBuffer(builder, bufferOffset);\n\n StringPackage.AddOrdering(builder, ordering);\n\n StringPackage.AddKind(builder, kind);\n\n return StringPackage.EndStringPackage(builder);\n\n }\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/StringPackage.cs", "rank": 92, "score": 32935.52495913092 }, { "content": " public FfiResponses.ResponseKind Kind { get { int o = __p.__offset(4); return o != 0 ? (FfiResponses.ResponseKind)__p.bb.GetUshort(o + __p.bb_pos) : FfiResponses.ResponseKind.GameStarted; } }\n\n public uint Ordering { get { int o = __p.__offset(6); return o != 0 ? 
__p.bb.GetUint(o + __p.bb_pos) : (uint)0; } }\n\n public sbyte Buffer(int j) { int o = __p.__offset(8); return o != 0 ? __p.bb.GetSbyte(__p.__vector(o) + j * 1) : (sbyte)0; }\n\n public int BufferLength { get { int o = __p.__offset(8); return o != 0 ? __p.__vector_len(o) : 0; } }\n\n#if ENABLE_SPAN_T\n\n public Span<sbyte> GetBufferBytes() { return __p.__vector_as_span<sbyte>(8, 1); }\n\n#else\n\n public ArraySegment<byte>? GetBufferBytes() { return __p.__vector_as_arraysegment(8); }\n\n#endif\n\n public sbyte[] GetBufferArray() { return __p.__vector_as_array<sbyte>(8); }\n\n\n\n public static Offset<FfiResponses.BytesPackage> CreateBytesPackage(FlatBufferBuilder builder,\n\n FfiResponses.ResponseKind kind = FfiResponses.ResponseKind.GameStarted,\n\n uint ordering = 0,\n\n VectorOffset bufferOffset = default(VectorOffset)) {\n\n builder.StartTable(3);\n\n BytesPackage.AddBuffer(builder, bufferOffset);\n\n BytesPackage.AddOrdering(builder, ordering);\n\n BytesPackage.AddKind(builder, kind);\n\n return BytesPackage.EndBytesPackage(builder);\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/BytesPackage.cs", "rank": 93, "score": 32935.29820189554 }, { "content": "\n\n public static void StartStringPackage(FlatBufferBuilder builder) { builder.StartTable(3); }\n\n public static void AddKind(FlatBufferBuilder builder, FfiResponses.ResponseKind kind) { builder.AddUshort(0, (ushort)kind, 0); }\n\n public static void AddOrdering(FlatBufferBuilder builder, uint ordering) { builder.AddUint(1, ordering, 0); }\n\n public static void AddBuffer(FlatBufferBuilder builder, StringOffset bufferOffset) { builder.AddOffset(2, bufferOffset.Value, 0); }\n\n public static Offset<FfiResponses.StringPackage> EndStringPackage(FlatBufferBuilder builder) {\n\n int o = builder.EndTable();\n\n return new Offset<FfiResponses.StringPackage>(o);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/StringPackage.cs", "rank": 94, "score": 32934.89275667841 }, { 
"content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct StringPackage : IFlatbufferObject\n\n{\n\n private Table __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public static void ValidateVersion() { FlatBufferConstants.FLATBUFFERS_1_11_1(); }\n\n public static StringPackage GetRootAsStringPackage(ByteBuffer _bb) { return GetRootAsStringPackage(_bb, new StringPackage()); }\n\n public static StringPackage GetRootAsStringPackage(ByteBuffer _bb, StringPackage obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Table(_i, _bb); }\n\n public StringPackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/StringPackage.cs", "rank": 95, "score": 32934.459439830585 }, { "content": "// <auto-generated>\n\n// automatically generated by the FlatBuffers compiler, do not modify\n\n// </auto-generated>\n\n\n\nnamespace FfiResponses\n\n{\n\n\n\nusing global::System;\n\nusing global::FlatBuffers;\n\n\n\npublic struct BytesPackage : IFlatbufferObject\n\n{\n\n private Table __p;\n\n public ByteBuffer ByteBuffer { get { return __p.bb; } }\n\n public static void ValidateVersion() { FlatBufferConstants.FLATBUFFERS_1_11_1(); }\n\n public static BytesPackage GetRootAsBytesPackage(ByteBuffer _bb) { return GetRootAsBytesPackage(_bb, new BytesPackage()); }\n\n public static BytesPackage GetRootAsBytesPackage(ByteBuffer _bb, BytesPackage obj) { return (obj.__assign(_bb.GetInt(_bb.Position) + _bb.Position, _bb)); }\n\n public void __init(int _i, ByteBuffer _bb) { __p = new Table(_i, _bb); }\n\n public BytesPackage __assign(int _i, ByteBuffer _bb) { __init(_i, _bb); return this; }\n\n\n", "file_path": 
"unity3d/Assets/src/Domain/FfiResponses/BytesPackage.cs", "rank": 96, "score": 32934.459439830585 }, { "content": " }\n\n\n\n public static void StartBytesPackage(FlatBufferBuilder builder) { builder.StartTable(3); }\n\n public static void AddKind(FlatBufferBuilder builder, FfiResponses.ResponseKind kind) { builder.AddUshort(0, (ushort)kind, 0); }\n\n public static void AddOrdering(FlatBufferBuilder builder, uint ordering) { builder.AddUint(1, ordering, 0); }\n\n public static void AddBuffer(FlatBufferBuilder builder, VectorOffset bufferOffset) { builder.AddOffset(2, bufferOffset.Value, 0); }\n\n public static VectorOffset CreateBufferVector(FlatBufferBuilder builder, sbyte[] data) { builder.StartVector(1, data.Length, 1); for (int i = data.Length - 1; i >= 0; i--) builder.AddSbyte(data[i]); return builder.EndVector(); }\n\n public static VectorOffset CreateBufferVectorBlock(FlatBufferBuilder builder, sbyte[] data) { builder.StartVector(1, data.Length, 1); builder.Add(data); return builder.EndVector(); }\n\n public static void StartBufferVector(FlatBufferBuilder builder, int numElems) { builder.StartVector(1, numElems, 1); }\n\n public static Offset<FfiResponses.BytesPackage> EndBytesPackage(FlatBufferBuilder builder) {\n\n int o = builder.EndTable();\n\n return new Offset<FfiResponses.BytesPackage>(o);\n\n }\n\n};\n\n\n\n\n\n}\n", "file_path": "unity3d/Assets/src/Domain/FfiResponses/BytesPackage.cs", "rank": 97, "score": 32932.70171991856 } ]
Rust
src/handlers/theme.rs
cmarincia/miette
714334098a92c77fb6b962e627defab7e16b540d
use atty::Stream; use owo_colors::Style; /** Theme used by [`GraphicalReportHandler`](crate::GraphicalReportHandler) to render fancy [`Diagnostic`](crate::Diagnostic) reports. A theme consists of two things: the set of characters to be used for drawing, and the [`owo_colors::Style`](https://docs.rs/owo-colors/latest/owo_colors/struct.Style.html)s to be used to paint various items. You can create your own custom graphical theme using this type, or you can use one of the predefined ones using the methods below. */ #[derive(Debug, Clone)] pub struct GraphicalTheme { pub characters: ThemeCharacters, pub styles: ThemeStyles, } impl GraphicalTheme { pub fn ascii() -> Self { Self { characters: ThemeCharacters::ascii(), styles: ThemeStyles::ansi(), } } pub fn unicode() -> Self { Self { characters: ThemeCharacters::unicode(), styles: ThemeStyles::rgb(), } } pub fn unicode_nocolor() -> Self { Self { characters: ThemeCharacters::unicode(), styles: ThemeStyles::none(), } } pub fn none() -> Self { Self { characters: ThemeCharacters::ascii(), styles: ThemeStyles::none(), } } } impl Default for GraphicalTheme { fn default() -> Self { match std::env::var("NO_COLOR") { _ if !atty::is(Stream::Stdout) || !atty::is(Stream::Stderr) => Self::ascii(), Ok(string) if string != "0" => Self::unicode_nocolor(), _ => Self::unicode(), } } } /** Styles for various parts of graphical rendering for the [crate::GraphicalReportHandler]. 
*/ #[derive(Debug, Clone)] pub struct ThemeStyles { pub error: Style, pub warning: Style, pub advice: Style, pub help: Style, pub link: Style, pub linum: Style, pub highlights: Vec<Style>, } fn style() -> Style { Style::new() } impl ThemeStyles { pub fn rgb() -> Self { Self { error: style().fg_rgb::<255, 30, 30>(), warning: style().fg_rgb::<244, 191, 117>(), advice: style().fg_rgb::<106, 159, 181>(), help: style().fg_rgb::<106, 159, 181>(), link: style().fg_rgb::<92, 157, 255>().underline().bold(), linum: style().dimmed(), highlights: vec![ style().fg_rgb::<246, 87, 248>(), style().fg_rgb::<30, 201, 212>(), style().fg_rgb::<145, 246, 111>(), ], } } pub fn ansi() -> Self { Self { error: style().red(), warning: style().yellow(), advice: style().cyan(), help: style().cyan(), link: style().cyan().underline().bold(), linum: style().dimmed(), highlights: vec![ style().red().bold(), style().yellow().bold(), style().cyan().bold(), ], } } pub fn none() -> Self { Self { error: style(), warning: style(), advice: style(), help: style(), link: style(), linum: style(), highlights: vec![style()], } } } #[allow(missing_docs)] #[derive(Debug, Clone, Eq, PartialEq)] pub struct ThemeCharacters { pub hbar: char, pub vbar: char, pub xbar: char, pub vbar_break: char, pub uarrow: char, pub rarrow: char, pub ltop: char, pub mtop: char, pub rtop: char, pub lbot: char, pub rbot: char, pub mbot: char, pub lbox: char, pub rbox: char, pub lcross: char, pub rcross: char, pub underbar: char, pub underline: char, pub error: String, pub warning: String, pub advice: String, } impl ThemeCharacters { pub fn unicode() -> Self { Self { hbar: '─', vbar: '│', xbar: '┼', vbar_break: '·', uarrow: '▲', rarrow: '▶', ltop: '╭', mtop: '┬', rtop: '╮', lbot: '╰', mbot: '┴', rbot: '╯', lbox: '[', rbox: ']', lcross: '├', rcross: '┤', underbar: '┬', underline: '─', error: "×".into(), warning: "⚠".into(), advice: "☞".into(), } } pub fn emoji() -> Self { Self { hbar: '─', vbar: '│', xbar: '┼', vbar_break: '·', 
uarrow: '▲', rarrow: '▶', ltop: '╭', mtop: '┬', rtop: '╮', lbot: '╰', mbot: '┴', rbot: '╯', lbox: '[', rbox: ']', lcross: '├', rcross: '┤', underbar: '┬', underline: '─', error: "💥".into(), warning: "⚠️".into(), advice: "💡".into(), } } pub fn ascii() -> Self { Self { hbar: '-', vbar: '|', xbar: '+', vbar_break: ':', uarrow: '^', rarrow: '>', ltop: ',', mtop: 'v', rtop: '.', lbot: '`', mbot: '^', rbot: '\'', lbox: '[', rbox: ']', lcross: '|', rcross: '|', underbar: '|', underline: '^', error: "x".into(), warning: "!".into(), advice: ">".into(), } } }
use atty::Stream; use owo_colors::Style; /** Theme used by [`GraphicalReportHandler`](crate::GraphicalReportHandler) to render fancy [`Diagnostic`](crate::Diagnostic) reports. A theme consists of two things: the set of characters to be used for drawing, and the [`owo_colors::Style`](https://docs.rs/owo-colors/latest/owo_colors/struct.Style.html)s to be used to paint various items. You can create your own custom graphical theme using this type, or you can use one of the predefined ones using the methods below. */ #[derive(Debug, Clone)] pub struct GraphicalTheme { pub characters: ThemeCharacters, pub styles: ThemeStyles, } impl GraphicalTheme { pub fn ascii() -> Self { Self { characters: ThemeCharacters::ascii(), styles: ThemeStyles::ansi(), } }
pub fn unicode_nocolor() -> Self { Self { characters: ThemeCharacters::unicode(), styles: ThemeStyles::none(), } } pub fn none() -> Self { Self { characters: ThemeCharacters::ascii(), styles: ThemeStyles::none(), } } } impl Default for GraphicalTheme { fn default() -> Self { match std::env::var("NO_COLOR") { _ if !atty::is(Stream::Stdout) || !atty::is(Stream::Stderr) => Self::ascii(), Ok(string) if string != "0" => Self::unicode_nocolor(), _ => Self::unicode(), } } } /** Styles for various parts of graphical rendering for the [crate::GraphicalReportHandler]. */ #[derive(Debug, Clone)] pub struct ThemeStyles { pub error: Style, pub warning: Style, pub advice: Style, pub help: Style, pub link: Style, pub linum: Style, pub highlights: Vec<Style>, } fn style() -> Style { Style::new() } impl ThemeStyles { pub fn rgb() -> Self { Self { error: style().fg_rgb::<255, 30, 30>(), warning: style().fg_rgb::<244, 191, 117>(), advice: style().fg_rgb::<106, 159, 181>(), help: style().fg_rgb::<106, 159, 181>(), link: style().fg_rgb::<92, 157, 255>().underline().bold(), linum: style().dimmed(), highlights: vec![ style().fg_rgb::<246, 87, 248>(), style().fg_rgb::<30, 201, 212>(), style().fg_rgb::<145, 246, 111>(), ], } } pub fn ansi() -> Self { Self { error: style().red(), warning: style().yellow(), advice: style().cyan(), help: style().cyan(), link: style().cyan().underline().bold(), linum: style().dimmed(), highlights: vec![ style().red().bold(), style().yellow().bold(), style().cyan().bold(), ], } } pub fn none() -> Self { Self { error: style(), warning: style(), advice: style(), help: style(), link: style(), linum: style(), highlights: vec![style()], } } } #[allow(missing_docs)] #[derive(Debug, Clone, Eq, PartialEq)] pub struct ThemeCharacters { pub hbar: char, pub vbar: char, pub xbar: char, pub vbar_break: char, pub uarrow: char, pub rarrow: char, pub ltop: char, pub mtop: char, pub rtop: char, pub lbot: char, pub rbot: char, pub mbot: char, pub lbox: char, pub rbox: char, pub 
lcross: char, pub rcross: char, pub underbar: char, pub underline: char, pub error: String, pub warning: String, pub advice: String, } impl ThemeCharacters { pub fn unicode() -> Self { Self { hbar: '─', vbar: '│', xbar: '┼', vbar_break: '·', uarrow: '▲', rarrow: '▶', ltop: '╭', mtop: '┬', rtop: '╮', lbot: '╰', mbot: '┴', rbot: '╯', lbox: '[', rbox: ']', lcross: '├', rcross: '┤', underbar: '┬', underline: '─', error: "×".into(), warning: "⚠".into(), advice: "☞".into(), } } pub fn emoji() -> Self { Self { hbar: '─', vbar: '│', xbar: '┼', vbar_break: '·', uarrow: '▲', rarrow: '▶', ltop: '╭', mtop: '┬', rtop: '╮', lbot: '╰', mbot: '┴', rbot: '╯', lbox: '[', rbox: ']', lcross: '├', rcross: '┤', underbar: '┬', underline: '─', error: "💥".into(), warning: "⚠️".into(), advice: "💡".into(), } } pub fn ascii() -> Self { Self { hbar: '-', vbar: '|', xbar: '+', vbar_break: ':', uarrow: '^', rarrow: '>', ltop: ',', mtop: 'v', rtop: '.', lbot: '`', mbot: '^', rbot: '\'', lbox: '[', rbox: ']', lcross: '|', rcross: '|', underbar: '|', underline: '^', error: "x".into(), warning: "!".into(), advice: ">".into(), } } }
pub fn unicode() -> Self { Self { characters: ThemeCharacters::unicode(), styles: ThemeStyles::rgb(), } }
function_block-full_function
[]
Rust
cranelift-codegen/src/topo_order.rs
jgouly/cranelift-1
470372f0b55cd51199466669cb87d9e038a3f459
use crate::dominator_tree::DominatorTree; use crate::entity::EntitySet; use crate::ir::{Block, Layout}; use alloc::vec::Vec; pub struct TopoOrder { preferred: Vec<Block>, next: usize, visited: EntitySet<Block>, stack: Vec<Block>, } impl TopoOrder { pub fn new() -> Self { Self { preferred: Vec::new(), next: 0, visited: EntitySet::new(), stack: Vec::new(), } } pub fn clear(&mut self) { self.preferred.clear(); self.next = 0; self.visited.clear(); self.stack.clear(); } pub fn reset<Blocks>(&mut self, preferred: Blocks) where Blocks: IntoIterator<Item = Block>, { self.preferred.clear(); self.preferred.extend(preferred); self.next = 0; self.visited.clear(); self.stack.clear(); } pub fn next(&mut self, layout: &Layout, domtree: &DominatorTree) -> Option<Block> { self.visited.resize(layout.block_capacity()); while self.stack.is_empty() { match self.preferred.get(self.next).cloned() { None => return None, Some(mut block) => { self.next += 1; while self.visited.insert(block) { self.stack.push(block); match domtree.idom(block) { Some(idom) => { block = layout.inst_block(idom).expect("idom not in layout") } None => break, } } } } } self.stack.pop() } } #[cfg(test)] mod tests { use super::*; use crate::cursor::{Cursor, FuncCursor}; use crate::dominator_tree::DominatorTree; use crate::flowgraph::ControlFlowGraph; use crate::ir::{Function, InstBuilder}; use core::iter; #[test] fn empty() { let func = Function::new(); let cfg = ControlFlowGraph::with_function(&func); let domtree = DominatorTree::with_function(&func, &cfg); let mut topo = TopoOrder::new(); assert_eq!(topo.next(&func.layout, &domtree), None); topo.reset(func.layout.blocks()); assert_eq!(topo.next(&func.layout, &domtree), None); } #[test] fn simple() { let mut func = Function::new(); let block0 = func.dfg.make_block(); let block1 = func.dfg.make_block(); { let mut cur = FuncCursor::new(&mut func); cur.insert_block(block0); cur.ins().jump(block1, &[]); cur.insert_block(block1); cur.ins().jump(block1, &[]); } let cfg = 
ControlFlowGraph::with_function(&func); let domtree = DominatorTree::with_function(&func, &cfg); let mut topo = TopoOrder::new(); topo.reset(iter::once(block1)); assert_eq!(topo.next(&func.layout, &domtree), Some(block0)); assert_eq!(topo.next(&func.layout, &domtree), Some(block1)); assert_eq!(topo.next(&func.layout, &domtree), None); } }
use crate::dominator_tree::DominatorTree; use crate::entity::EntitySet; use crate::ir::{Block, Layout}; use alloc::vec::Vec; pub struct TopoOrder { preferred: Vec<Block>, next: usize, visited: EntitySet<Block>, stack: Vec<Block>, } impl TopoOrder { pub fn new() -> Self { Self { preferred: Vec::new(), next: 0, visited: EntitySet::new(), stack: Vec::new(), } } pub fn clear(&mut self) { self.preferred.clear(); self.next = 0; self.visited.clear(); self.stack.clear(); } pub fn reset<Blocks>(&mut self, preferred: Blocks) where Blocks: IntoIterator<Item = Block>, { self.preferred.clear(); self.preferred.extend(preferred); self.next = 0; self.visited.clear(); self.stack.clear(); } pub fn next(&mut self, layout: &Layout, domtree: &DominatorTree) -> Option<Block> { self.visited.resize(layout.block_capacity()); while self.stack.is_empty() { match self.preferred.ge
} } } self.stack.pop() } } #[cfg(test)] mod tests { use super::*; use crate::cursor::{Cursor, FuncCursor}; use crate::dominator_tree::DominatorTree; use crate::flowgraph::ControlFlowGraph; use crate::ir::{Function, InstBuilder}; use core::iter; #[test] fn empty() { let func = Function::new(); let cfg = ControlFlowGraph::with_function(&func); let domtree = DominatorTree::with_function(&func, &cfg); let mut topo = TopoOrder::new(); assert_eq!(topo.next(&func.layout, &domtree), None); topo.reset(func.layout.blocks()); assert_eq!(topo.next(&func.layout, &domtree), None); } #[test] fn simple() { let mut func = Function::new(); let block0 = func.dfg.make_block(); let block1 = func.dfg.make_block(); { let mut cur = FuncCursor::new(&mut func); cur.insert_block(block0); cur.ins().jump(block1, &[]); cur.insert_block(block1); cur.ins().jump(block1, &[]); } let cfg = ControlFlowGraph::with_function(&func); let domtree = DominatorTree::with_function(&func, &cfg); let mut topo = TopoOrder::new(); topo.reset(iter::once(block1)); assert_eq!(topo.next(&func.layout, &domtree), Some(block0)); assert_eq!(topo.next(&func.layout, &domtree), Some(block1)); assert_eq!(topo.next(&func.layout, &domtree), None); } }
t(self.next).cloned() { None => return None, Some(mut block) => { self.next += 1; while self.visited.insert(block) { self.stack.push(block); match domtree.idom(block) { Some(idom) => { block = layout.inst_block(idom).expect("idom not in layout") } None => break, } }
function_block-random_span
[ { "content": "/// Compute the stack frame layout.\n\n///\n\n/// Determine the total size of this stack frame and assign offsets to all `Spill` and `Explicit`\n\n/// stack slots.\n\n///\n\n/// The total frame size will be a multiple of `alignment` which must be a power of two, unless the\n\n/// function doesn't perform any call.\n\n///\n\n/// Returns the total stack frame size which is also saved in `frame.frame_size`.\n\n///\n\n/// If the stack frame is too big, returns an `ImplLimitExceeded` error.\n\npub fn layout_stack(\n\n frame: &mut StackSlots,\n\n is_leaf: bool,\n\n alignment: StackSize,\n\n) -> CodegenResult<StackSize> {\n\n // Each object and the whole stack frame must fit in 2 GB such that any relative offset within\n\n // the frame fits in a `StackOffset`.\n\n let max_size = StackOffset::max_value() as StackSize;\n\n debug_assert!(alignment.is_power_of_two() && alignment <= max_size);\n\n\n\n // We assume a stack that grows toward lower addresses as implemented by modern ISAs. The\n\n // stack layout from high to low addresses will be:\n\n //\n\n // 1. incoming arguments.\n\n // 2. spills + explicits + struct returns.\n\n // 3. outgoing arguments.\n\n //\n\n // The incoming arguments can have both positive and negative offsets. 
A negative offset\n\n // incoming arguments is usually the x86 return address pushed by the call instruction, but\n\n // it can also be fixed stack slots pushed by an externally generated prologue.\n", "file_path": "cranelift-codegen/src/stack_layout.rs", "rank": 0, "score": 302749.1217709638 }, { "content": "/// A helper to extract all the `Type` listings of each variable in `params`\n\n/// for only parameters the return true for `is_wasm`, typically paired with\n\n/// `is_wasm_return` or `is_wasm_parameter`.\n\npub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool) -> Vec<Type> {\n\n let mut ret = Vec::with_capacity(params.len());\n\n for (i, param) in params.iter().enumerate() {\n\n if is_wasm(i) {\n\n ret.push(param.value_type);\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "cranelift-wasm/src/code_translator.rs", "rank": 1, "score": 246916.54000008677 }, { "content": "/// Translating Block parameters into serializable parameters.\n\npub fn populate_params(func: &Function, block: Block) -> Vec<String> {\n\n let mut ser_vec: Vec<String> = Vec::new();\n\n let parameters = func.dfg.block_params(block);\n\n for param in parameters {\n\n ser_vec.push(param.to_string());\n\n }\n\n ser_vec\n\n}\n\n\n\n/// Serializable Data Flow Graph.\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct SerDataFlowGraph {\n\n blocks: Vec<SerBlock>,\n\n}\n\n\n", "file_path": "cranelift-serde/src/serde_clif_json.rs", "rank": 2, "score": 237649.1780876085 }, { "content": "pub fn populate_inst(func: &Function, block: Block) -> Vec<SerInst> {\n\n let mut ser_vec: Vec<SerInst> = Vec::new();\n\n let ret_iter = func.layout.block_insts(block);\n\n for inst in ret_iter {\n\n let ser_inst: SerInst = SerInst::new(inst, &func);\n\n ser_vec.push(ser_inst);\n\n }\n\n ser_vec\n\n}\n\n\n", "file_path": "cranelift-serde/src/serde_clif_json.rs", "rank": 3, "score": 235020.27437304385 }, { "content": "pub fn split_block_params(func: &mut ir::Function, cfg: &ControlFlowGraph, 
block: Block) {\n\n let pos = &mut FuncCursor::new(func).at_top(block);\n\n let block_params = pos.func.dfg.block_params(block);\n\n\n\n // Add further splittable types here.\n\n fn type_requires_splitting(ty: Type) -> bool {\n\n ty == ir::types::I128\n\n }\n\n\n\n // A shortcut. If none of the param types require splitting, exit now. This helps because\n\n // the loop below necessarily has to copy the block params into a new vector, so it's better to\n\n // avoid doing so when possible.\n\n if !block_params\n\n .iter()\n\n .any(|block_param| type_requires_splitting(pos.func.dfg.value_type(*block_param)))\n\n {\n\n return;\n\n }\n\n\n\n let mut repairs = Vec::new();\n", "file_path": "cranelift-codegen/src/legalizer/split.rs", "rank": 4, "score": 227969.6591488701 }, { "content": "/// Write out the basic block header, outdented:\n\n///\n\n/// block1:\n\n/// block1(v1: i32):\n\n/// block10(v4: f64, v5: b1):\n\n///\n\npub fn write_block_header(\n\n w: &mut dyn Write,\n\n func: &Function,\n\n isa: Option<&dyn TargetIsa>,\n\n block: Block,\n\n indent: usize,\n\n) -> fmt::Result {\n\n // The `indent` is the instruction indentation. 
block headers are 4 spaces out from that.\n\n write!(w, \"{1:0$}{2}\", indent - 4, \"\", block)?;\n\n\n\n let regs = isa.map(TargetIsa::register_info);\n\n let regs = regs.as_ref();\n\n\n\n let mut args = func.dfg.block_params(block).iter().cloned();\n\n match args.next() {\n\n None => return writeln!(w, \":\"),\n\n Some(arg) => {\n\n write!(w, \"(\")?;\n\n write_arg(w, func, regs, arg)?;\n\n }\n\n }\n\n // Remaining arguments.\n\n for arg in args {\n\n write!(w, \", \")?;\n\n write_arg(w, func, regs, arg)?;\n\n }\n\n writeln!(w, \"):\")\n\n}\n\n\n", "file_path": "cranelift-codegen/src/write.rs", "rank": 5, "score": 226036.1325406057 }, { "content": "/// Get a reference to the stack-pointer register.\n\npub fn stack_reg() -> Reg {\n\n // XSP (stack) and XZR (zero) are logically different registers which have\n\n // the same hardware encoding, and whose meaning, in real arm64\n\n // instructions, is context-dependent. For convenience of\n\n // universe-construction and for correct printing, we make them be two\n\n // different real registers.\n\n Reg::new_real(\n\n RegClass::I64,\n\n /* enc = */ 31,\n\n /* index = */ SP_REG_INDEX,\n\n )\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/arm64/inst/regs.rs", "rank": 6, "score": 210233.71025992994 }, { "content": "/// Serialize all parts of the Cranelift Block data structure, this includes name, parameters, and\n\n/// instructions.\n\npub fn populate_blocks(func: &Function) -> Vec<SerBlock> {\n\n let mut block_vec: Vec<SerBlock> = Vec::new();\n\n for block in func.layout.blocks() {\n\n let mut ser_block: SerBlock = SerBlock::new(block.to_string());\n\n ser_block.params = populate_params(&func, block);\n\n ser_block.insts = populate_inst(&func, block);\n\n block_vec.push(ser_block);\n\n }\n\n block_vec\n\n}\n\n\n\n/// Serializable Cranelift IR data flow graph, including all blocks.\n\nimpl SerDataFlowGraph {\n\n pub fn create_new(func: &Function) -> Self {\n\n Self {\n\n blocks: populate_blocks(func),\n\n }\n\n 
}\n\n\n\n pub fn new(func: &Function) -> Self {\n", "file_path": "cranelift-serde/src/serde_clif_json.rs", "rank": 7, "score": 203621.91842774942 }, { "content": "/// A primitive hash function for matching opcodes.\n\npub fn simple_hash(s: &str) -> usize {\n\n let mut h: u32 = 5381;\n\n for c in s.chars() {\n\n h = (h ^ c as u32).wrapping_add(h.rotate_right(6));\n\n }\n\n h as usize\n\n}\n\n\n\n/// Compute an open addressed, quadratically probed hash table containing\n\n/// `items`. The returned table is a list containing the elements of the\n\n/// iterable `items` and `None` in unused slots.\n", "file_path": "cranelift-codegen/shared/src/constant_hash.rs", "rank": 8, "score": 201823.80120253225 }, { "content": "/// Get a writable reference to the stack-pointer register.\n\npub fn writable_stack_reg() -> Writable<Reg> {\n\n Writable::from_reg(stack_reg())\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/arm64/inst/regs.rs", "rank": 9, "score": 199446.6745903882 }, { "content": "/// Create a `Block` with the given Wasm parameters.\n\npub fn block_with_params<PE: TargetEnvironment + ?Sized>(\n\n builder: &mut FunctionBuilder,\n\n params: &[wasmparser::Type],\n\n environ: &PE,\n\n) -> WasmResult<ir::Block> {\n\n let block = builder.create_block();\n\n for ty in params.iter() {\n\n match ty {\n\n wasmparser::Type::I32 => {\n\n builder.append_block_param(block, ir::types::I32);\n\n }\n\n wasmparser::Type::I64 => {\n\n builder.append_block_param(block, ir::types::I64);\n\n }\n\n wasmparser::Type::F32 => {\n\n builder.append_block_param(block, ir::types::F32);\n\n }\n\n wasmparser::Type::F64 => {\n\n builder.append_block_param(block, ir::types::F64);\n\n }\n", "file_path": "cranelift-wasm/src/translation_utils.rs", "rank": 10, "score": 198579.57189888047 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct BlockNode {\n\n prev: PackedOption<Block>,\n\n next: PackedOption<Block>,\n\n first_inst: PackedOption<Inst>,\n\n last_inst: PackedOption<Inst>,\n\n seq: 
SequenceNumber,\n\n}\n\n\n\n/// Iterate over blocks in layout order. See `Layout::blocks()`.\n\npub struct Blocks<'f> {\n\n layout: &'f Layout,\n\n next: Option<Block>,\n\n}\n\n\n\nimpl<'f> Iterator for Blocks<'f> {\n\n type Item = Block;\n\n\n\n fn next(&mut self) -> Option<Block> {\n\n match self.next {\n\n Some(block) => {\n", "file_path": "cranelift-codegen/src/ir/layout.rs", "rank": 11, "score": 198215.78918559712 }, { "content": "/// Look for a directive in a comment string.\n\n/// The directive is of the form \"foo:\" and should follow the leading `;` in the comment:\n\n///\n\n/// ; dominates: block3 block4\n\n///\n\n/// Return the comment text following the directive.\n\npub fn match_directive<'a>(comment: &'a str, directive: &str) -> Option<&'a str> {\n\n assert!(\n\n directive.ends_with(':'),\n\n \"Directive must include trailing colon\"\n\n );\n\n let text = comment.trim_start_matches(';').trim_start();\n\n if text.starts_with(directive) {\n\n Some(text[directive.len()..].trim())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "cranelift-filetests/src/match_directive.rs", "rank": 12, "score": 190534.11070548624 }, { "content": "pub fn run(\n\n files: Vec<String>,\n\n flag_verbose: bool,\n\n flag_just_decode: bool,\n\n flag_check_translation: bool,\n\n flag_print: bool,\n\n flag_print_disasm: bool,\n\n flag_set: &[String],\n\n flag_triple: &str,\n\n flag_print_size: bool,\n\n flag_report_times: bool,\n\n flag_calc_value_ranges: bool,\n\n) -> Result<(), String> {\n\n let parsed = parse_sets_and_triple(flag_set, flag_triple)?;\n\n\n\n for filename in files {\n\n let path = Path::new(&filename);\n\n let name = String::from(path.as_os_str().to_string_lossy());\n\n handle_module(\n\n flag_verbose,\n", "file_path": "src/wasm.rs", "rank": 13, "score": 187995.71640973928 }, { "content": "pub fn run(\n\n filename: &str,\n\n flag_set: &[String],\n\n flag_isa: &str,\n\n verbose: bool,\n\n) -> Result<(), String> {\n\n let parsed = 
parse_sets_and_triple(flag_set, flag_isa)?;\n\n let fisa = parsed.as_fisa();\n\n\n\n let path = Path::new(&filename).to_path_buf();\n\n\n\n let buffer = read_to_string(&path).map_err(|e| format!(\"{}: {}\", filename, e))?;\n\n let test_file =\n\n parse_test(&buffer, ParseOptions::default()).map_err(|e| format!(\"{}: {}\", filename, e))?;\n\n\n\n // If we have an isa from the command-line, use that. Otherwise if the\n\n // file contains a unique isa, use that.\n\n let isa = if let Some(isa) = fisa.isa {\n\n isa\n\n } else if let Some(isa) = test_file.isa_spec.unique_isa() {\n", "file_path": "src/bugpoint.rs", "rank": 14, "score": 187995.71640973928 }, { "content": "pub fn print_all(\n\n isa: &dyn TargetIsa,\n\n mem: &[u8],\n\n code_size: u32,\n\n rodata_size: u32,\n\n relocs: &PrintRelocs,\n\n traps: &PrintTraps,\n\n stackmaps: &PrintStackmaps,\n\n) -> Result<(), String> {\n\n print_bytes(&mem);\n\n print_disassembly(isa, &mem[0..code_size as usize])?;\n\n print_readonly_data(&mem[code_size as usize..(code_size + rodata_size) as usize]);\n\n println!(\"\\n{}\\n{}\\n{}\", &relocs.text, &traps.text, &stackmaps.text);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/disasm.rs", "rank": 15, "score": 187995.71640973928 }, { "content": "pub fn run(\n\n files: Vec<String>,\n\n flag_print: bool,\n\n flag_disasm: bool,\n\n flag_report_times: bool,\n\n flag_set: &[String],\n\n flag_isa: &str,\n\n) -> Result<(), String> {\n\n let parsed = parse_sets_and_triple(flag_set, flag_isa)?;\n\n\n\n for filename in files {\n\n let path = Path::new(&filename);\n\n let name = String::from(path.as_os_str().to_string_lossy());\n\n handle_module(\n\n flag_print,\n\n flag_disasm,\n\n flag_report_times,\n\n &path.to_path_buf(),\n\n &name,\n\n parsed.as_fisa(),\n\n )?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/compile.rs", "rank": 16, "score": 187995.71640973928 }, { "content": "fn block_count(func: &Function) -> usize {\n\n func.layout.blocks().count()\n\n}\n\n\n", "file_path": 
"src/bugpoint.rs", "rank": 17, "score": 183912.41023529577 }, { "content": "/// Compute the final block order.\n\npub fn compute_final_block_order<I: VCodeInst>(vcode: &VCode<I>) -> Vec<BlockIndex> {\n\n let mut rpo = BlockRPO::new(vcode);\n\n rpo.visit(vcode, vcode.entry());\n\n rpo.rpo()\n\n}\n", "file_path": "cranelift-codegen/src/machinst/blockorder.rs", "rank": 18, "score": 182378.01332788263 }, { "content": "/// Parse \"set\" and \"triple\" commands.\n\npub fn parse_sets_and_triple(\n\n flag_set: &[String],\n\n flag_triple: &str,\n\n) -> Result<OwnedFlagsOrIsa, String> {\n\n let mut flag_builder = settings::builder();\n\n parse_options(\n\n flag_set.iter().map(|x| x.as_str()),\n\n &mut flag_builder,\n\n Location { line_number: 0 },\n\n )\n\n .map_err(|err| err.to_string())?;\n\n\n\n let mut words = flag_triple.trim().split_whitespace();\n\n // Look for `target foo`.\n\n if let Some(triple_name) = words.next() {\n\n let triple = match Triple::from_str(triple_name) {\n\n Ok(triple) => triple,\n\n Err(parse_error) => return Err(parse_error.to_string()),\n\n };\n\n let builder_or_backend = isa::lookup(triple).map_err(|err| match err {\n", "file_path": "src/utils.rs", "rank": 19, "score": 181956.30055495835 }, { "content": "/// Performs the LICM pass by detecting loops within the CFG and moving\n\n/// loop-invariant instructions out of them.\n\n/// Changes the CFG and domtree in-place during the operation.\n\npub fn do_licm(\n\n isa: &dyn TargetIsa,\n\n func: &mut Function,\n\n cfg: &mut ControlFlowGraph,\n\n domtree: &mut DominatorTree,\n\n loop_analysis: &mut LoopAnalysis,\n\n) {\n\n let _tt = timing::licm();\n\n debug_assert!(cfg.is_valid());\n\n debug_assert!(domtree.is_valid());\n\n debug_assert!(loop_analysis.is_valid());\n\n\n\n for lp in loop_analysis.loops() {\n\n // For each loop that we want to optimize we determine the set of loop-invariant\n\n // instructions\n\n let invariant_insts = remove_loop_invariant_instructions(lp, func, cfg, 
loop_analysis);\n\n // Then we create the loop's pre-header and fill it with the invariant instructions\n\n // Then we remove the invariant instructions from the loop body\n\n if !invariant_insts.is_empty() {\n\n // If the loop has a natural pre-header we use it, otherwise we create it.\n", "file_path": "cranelift-codegen/src/licm.rs", "rank": 20, "score": 181956.30055495835 }, { "content": "#[allow(clippy::float_arithmetic)]\n\npub fn generate_table<'cont, T, I: iter::Iterator<Item = &'cont T>, H: Fn(&T) -> usize>(\n\n items: I,\n\n num_items: usize,\n\n hash_function: H,\n\n) -> Vec<Option<&'cont T>> {\n\n let size = (1.20 * num_items as f64) as usize;\n\n\n\n // Probing code's stop condition relies on the table having one vacant entry at least.\n\n let size = if size.is_power_of_two() {\n\n size * 2\n\n } else {\n\n size.next_power_of_two()\n\n };\n\n\n\n let mut table = vec![None; size];\n\n\n\n for i in items {\n\n let mut h = hash_function(&i) % size;\n\n let mut s = 0;\n\n while table[h].is_some() {\n", "file_path": "cranelift-codegen/shared/src/constant_hash.rs", "rank": 21, "score": 179922.71730366038 }, { "content": "/// Write `func` to `w` as equivalent text.\n\n/// Use `isa` to emit ISA-dependent annotations.\n\npub fn write_function(\n\n w: &mut dyn Write,\n\n func: &Function,\n\n annotations: &DisplayFunctionAnnotations,\n\n) -> fmt::Result {\n\n decorate_function(&mut PlainWriter, w, func, annotations)\n\n}\n\n\n", "file_path": "cranelift-codegen/src/write.rs", "rank": 22, "score": 179185.69972773295 }, { "content": "/// Split `value` into halves using the `vsplit` semantics. 
Do this by reusing existing values if\n\n/// possible.\n\npub fn vsplit(\n\n func: &mut ir::Function,\n\n cfg: &ControlFlowGraph,\n\n pos: CursorPosition,\n\n srcloc: ir::SourceLoc,\n\n value: Value,\n\n) -> (Value, Value) {\n\n split_any(func, cfg, pos, srcloc, value, Opcode::Vconcat)\n\n}\n\n\n", "file_path": "cranelift-codegen/src/legalizer/split.rs", "rank": 23, "score": 179185.69972773295 }, { "content": "/// Split `value` into two values using the `isplit` semantics. Do this by reusing existing values\n\n/// if possible.\n\npub fn isplit(\n\n func: &mut ir::Function,\n\n cfg: &ControlFlowGraph,\n\n pos: CursorPosition,\n\n srcloc: ir::SourceLoc,\n\n value: Value,\n\n) -> (Value, Value) {\n\n split_any(func, cfg, pos, srcloc, value, Opcode::Iconcat)\n\n}\n\n\n", "file_path": "cranelift-codegen/src/legalizer/split.rs", "rank": 24, "score": 179185.64159186982 }, { "content": "/// Used for 'pass' subcommand.\n\n/// Commands are interpreted as test and executed.\n\n///\n\n/// Directories are scanned recursively for test cases ending in `.clif`.\n\n///\n\npub fn run_passes(\n\n verbose: bool,\n\n report_times: bool,\n\n passes: &[String],\n\n target: &str,\n\n file: &str,\n\n) -> TestResult {\n\n let mut runner = TestRunner::new(verbose, /* report_times */ false);\n\n\n\n let path = Path::new(file);\n\n if path == Path::new(\"-\") || path.is_file() {\n\n runner.push_test(path);\n\n } else {\n\n runner.push_dir(path);\n\n }\n\n\n\n let result = runner.run_passes(passes, target);\n\n if report_times {\n\n println!(\"{}\", timing::take_current());\n\n }\n\n result\n\n}\n\n\n", "file_path": "cranelift-filetests/src/lib.rs", "rank": 25, "score": 179185.47398238117 }, { "content": "/// Write the operands of `inst` to `w` with a prepended space.\n\npub fn write_operands(\n\n w: &mut dyn Write,\n\n dfg: &DataFlowGraph,\n\n isa: Option<&dyn TargetIsa>,\n\n inst: Inst,\n\n) -> fmt::Result {\n\n let pool = &dfg.value_lists;\n\n use 
crate::ir::instructions::InstructionData::*;\n\n match dfg[inst] {\n\n Unary { arg, .. } => write!(w, \" {}\", arg),\n\n UnaryImm { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryIeee32 { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryIeee64 { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryBool { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryGlobalValue { global_value, .. } => write!(w, \" {}\", global_value),\n\n Binary { args, .. } => write!(w, \" {}, {}\", args[0], args[1]),\n\n BinaryImm { arg, imm, .. } => write!(w, \" {}, {}\", arg, imm),\n\n Ternary { args, .. } => write!(w, \" {}, {}, {}\", args[0], args[1], args[2]),\n\n MultiAry { ref args, .. } => {\n\n if args.is_empty() {\n", "file_path": "cranelift-codegen/src/write.rs", "rank": 26, "score": 179179.908153885 }, { "content": "/// A helper for bitcasting a sequence of values (e.g. function arguments). If a value is a\n\n/// vector type that does not match its expected type, this will modify the value in place to point\n\n/// to the result of a `raw_bitcast`. This conversion is necessary to translate Wasm code that\n\n/// uses `V128` as function parameters (or implicitly in block parameters) and still use specific\n\n/// CLIF types (e.g. 
`I32X4`) in the function body.\n\npub fn bitcast_arguments(\n\n arguments: &mut [Value],\n\n expected_types: &[Type],\n\n builder: &mut FunctionBuilder,\n\n) {\n\n assert_eq!(arguments.len(), expected_types.len());\n\n for (i, t) in expected_types.iter().enumerate() {\n\n if t.is_vector() {\n\n assert!(\n\n builder.func.dfg.value_type(arguments[i]).is_vector(),\n\n \"unexpected type mismatch: expected {}, argument {} was actually of type {}\",\n\n t,\n\n arguments[i],\n\n builder.func.dfg.value_type(arguments[i])\n\n );\n\n arguments[i] = optionally_bitcast_vector(arguments[i], *t, builder)\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift-wasm/src/code_translator.rs", "rank": 27, "score": 176563.43805850425 }, { "content": "/// Verify value locations for `func`.\n\n///\n\n/// After register allocation, every value must be assigned to a location - either a register or a\n\n/// stack slot. These locations must be compatible with the constraints described by the\n\n/// instruction encoding recipes.\n\n///\n\n/// Values can be temporarily diverted to a different location by using the `regmove`, `regspill`,\n\n/// and `regfill` instructions, but only inside an block.\n\n///\n\n/// If a liveness analysis is provided, it is used to verify that there are no active register\n\n/// diversions across control flow edges.\n\npub fn verify_locations(\n\n isa: &dyn isa::TargetIsa,\n\n func: &ir::Function,\n\n cfg: &ControlFlowGraph,\n\n liveness: Option<&Liveness>,\n\n errors: &mut VerifierErrors,\n\n) -> VerifierStepResult<()> {\n\n let _tt = timing::verify_locations();\n\n let verifier = LocationVerifier {\n\n isa,\n\n func,\n\n reginfo: isa.register_info(),\n\n encinfo: isa.encoding_info(),\n\n cfg,\n\n liveness,\n\n };\n\n verifier.check_constraints(errors)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-codegen/src/verifier/locations.rs", "rank": 28, "score": 176562.8294117054 }, { "content": "/// Verify liveness information for `func`.\n\n///\n\n/// The provided control 
flow graph is assumed to be sound.\n\n///\n\n/// - All values in the program must have a live range.\n\n/// - The live range def point must match where the value is defined.\n\n/// - The live range must reach all uses.\n\n/// - When a live range is live-in to an block, it must be live at all the predecessors.\n\n/// - The live range affinity must be compatible with encoding constraints.\n\n///\n\n/// We don't verify that live ranges are minimal. This would require recomputing live ranges for\n\n/// all values.\n\npub fn verify_liveness(\n\n isa: &dyn TargetIsa,\n\n func: &Function,\n\n cfg: &ControlFlowGraph,\n\n liveness: &Liveness,\n\n errors: &mut VerifierErrors,\n\n) -> VerifierStepResult<()> {\n\n let _tt = timing::verify_liveness();\n\n let verifier = LivenessVerifier {\n\n isa,\n\n func,\n\n cfg,\n\n liveness,\n\n };\n\n verifier.check_blocks(errors)?;\n\n verifier.check_insts(errors)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-codegen/src/verifier/liveness.rs", "rank": 29, "score": 176561.02754522604 }, { "content": "// The emit_stackmaps() function analyzes each instruction to retrieve the liveness of\n\n// the defs and operands by traversing a function's blocks in layout order.\n\npub fn emit_stackmaps(\n\n func: &mut Function,\n\n domtree: &DominatorTree,\n\n liveness: &Liveness,\n\n tracker: &mut LiveValueTracker,\n\n isa: &dyn TargetIsa,\n\n) {\n\n let mut curr = func.layout.entry_block();\n\n\n\n while let Some(block) = curr {\n\n tracker.block_top(block, &func.dfg, liveness, &func.layout, domtree);\n\n tracker.drop_dead_params();\n\n let mut pos = FuncCursor::new(func);\n\n\n\n // From the top of the block, step through the instructions.\n\n pos.goto_top(block);\n\n\n\n while let Some(inst) = pos.next_inst() {\n\n if let InstructionData::Trap {\n\n code: TrapCode::Interrupt,\n", "file_path": "cranelift-codegen/src/regalloc/safepoint.rs", "rank": 30, "score": 176560.08333939995 }, { "content": "/// Relax branches and compute the final layout of 
block headers in `func`.\n\n///\n\n/// Fill in the `func.offsets` table so the function is ready for binary emission.\n\npub fn relax_branches(\n\n func: &mut Function,\n\n _cfg: &mut ControlFlowGraph,\n\n _domtree: &mut DominatorTree,\n\n isa: &dyn TargetIsa,\n\n) -> CodegenResult<CodeInfo> {\n\n let _tt = timing::relax_branches();\n\n\n\n let encinfo = isa.encoding_info();\n\n\n\n // Clear all offsets so we can recognize blocks that haven't been visited yet.\n\n func.offsets.clear();\n\n func.offsets.resize(func.dfg.num_blocks());\n\n\n\n // Start by removing redundant jumps.\n\n fold_redundant_jumps(func, _cfg, _domtree);\n\n\n\n // Convert jumps to fallthrough instructions where possible.\n\n fallthroughs(func);\n\n\n", "file_path": "cranelift-codegen/src/binemit/relaxation.rs", "rank": 31, "score": 176559.8667794984 }, { "content": "/// Verify conventional SSA form for `func`.\n\n///\n\n/// Conventional SSA form is represented in Cranelift with the help of virtual registers:\n\n///\n\n/// - Two values are said to be *PHI-related* if one is an block argument and the other is passed as\n\n/// a branch argument in a location that matches the first value.\n\n/// - PHI-related values must belong to the same virtual register.\n\n/// - Two values in the same virtual register must not have overlapping live ranges.\n\n///\n\n/// Additionally, we verify this property of virtual registers:\n\n///\n\n/// - The values in a virtual register are topologically ordered w.r.t. dominance.\n\n///\n\n/// We don't verify that virtual registers are minimal. 
Minimal CSSA is not required.\n\npub fn verify_cssa(\n\n func: &Function,\n\n cfg: &ControlFlowGraph,\n\n domtree: &DominatorTree,\n\n liveness: &Liveness,\n\n virtregs: &VirtRegs,\n\n errors: &mut VerifierErrors,\n\n) -> VerifierStepResult<()> {\n\n let _tt = timing::verify_cssa();\n\n\n\n let mut preorder = DominatorTreePreorder::new();\n\n preorder.compute(domtree, &func.layout);\n\n\n\n let verifier = CssaVerifier {\n\n func,\n\n cfg,\n\n domtree,\n\n virtregs,\n\n liveness,\n\n preorder,\n\n };\n\n verifier.check_virtregs(errors)?;\n\n verifier.check_cssa(errors)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-codegen/src/verifier/cssa.rs", "rank": 32, "score": 176556.4206361621 }, { "content": "/// Returns the base size of the Recipe, assuming it's fixed. This is the default for most\n\n/// encodings; others can be variable and longer than this base size, depending on the registers\n\n/// they're using and use a different function, specific per platform.\n\npub fn base_size(\n\n sizing: &RecipeSizing,\n\n _: Encoding,\n\n _: Inst,\n\n _: &RegDiversions,\n\n _: &Function,\n\n) -> u8 {\n\n sizing.base_size\n\n}\n\n\n\n/// Code size information for an encoding recipe.\n\n///\n\n/// Encoding recipes may have runtime-determined instruction size.\n\npub struct RecipeSizing {\n\n /// Minimum size in bytes of instructions encoded with this recipe.\n\n pub base_size: u8,\n\n\n\n /// Method computing the instruction's real size, given inputs and outputs.\n\n pub compute_size: SizeCalculatorFn,\n\n\n", "file_path": "cranelift-codegen/src/isa/encoding.rs", "rank": 33, "score": 176555.0735199579 }, { "content": "/// Verify that CPU flags are used correctly.\n\n///\n\n/// The value types `iflags` and `fflags` represent CPU flags which usually live in a\n\n/// special-purpose register, so they can't be used as freely as other value types that can live in\n\n/// any register.\n\n///\n\n/// We verify the following conditions:\n\n///\n\n/// - At most one flags value can be 
live at a time.\n\n/// - A flags value can not be live across an instruction that clobbers the flags.\n\n///\n\n///\n\npub fn verify_flags(\n\n func: &ir::Function,\n\n cfg: &ControlFlowGraph,\n\n isa: Option<&dyn isa::TargetIsa>,\n\n errors: &mut VerifierErrors,\n\n) -> VerifierStepResult<()> {\n\n let _tt = timing::verify_flags();\n\n if isa.is_none() || isa.unwrap().get_mach_backend().is_none() {\n\n let mut verifier = FlagsVerifier {\n\n func,\n\n cfg,\n\n encinfo: isa.map(|isa| isa.encoding_info()),\n\n livein: SecondaryMap::new(),\n\n };\n\n verifier.check(errors)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "cranelift-codegen/src/verifier/flags.rs", "rank": 34, "score": 176554.50139139063 }, { "content": "pub fn run(\n\n isa: &dyn TargetIsa,\n\n func: &mut Function,\n\n cfg: &mut ControlFlowGraph,\n\n domtree: &mut DominatorTree,\n\n topo: &mut TopoOrder,\n\n) {\n\n let mut ctx = Context {\n\n has_new_blocks: false,\n\n cur: EncCursor::new(func, isa),\n\n domtree,\n\n topo,\n\n cfg,\n\n };\n\n ctx.run()\n\n}\n\n\n", "file_path": "cranelift-codegen/src/regalloc/branch_splitting.rs", "rank": 35, "score": 176548.75494350726 }, { "content": "/// Expand a `call` instruction. 
This lowers it to a `call_indirect`, which\n\n/// is only done if the ABI doesn't support direct calls.\n\npub fn expand_call(\n\n inst: ir::Inst,\n\n func: &mut ir::Function,\n\n _cfg: &mut ControlFlowGraph,\n\n isa: &dyn TargetIsa,\n\n) {\n\n // Unpack the instruction.\n\n let (func_ref, old_args) = match func.dfg[inst] {\n\n ir::InstructionData::Call {\n\n opcode,\n\n ref args,\n\n func_ref,\n\n } => {\n\n debug_assert_eq!(opcode, ir::Opcode::Call);\n\n (func_ref, args.clone())\n\n }\n\n _ => panic!(\"Wanted call: {}\", func.dfg.display_inst(inst, None)),\n\n };\n\n\n\n let ptr_ty = isa.pointer_type();\n", "file_path": "cranelift-codegen/src/legalizer/call.rs", "rank": 36, "score": 176548.75494350726 }, { "content": "/// Insert ABI conversion code before and after the call instruction at `pos`.\n\n///\n\n/// Instructions inserted before the call will compute the appropriate ABI values for the\n\n/// callee's new ABI-legalized signature. The function call arguments are rewritten in place to\n\n/// match the new signature.\n\n///\n\n/// Instructions will be inserted after the call to convert returned ABI values back to the\n\n/// original return values. 
The call's result values will be adapted to match the new signature.\n\n///\n\n/// Returns `true` if any instructions were inserted.\n\npub fn handle_call_abi(\n\n isa: &dyn TargetIsa,\n\n mut inst: Inst,\n\n func: &mut Function,\n\n cfg: &ControlFlowGraph,\n\n) -> bool {\n\n let pos = &mut FuncCursor::new(func).at_inst(inst);\n\n pos.use_srcloc(inst);\n\n\n\n // Start by checking if the argument types already match the signature.\n\n let sig_ref = match check_call_signature(&pos.func.dfg, inst) {\n\n Ok(_) => return spill_call_arguments(pos),\n\n Err(s) => s,\n\n };\n\n\n\n let sig = &pos.func.dfg.signatures[sig_ref];\n\n let old_sig = &pos.func.dfg.old_signatures[sig_ref];\n\n\n\n if sig.uses_struct_return_param()\n\n && old_sig\n", "file_path": "cranelift-codegen/src/legalizer/boundary.rs", "rank": 37, "score": 174063.4244174118 }, { "content": "/// Get a function reference for the probestack function in `func`.\n\n///\n\n/// If there is an existing reference, use it, otherwise make a new one.\n\npub fn get_probestack_funcref(\n\n func: &mut Function,\n\n reg_type: Type,\n\n arg_reg: RegUnit,\n\n isa: &dyn TargetIsa,\n\n) -> FuncRef {\n\n find_funcref(LibCall::Probestack, func)\n\n .unwrap_or_else(|| make_funcref_for_probestack(func, reg_type, arg_reg, isa))\n\n}\n\n\n", "file_path": "cranelift-codegen/src/ir/libcall.rs", "rank": 38, "score": 174062.94889930714 }, { "content": "/// Eliminate unreachable code.\n\n///\n\n/// This pass deletes whole blocks that can't be reached from the entry block. 
It does not delete\n\n/// individual instructions whose results are unused.\n\n///\n\n/// The reachability analysis is performed by the dominator tree analysis.\n\npub fn eliminate_unreachable_code(\n\n func: &mut ir::Function,\n\n cfg: &mut ControlFlowGraph,\n\n domtree: &DominatorTree,\n\n) {\n\n let _tt = timing::unreachable_code();\n\n let mut pos = FuncCursor::new(func);\n\n while let Some(block) = pos.next_block() {\n\n if domtree.is_reachable(block) {\n\n continue;\n\n }\n\n\n\n debug!(\"Eliminating unreachable {}\", block);\n\n // Move the cursor out of the way and make sure the next lop iteration goes to the right\n\n // block.\n\n pos.prev_block();\n\n\n\n // Remove all instructions from `block`.\n\n while let Some(inst) = pos.func.layout.first_inst(block) {\n\n debug!(\" - {}\", pos.func.dfg.display_inst(inst, None));\n", "file_path": "cranelift-codegen/src/unreachable_code.rs", "rank": 39, "score": 174058.15369404017 }, { "content": "/// Parses the Table section of the wasm module.\n\npub fn parse_table_section(\n\n tables: TableSectionReader,\n\n environ: &mut dyn ModuleEnvironment,\n\n) -> WasmResult<()> {\n\n environ.reserve_tables(tables.get_count())?;\n\n\n\n for entry in tables {\n\n let table = entry?;\n\n environ.declare_table(Table {\n\n ty: match tabletype_to_type(table.element_type, environ)? 
{\n\n Some(t) => TableElementType::Val(t),\n\n None => TableElementType::Func,\n\n },\n\n minimum: table.limits.initial,\n\n maximum: table.limits.maximum,\n\n })?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 40, "score": 174051.73474603065 }, { "content": "/// Expand a `global_value` instruction according to the definition of the global value.\n\npub fn expand_global_value(\n\n inst: ir::Inst,\n\n func: &mut ir::Function,\n\n _cfg: &mut ControlFlowGraph,\n\n isa: &dyn TargetIsa,\n\n) {\n\n // Unpack the instruction.\n\n let gv = match func.dfg[inst] {\n\n ir::InstructionData::UnaryGlobalValue {\n\n opcode,\n\n global_value,\n\n } => {\n\n debug_assert_eq!(opcode, ir::Opcode::GlobalValue);\n\n global_value\n\n }\n\n _ => panic!(\"Wanted global_value: {}\", func.dfg.display_inst(inst, None)),\n\n };\n\n\n\n match func.global_values[gv] {\n\n ir::GlobalValueData::VMContext => vmctx_addr(inst, func),\n", "file_path": "cranelift-codegen/src/legalizer/globalvalue.rs", "rank": 41, "score": 174051.73474603065 }, { "content": "/// Legalize `sig`.\n\npub fn legalize_signature(\n\n sig: &mut Cow<ir::Signature>,\n\n triple: &Triple,\n\n _current: bool,\n\n shared_flags: &shared_settings::Flags,\n\n isa_flags: &isa_settings::Flags,\n\n) {\n\n let bits;\n\n let mut args;\n\n\n\n match triple.pointer_width().unwrap() {\n\n PointerWidth::U16 => panic!(),\n\n PointerWidth::U32 => {\n\n bits = 32;\n\n args = Args::new(bits, &[], 0, sig.call_conv, shared_flags, isa_flags);\n\n }\n\n PointerWidth::U64 => {\n\n bits = 64;\n\n args = if sig.call_conv.extends_windows_fastcall() {\n\n Args::new(\n", "file_path": "cranelift-codegen/src/isa/x86/abi.rs", "rank": 42, "score": 174051.73474603065 }, { "content": "/// Get the parameter and result types for the given Wasm blocktype.\n\npub fn blocktype_params_results(\n\n module_translation_state: &ModuleTranslationState,\n\n ty_or_ft: wasmparser::TypeOrFuncType,\n\n) -> 
WasmResult<(&[wasmparser::Type], &[wasmparser::Type])> {\n\n Ok(match ty_or_ft {\n\n wasmparser::TypeOrFuncType::Type(ty) => match ty {\n\n wasmparser::Type::I32 => (&[], &[wasmparser::Type::I32]),\n\n wasmparser::Type::I64 => (&[], &[wasmparser::Type::I64]),\n\n wasmparser::Type::F32 => (&[], &[wasmparser::Type::F32]),\n\n wasmparser::Type::F64 => (&[], &[wasmparser::Type::F64]),\n\n wasmparser::Type::V128 => (&[], &[wasmparser::Type::V128]),\n\n wasmparser::Type::AnyRef => (&[], &[wasmparser::Type::AnyRef]),\n\n wasmparser::Type::AnyFunc => (&[], &[wasmparser::Type::AnyFunc]),\n\n wasmparser::Type::NullRef => (&[], &[wasmparser::Type::NullRef]),\n\n wasmparser::Type::EmptyBlockType => (&[], &[]),\n\n ty => return Err(wasm_unsupported!(\"blocktype_params_results: type {:?}\", ty)),\n\n },\n\n wasmparser::TypeOrFuncType::FuncType(ty_index) => {\n\n let sig_idx = SignatureIndex::from_u32(ty_index);\n\n let (ref params, ref returns) = module_translation_state.wasm_types[sig_idx];\n\n (&*params, &*returns)\n\n }\n\n })\n\n}\n\n\n", "file_path": "cranelift-wasm/src/translation_utils.rs", "rank": 43, "score": 174051.73474603065 }, { "content": "/// Parses the Type section of the wasm module.\n\npub fn parse_type_section(\n\n types: TypeSectionReader,\n\n module_translation_state: &mut ModuleTranslationState,\n\n environ: &mut dyn ModuleEnvironment,\n\n) -> WasmResult<()> {\n\n let count = types.get_count();\n\n module_translation_state.wasm_types.reserve(count as usize);\n\n environ.reserve_signatures(count)?;\n\n\n\n for entry in types {\n\n match entry? 
{\n\n FuncType {\n\n form: wasmparser::Type::Func,\n\n params,\n\n returns,\n\n } => {\n\n let mut sig =\n\n Signature::new(ModuleEnvironment::target_config(environ).default_call_conv);\n\n sig.params.extend(params.iter().map(|ty| {\n\n let cret_arg: ir::Type = type_to_type(*ty, environ)\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 44, "score": 174051.73474603065 }, { "content": "/// Parses the Global section of the wasm module.\n\npub fn parse_global_section(\n\n globals: GlobalSectionReader,\n\n environ: &mut dyn ModuleEnvironment,\n\n) -> WasmResult<()> {\n\n environ.reserve_globals(globals.get_count())?;\n\n\n\n for entry in globals {\n\n let wasmparser::Global {\n\n ty: GlobalType {\n\n content_type,\n\n mutable,\n\n },\n\n init_expr,\n\n } = entry?;\n\n let mut init_expr_reader = init_expr.get_binary_reader();\n\n let initializer = match init_expr_reader.read_operator()? {\n\n Operator::I32Const { value } => GlobalInit::I32Const(value),\n\n Operator::I64Const { value } => GlobalInit::I64Const(value),\n\n Operator::F32Const { value } => GlobalInit::F32Const(value.bits()),\n\n Operator::F64Const { value } => GlobalInit::F64Const(value.bits()),\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 45, "score": 174051.73474603065 }, { "content": "/// Expand a `table_addr` instruction according to the definition of the table.\n\npub fn expand_table_addr(\n\n inst: ir::Inst,\n\n func: &mut ir::Function,\n\n _cfg: &mut ControlFlowGraph,\n\n _isa: &dyn TargetIsa,\n\n) {\n\n // Unpack the instruction.\n\n let (table, index, element_offset) = match func.dfg[inst] {\n\n ir::InstructionData::TableAddr {\n\n opcode,\n\n table,\n\n arg,\n\n offset,\n\n } => {\n\n debug_assert_eq!(opcode, ir::Opcode::TableAddr);\n\n (table, arg, offset)\n\n }\n\n _ => panic!(\"Wanted table_addr: {}\", func.dfg.display_inst(inst, None)),\n\n };\n\n\n\n dynamic_addr(inst, table, index, element_offset, func);\n\n}\n\n\n", "file_path": 
"cranelift-codegen/src/legalizer/table.rs", "rank": 46, "score": 174051.73474603065 }, { "content": "/// Some SIMD operations only operate on I8X16 in CLIF; this will convert them to that type by\n\n/// adding a raw_bitcast if necessary.\n\npub fn optionally_bitcast_vector(\n\n value: Value,\n\n needed_type: Type,\n\n builder: &mut FunctionBuilder,\n\n) -> Value {\n\n if builder.func.dfg.value_type(value) != needed_type {\n\n builder.ins().raw_bitcast(needed_type, value)\n\n } else {\n\n value\n\n }\n\n}\n\n\n", "file_path": "cranelift-wasm/src/code_translator.rs", "rank": 47, "score": 174051.73474603065 }, { "content": "/// Legalize `sig` for RISC-V.\n\npub fn legalize_signature(\n\n sig: &mut Cow<ir::Signature>,\n\n triple: &Triple,\n\n isa_flags: &settings::Flags,\n\n current: bool,\n\n) {\n\n let bits = triple.pointer_width().unwrap().bits();\n\n\n\n let mut args = Args::new(bits, isa_flags.enable_e());\n\n if let Some(new_params) = legalize_args(&sig.params, &mut args) {\n\n sig.to_mut().params = new_params;\n\n }\n\n\n\n let mut rets = Args::new(bits, isa_flags.enable_e());\n\n if let Some(new_returns) = legalize_args(&sig.returns, &mut rets) {\n\n sig.to_mut().returns = new_returns;\n\n }\n\n\n\n if current {\n\n let ptr = Type::int(u16::from(bits)).unwrap();\n", "file_path": "cranelift-codegen/src/isa/riscv/abi.rs", "rank": 48, "score": 174051.73474603065 }, { "content": "pub fn i_Shift_R(\n\n is64: bool,\n\n kind: ShiftKind,\n\n nBits: u8, // 1 .. 
#bits-in-type - 1, or 0 to mean \"%cl\"\n\n wdst: Writable<Reg>,\n\n) -> Inst {\n\n let dst = wdst.to_reg();\n\n debug_assert!(nBits < if is64 { 64 } else { 32 });\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n Inst::Shift_R {\n\n is64,\n\n kind,\n\n nBits,\n\n dst,\n\n }\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 49, "score": 174051.73474603065 }, { "content": "/// Parses the Function section of the wasm module.\n\npub fn parse_function_section(\n\n functions: FunctionSectionReader,\n\n environ: &mut dyn ModuleEnvironment,\n\n) -> WasmResult<()> {\n\n let num_functions = functions.get_count();\n\n if num_functions == std::u32::MAX {\n\n // We reserve `u32::MAX` for our own use in cranelift-entity.\n\n return Err(WasmError::ImplLimitExceeded);\n\n }\n\n\n\n environ.reserve_func_types(num_functions)?;\n\n\n\n for entry in functions {\n\n let sigindex = entry?;\n\n environ.declare_func_type(SignatureIndex::from_u32(sigindex))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 50, "score": 174051.73474603065 }, { "content": "/// Expand a `heap_addr` instruction according to the definition of the heap.\n\npub fn expand_heap_addr(\n\n inst: ir::Inst,\n\n func: &mut ir::Function,\n\n cfg: &mut ControlFlowGraph,\n\n isa: &dyn TargetIsa,\n\n) {\n\n // Unpack the instruction.\n\n let (heap, offset, access_size) = match func.dfg[inst] {\n\n ir::InstructionData::HeapAddr {\n\n opcode,\n\n heap,\n\n arg,\n\n imm,\n\n } => {\n\n debug_assert_eq!(opcode, ir::Opcode::HeapAddr);\n\n (heap, arg, imm.into())\n\n }\n\n _ => panic!(\"Wanted heap_addr: {}\", func.dfg.display_inst(inst, None)),\n\n };\n\n\n", "file_path": "cranelift-codegen/src/legalizer/heap.rs", "rank": 51, "score": 174051.73474603065 }, { "content": "/// Parses the Memory section of the wasm module.\n\npub fn parse_memory_section(\n\n memories: MemorySectionReader,\n\n environ: &mut dyn ModuleEnvironment,\n\n) -> 
WasmResult<()> {\n\n environ.reserve_memories(memories.get_count())?;\n\n\n\n for entry in memories {\n\n let memory = entry?;\n\n environ.declare_memory(Memory {\n\n minimum: memory.limits.initial,\n\n maximum: memory.limits.maximum,\n\n shared: memory.shared,\n\n })?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 52, "score": 174051.73474603065 }, { "content": "/// Rearrange the elements of the mutable slice `s` such that elements where `p(t)` is true precede\n\n/// the elements where `p(t)` is false.\n\n///\n\n/// The order of elements is not preserved, unless the slice is already partitioned.\n\n///\n\n/// Returns the number of elements where `p(t)` is true.\n\npub fn partition_slice<T, F>(s: &mut [T], mut p: F) -> usize\n\nwhere\n\n F: FnMut(&T) -> bool,\n\n{\n\n // The iterator works like a deque which we can pop from both ends.\n\n let mut i = s.iter_mut();\n\n\n\n // Number of elements for which the predicate is known to be true.\n\n let mut pos = 0;\n\n\n\n loop {\n\n // Find the first element for which the predicate fails.\n\n let head = loop {\n\n match i.next() {\n\n Some(head) => {\n\n if !p(&head) {\n\n break head;\n\n }\n\n }\n\n None => return pos,\n", "file_path": "cranelift-codegen/src/partition_slice.rs", "rank": 53, "score": 173088.6357418144 }, { "content": "fn block_ranges(indices: &[InstIx], len: usize) -> Vec<(usize, usize)> {\n\n let v = indices\n\n .iter()\n\n .map(|iix| iix.get() as usize)\n\n .chain(iter::once(len))\n\n .collect::<Vec<usize>>();\n\n v.windows(2).map(|p| (p[0], p[1])).collect()\n\n}\n\n\n", "file_path": "cranelift-codegen/src/machinst/vcode.rs", "rank": 54, "score": 172975.11310737938 }, { "content": "pub fn i_Cmp_RMI_R(\n\n size: u8, // 1, 2, 4 or 8\n\n src: RMI,\n\n dst: Reg,\n\n) -> Inst {\n\n debug_assert!(size == 8 || size == 4 || size == 2 || size == 1);\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n Inst::Cmp_RMI_R { size, src, dst }\n\n}\n\n\n", 
"file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 55, "score": 171678.84559699075 }, { "content": "pub fn i_Mov_R_M(\n\n size: u8, // 1, 2, 4 or 8\n\n src: Reg,\n\n addr: Addr,\n\n) -> Inst {\n\n debug_assert!(size == 8 || size == 4 || size == 2 || size == 1);\n\n debug_assert!(src.get_class() == RegClass::I64);\n\n Inst::Mov_R_M { size, src, addr }\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 56, "score": 171678.84559699075 }, { "content": "#[cfg(feature = \"unwind\")]\n\npub fn emit_unwind_info(\n\n func: &ir::Function,\n\n isa: &dyn TargetIsa,\n\n kind: FrameUnwindKind,\n\n sink: &mut dyn FrameUnwindSink,\n\n) {\n\n match kind {\n\n FrameUnwindKind::Fastcall => {\n\n // Assumption: RBP is being used as the frame pointer\n\n // In the future, Windows fastcall codegen should usually omit the frame pointer\n\n if let Some(info) = UnwindInfo::try_from_func(func, isa, Some(RU::rbp.into())) {\n\n info.emit(sink);\n\n }\n\n }\n\n FrameUnwindKind::Libunwind => {\n\n if func.frame_layout.is_some() {\n\n emit_fde(func, isa, sink);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "cranelift-codegen/src/isa/x86/abi.rs", "rank": 57, "score": 171678.84559699075 }, { "content": "/// Parse an iterator of command line options and apply them to `config`.\n\npub fn parse_options<'a, I>(\n\n iter: I,\n\n config: &mut dyn Configurable,\n\n loc: Location,\n\n) -> ParseResult<()>\n\nwhere\n\n I: Iterator<Item = &'a str>,\n\n{\n\n for opt in iter.map(TestOption::new) {\n\n match opt {\n\n TestOption::Flag(name) => match config.enable(name) {\n\n Ok(_) => {}\n\n Err(SetError::BadName(name)) => return err!(loc, \"unknown flag '{}'\", name),\n\n Err(_) => return err!(loc, \"not a boolean flag: '{}'\", opt),\n\n },\n\n TestOption::Value(name, value) => match config.set(name, value) {\n\n Ok(_) => {}\n\n Err(SetError::BadName(name)) => return err!(loc, \"unknown setting '{}'\", name),\n\n Err(SetError::BadType) => return err!(loc, \"invalid setting type: 
'{}'\", opt),\n\n Err(SetError::BadValue(expected)) => {\n", "file_path": "cranelift-reader/src/isaspec.rs", "rank": 58, "score": 169639.33755543776 }, { "content": "pub fn print_bytes(mem: &[u8]) {\n\n print!(\".byte \");\n\n let mut first = true;\n\n for byte in mem.iter() {\n\n if first {\n\n first = false;\n\n } else {\n\n print!(\", \");\n\n }\n\n print!(\"{}\", byte);\n\n }\n\n println!();\n\n}\n\n\n", "file_path": "src/disasm.rs", "rank": 59, "score": 169639.33755543776 }, { "content": "/// Pretty-print a verifier error.\n\npub fn pretty_verifier_error<'a>(\n\n func: &ir::Function,\n\n isa: Option<&dyn TargetIsa>,\n\n func_w: Option<Box<dyn FuncWriter + 'a>>,\n\n errors: VerifierErrors,\n\n) -> String {\n\n let mut errors = errors.0;\n\n let mut w = String::new();\n\n let num_errors = errors.len();\n\n\n\n decorate_function(\n\n &mut PrettyVerifierError(func_w.unwrap_or_else(|| Box::new(PlainWriter)), &mut errors),\n\n &mut w,\n\n func,\n\n &isa.into(),\n\n )\n\n .unwrap();\n\n\n\n writeln!(\n\n w,\n\n \"\\n; {} verifier error{} detected (see above). 
Compilation aborted.\",\n\n num_errors,\n\n if num_errors == 1 { \"\" } else { \"s\" }\n\n )\n\n .unwrap();\n\n\n\n w\n\n}\n\n\n", "file_path": "cranelift-codegen/src/print_errors.rs", "rank": 60, "score": 168973.17190016102 }, { "content": "pub fn i_Ret() -> Inst {\n\n Inst::Ret {}\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 61, "score": 168973.17190016102 }, { "content": "/// Translate a sequence of bytes forming a valid Wasm binary into a list of valid Cranelift IR\n\n/// [`Function`](cranelift_codegen::ir::Function).\n\npub fn translate_module<'data>(\n\n data: &'data [u8],\n\n environ: &mut dyn ModuleEnvironment<'data>,\n\n) -> WasmResult<ModuleTranslationState> {\n\n let _tt = timing::wasm_translate_module();\n\n let mut reader = ModuleReader::new(data)?;\n\n let mut module_translation_state = ModuleTranslationState::new();\n\n\n\n while !reader.eof() {\n\n let section = reader.read()?;\n\n match section.content()? {\n\n SectionContent::Type(types) => {\n\n parse_type_section(types, &mut module_translation_state, environ)?;\n\n }\n\n\n\n SectionContent::Import(imports) => {\n\n parse_import_section(imports, environ)?;\n\n }\n\n\n\n SectionContent::Function(functions) => {\n", "file_path": "cranelift-wasm/src/module_translator.rs", "rank": 62, "score": 168973.17190016102 }, { "content": "#[allow(dead_code)]\n\npub fn has_length_of(value_list: &ir::ValueList, num: usize, func: &ir::Function) -> bool {\n\n value_list.len(&func.dfg.value_lists) == num\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn cvt_u32() {\n\n let x1 = 0u32;\n\n let x2 = 1u32;\n\n let x3 = 0xffff_fff0u32;\n\n\n\n assert!(is_signed_int(x1, 1, 0));\n\n assert!(is_signed_int(x1, 2, 1));\n\n assert!(is_signed_int(x2, 2, 0));\n\n assert!(!is_signed_int(x2, 2, 1));\n\n\n\n // `u32` doesn't sign-extend when converted to `i64`.\n", "file_path": "cranelift-codegen/src/predicates.rs", "rank": 63, "score": 167811.15635274095 }, { "content": 
"pub fn print_readonly_data(mem: &[u8]) {\n\n if mem.is_empty() {\n\n return;\n\n }\n\n\n\n println!(\"\\nFollowed by {} bytes of read-only data:\", mem.len());\n\n\n\n for (i, byte) in mem.iter().enumerate() {\n\n if i % 16 == 0 {\n\n if i != 0 {\n\n println!();\n\n }\n\n print!(\"{:4}: \", i);\n\n }\n\n if i % 4 == 0 {\n\n print!(\" \");\n\n }\n\n print!(\"{:02x} \", byte);\n\n }\n\n println!();\n\n}\n", "file_path": "src/disasm.rs", "rank": 64, "score": 167008.18434506003 }, { "content": "/// Parses the Name section of the wasm module.\n\npub fn parse_name_section<'data>(\n\n mut names: NameSectionReader<'data>,\n\n environ: &mut dyn ModuleEnvironment<'data>,\n\n) -> WasmResult<()> {\n\n while let Ok(subsection) = names.read() {\n\n match subsection {\n\n wasmparser::Name::Function(function_subsection) => {\n\n if let Some(function_names) = function_subsection\n\n .get_map()\n\n .ok()\n\n .and_then(parse_function_name_subsection)\n\n {\n\n for (index, name) in function_names {\n\n environ.declare_func_name(index, name)?;\n\n }\n\n }\n\n return Ok(());\n\n }\n\n wasmparser::Name::Local(_) | wasmparser::Name::Module(_) => {}\n\n };\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 65, "score": 166600.2827511211 }, { "content": "/// Parses the Data section of the wasm module.\n\npub fn parse_data_section<'data>(\n\n data: DataSectionReader<'data>,\n\n environ: &mut dyn ModuleEnvironment<'data>,\n\n) -> WasmResult<()> {\n\n environ.reserve_data_initializers(data.get_count())?;\n\n\n\n for (index, entry) in data.into_iter().enumerate() {\n\n let Data { kind, data } = entry?;\n\n match kind {\n\n DataKind::Active {\n\n memory_index,\n\n init_expr,\n\n } => {\n\n let mut init_expr_reader = init_expr.get_binary_reader();\n\n let (base, offset) = match init_expr_reader.read_operator()? 
{\n\n Operator::I32Const { value } => (None, value as u32 as usize),\n\n Operator::GlobalGet { global_index } => {\n\n (Some(GlobalIndex::from_u32(global_index)), 0)\n\n }\n\n ref s => {\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 66, "score": 166600.2827511211 }, { "content": "pub fn reg_RDI() -> Reg {\n\n info_RDI().0.to_reg()\n\n}\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 67, "score": 166600.2827511211 }, { "content": "pub fn reg_RBP() -> Reg {\n\n info_RBP().0.to_reg()\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 68, "score": 166600.2827511211 }, { "content": "pub fn reg_RSI() -> Reg {\n\n info_RSI().0.to_reg()\n\n}\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 69, "score": 166600.2827511211 }, { "content": "pub fn reg_R8() -> Reg {\n\n info_R8().0.to_reg()\n\n}\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 70, "score": 166600.2827511211 }, { "content": "pub fn reg_RDX() -> Reg {\n\n info_RDX().0.to_reg()\n\n}\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 71, "score": 166600.2827511211 }, { "content": "/// Parses the Import section of the wasm module.\n\npub fn parse_import_section<'data>(\n\n imports: ImportSectionReader<'data>,\n\n environ: &mut dyn ModuleEnvironment<'data>,\n\n) -> WasmResult<()> {\n\n environ.reserve_imports(imports.get_count())?;\n\n\n\n for entry in imports {\n\n let import = entry?;\n\n let module_name = import.module;\n\n let field_name = import.field;\n\n\n\n match import.ty {\n\n ImportSectionEntryType::Function(sig) => {\n\n environ.declare_func_import(\n\n SignatureIndex::from_u32(sig),\n\n module_name,\n\n field_name,\n\n )?;\n\n }\n\n ImportSectionEntryType::Memory(MemoryType {\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 72, "score": 166600.2827511211 }, { "content": "/// Parses the Element section of the wasm module.\n\npub fn parse_element_section<'data>(\n\n 
elements: ElementSectionReader<'data>,\n\n environ: &mut dyn ModuleEnvironment,\n\n) -> WasmResult<()> {\n\n environ.reserve_table_elements(elements.get_count())?;\n\n\n\n for (index, entry) in elements.into_iter().enumerate() {\n\n let Element { kind, items, ty } = entry?;\n\n if ty != Type::AnyFunc {\n\n return Err(wasm_unsupported!(\n\n \"unsupported table element type: {:?}\",\n\n ty\n\n ));\n\n }\n\n let segments = read_elems(&items)?;\n\n match kind {\n\n ElementKind::Active {\n\n table_index,\n\n init_expr,\n\n } => {\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 73, "score": 166600.2827511211 }, { "content": "/// Parses the Export section of the wasm module.\n\npub fn parse_export_section<'data>(\n\n exports: ExportSectionReader<'data>,\n\n environ: &mut dyn ModuleEnvironment<'data>,\n\n) -> WasmResult<()> {\n\n environ.reserve_exports(exports.get_count())?;\n\n\n\n for entry in exports {\n\n let Export {\n\n field,\n\n ref kind,\n\n index,\n\n } = entry?;\n\n\n\n // The input has already been validated, so we should be able to\n\n // assume valid UTF-8 and use `from_utf8_unchecked` if performance\n\n // becomes a concern here.\n\n let index = index as usize;\n\n match *kind {\n\n ExternalKind::Function => environ.declare_func_export(FuncIndex::new(index), field)?,\n\n ExternalKind::Table => environ.declare_table_export(TableIndex::new(index), field)?,\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 74, "score": 166600.2827511211 }, { "content": "pub fn reg_RCX() -> Reg {\n\n info_RCX().0.to_reg()\n\n}\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 75, "score": 166600.2827511211 }, { "content": "// For external consumption. It's probably important that LLVM optimises\n\n// these into a 32-bit constant. 
That will require sprinkling a bunch of\n\n// inline-always pragmas around the place.\n\npub fn reg_RAX() -> Reg {\n\n info_RAX().0.to_reg()\n\n}\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 76, "score": 166600.2827511211 }, { "content": "/// Parses the Code section of the wasm module.\n\npub fn parse_code_section<'data>(\n\n code: CodeSectionReader<'data>,\n\n module_translation_state: &ModuleTranslationState,\n\n environ: &mut dyn ModuleEnvironment<'data>,\n\n) -> WasmResult<()> {\n\n for body in code {\n\n let mut reader = body?.get_binary_reader();\n\n let size = reader.bytes_remaining();\n\n let offset = reader.original_position();\n\n environ.define_function_body(module_translation_state, reader.read_bytes(size)?, offset)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift-wasm/src/sections_translator.rs", "rank": 77, "score": 166600.2827511211 }, { "content": "pub fn reg_RSP() -> Reg {\n\n info_RSP().0.to_reg()\n\n}\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 78, "score": 166600.2827511211 }, { "content": "pub fn reg_R9() -> Reg {\n\n info_R9().0.to_reg()\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 79, "score": 166600.2827511211 }, { "content": "/// Get a reference to the \"spill temp\" register. This register is used to\n\n/// compute the address of a spill slot when a direct offset addressing mode from\n\n/// FP is not sufficient (+/- 2^11 words). 
We exclude this register from regalloc\n\n/// and reserve it for this purpose for simplicity; otherwise we need a\n\n/// multi-stage analysis where we first determine how many spill slots we have,\n\n/// then perhaps remove the reg from the pool and recompute regalloc.\n\npub fn spilltmp_reg() -> Reg {\n\n xreg(15)\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/arm64/inst/regs.rs", "rank": 80, "score": 164346.93372815652 }, { "content": "/// Builds ranges and location for specified value labels.\n\n/// The labels specified at DataFlowGraph's values_labels collection.\n\npub fn build_value_labels_ranges<T>(\n\n func: &Function,\n\n regalloc: &Context,\n\n isa: &dyn TargetIsa,\n\n) -> ValueLabelsRanges\n\nwhere\n\n T: From<SourceLoc> + Deref<Target = SourceLoc> + Ord + Copy,\n\n{\n\n let values_labels = build_value_labels_index::<T>(func);\n\n\n\n let mut blocks = func.layout.blocks().collect::<Vec<_>>();\n\n blocks.sort_by_key(|block| func.offsets[*block]); // Ensure inst offsets always increase\n\n let encinfo = isa.encoding_info();\n\n let values_locations = &func.locations;\n\n let liveness_ranges = regalloc.liveness().ranges();\n\n\n\n let mut ranges = HashMap::new();\n\n let mut add_range = |label, range: (u32, u32), loc: ValueLoc| {\n\n if range.0 >= range.1 || !loc.is_assigned() {\n\n return;\n", "file_path": "cranelift-codegen/src/value_label.rs", "rank": 81, "score": 164342.49299691487 }, { "content": "/// Get a reference to the frame pointer (x29).\n\npub fn fp_reg() -> Reg {\n\n xreg(29)\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/arm64/inst/regs.rs", "rank": 82, "score": 164342.49299691487 }, { "content": "/// Get a reference to the zero-register.\n\npub fn zero_reg() -> Reg {\n\n // This should be the same as what xreg(31) returns, except that\n\n // we use the special index into the register index space.\n\n Reg::new_real(\n\n RegClass::I64,\n\n /* enc = */ 31,\n\n /* index = */ ZERO_REG_INDEX,\n\n )\n\n}\n\n\n", "file_path": 
"cranelift-codegen/src/isa/arm64/inst/regs.rs", "rank": 83, "score": 164342.49299691487 }, { "content": "/// Get a reference to the link register (x30).\n\npub fn link_reg() -> Reg {\n\n xreg(30)\n\n}\n\n\n", "file_path": "cranelift-codegen/src/isa/arm64/inst/regs.rs", "rank": 84, "score": 164342.49299691487 }, { "content": "/// Default names for `ir::LibCall`s. A function by this name is imported into the object as\n\n/// part of the translation of a `ir::ExternalName::LibCall` variant.\n\npub fn default_libcall_names() -> Box<dyn Fn(ir::LibCall) -> String> {\n\n Box::new(move |libcall| match libcall {\n\n ir::LibCall::Probestack => \"__cranelift_probestack\".to_owned(),\n\n ir::LibCall::CeilF32 => \"ceilf\".to_owned(),\n\n ir::LibCall::CeilF64 => \"ceil\".to_owned(),\n\n ir::LibCall::FloorF32 => \"floorf\".to_owned(),\n\n ir::LibCall::FloorF64 => \"floor\".to_owned(),\n\n ir::LibCall::TruncF32 => \"truncf\".to_owned(),\n\n ir::LibCall::TruncF64 => \"trunc\".to_owned(),\n\n ir::LibCall::NearestF32 => \"nearbyintf\".to_owned(),\n\n ir::LibCall::NearestF64 => \"nearbyint\".to_owned(),\n\n ir::LibCall::Memcpy => \"memcpy\".to_owned(),\n\n ir::LibCall::Memset => \"memset\".to_owned(),\n\n ir::LibCall::Memmove => \"memmove\".to_owned(),\n\n })\n\n}\n", "file_path": "cranelift-module/src/backend.rs", "rank": 85, "score": 163140.18853516545 }, { "content": "pub fn run(files: &[String]) -> CommandResult {\n\n for (i, f) in files.iter().enumerate() {\n\n if i != 0 {\n\n println!();\n\n }\n\n cat_one(f)?\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cat.rs", "rank": 86, "score": 163056.8792318223 }, { "content": "/// Compile the given function down to VCode with allocated registers, ready\n\n/// for binary emission.\n\npub fn compile<B: LowerBackend>(\n\n f: &mut Function,\n\n b: &B,\n\n abi: Box<dyn ABIBody<B::MInst>>,\n\n) -> VCode<B::MInst>\n\nwhere\n\n B::MInst: ShowWithRRU,\n\n{\n\n // This lowers the CL IR.\n\n let mut vcode = Lower::new(f, abi).lower(b);\n\n\n\n 
let universe = &B::MInst::reg_universe();\n\n\n\n debug!(\"vcode from lowering: \\n{}\", vcode.show_rru(Some(universe)));\n\n\n\n // Perform register allocation.\n\n let result = allocate_registers(&mut vcode, RegAllocAlgorithm::Backtracking, universe)\n\n .expect(\"register allocation\");\n\n\n\n // Reorder vcode into final order and copy out final instruction sequence\n", "file_path": "cranelift-codegen/src/machinst/compile.rs", "rank": 87, "score": 162138.27499854352 }, { "content": "/// Writes `func` to `w` as text.\n\n/// write_function_plain is passed as 'closure' to print instructions as text.\n\n/// pretty_function_error is passed as 'closure' to add error decoration.\n\npub fn decorate_function<FW: FuncWriter>(\n\n func_w: &mut FW,\n\n w: &mut dyn Write,\n\n func: &Function,\n\n annotations: &DisplayFunctionAnnotations,\n\n) -> fmt::Result {\n\n let regs = annotations.isa.map(TargetIsa::register_info);\n\n let regs = regs.as_ref();\n\n\n\n write!(w, \"function \")?;\n\n write_spec(w, func, regs)?;\n\n writeln!(w, \" {{\")?;\n\n let aliases = alias_map(func);\n\n let mut any = func_w.write_preamble(w, func, regs)?;\n\n for block in &func.layout {\n\n if any {\n\n writeln!(w)?;\n\n }\n\n decorate_block(func_w, w, func, &aliases, annotations, block)?;\n\n any = true;\n\n }\n\n writeln!(w, \"}}\")\n\n}\n\n\n\n//----------------------------------------------------------------------\n\n//\n\n// Function spec.\n\n\n", "file_path": "cranelift-codegen/src/write.rs", "rank": 88, "score": 162138.27499854352 }, { "content": "#[allow(dead_code)]\n\npub fn is_all_zeroes(x: &ConstantData) -> bool {\n\n x.iter().all(|&f| f == 0)\n\n}\n\n\n\n/// Check that a constant contains all ones.\n", "file_path": "cranelift-codegen/src/predicates.rs", "rank": 89, "score": 160559.8590343457 }, { "content": "#[allow(dead_code)]\n\npub fn is_all_ones(x: &ConstantData) -> bool {\n\n x.iter().all(|&f| f == 0xff)\n\n}\n\n\n\n/// Check that `x` is the same as `y`.\n", "file_path": 
"cranelift-codegen/src/predicates.rs", "rank": 90, "score": 160559.8590343457 }, { "content": "pub fn run(files: &[String]) -> CommandResult {\n\n for (i, f) in files.iter().enumerate() {\n\n if i != 0 {\n\n println!();\n\n }\n\n print_cfg(f)?\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/print_cfg.rs", "rank": 91, "score": 160559.8590343457 }, { "content": "/// Create the register universe for X64.\n\npub fn create_reg_universe() -> RealRegUniverse {\n\n let mut regs = Vec::<(RealReg, String)>::new();\n\n let mut allocable_by_class = [None; NUM_REG_CLASSES];\n\n\n\n // Integer regs\n\n let mut base = regs.len();\n\n // Callee-saved, in the ELF x86_64 ABI\n\n regs.push(info_R12());\n\n regs.push(info_R13());\n\n regs.push(info_R14());\n\n regs.push(info_R15());\n\n regs.push(info_RBX());\n\n // Caller-saved, in the ELF x86_64 ABI\n\n regs.push(info_RSI());\n\n regs.push(info_RDI());\n\n regs.push(info_RAX());\n\n regs.push(info_RCX());\n\n regs.push(info_RDX());\n\n regs.push(info_R8());\n\n regs.push(info_R9());\n", "file_path": "cranelift-codegen/src/isa/x64/inst.rs", "rank": 92, "score": 160140.26328467875 }, { "content": "/// Create the register universe for ARM64.\n\npub fn create_reg_universe() -> RealRegUniverse {\n\n let mut regs = vec![];\n\n let mut allocable_by_class = [None; NUM_REG_CLASSES];\n\n\n\n // Numbering Scheme: we put V-regs first, then X-regs. 
The X-regs\n\n // exclude several registers: x18 (globally reserved for platform-specific\n\n // purposes), x29 (frame pointer), x30 (link register), x31 (stack pointer\n\n // or zero register, depending on context).\n\n\n\n let v_reg_base = 0u8; // in contiguous real-register index space\n\n let v_reg_count = 32;\n\n for i in 0u8..v_reg_count {\n\n let reg = Reg::new_real(\n\n RegClass::V128,\n\n /* enc = */ i,\n\n /* index = */ v_reg_base + i,\n\n )\n\n .to_real_reg();\n\n let name = format!(\"v{}\", i);\n\n regs.push((reg, name));\n", "file_path": "cranelift-codegen/src/isa/arm64/inst/regs.rs", "rank": 93, "score": 158181.654860693 }, { "content": "/// Try to remove an block.\n\nstruct RemoveBlock {\n\n block: Block,\n\n}\n\n\n\nimpl RemoveBlock {\n\n fn new(func: &Function) -> Self {\n\n Self {\n\n block: func.layout.entry_block().unwrap(),\n\n }\n\n }\n\n}\n\n\n\nimpl Mutator for RemoveBlock {\n\n fn name(&self) -> &'static str {\n\n \"remove block\"\n\n }\n\n\n\n fn mutation_count(&self, func: &Function) -> usize {\n\n block_count(func)\n\n }\n", "file_path": "src/bugpoint.rs", "rank": 94, "score": 156347.55293510697 }, { "content": "struct MergeBlocks {\n\n block: Block,\n\n prev_block: Option<Block>,\n\n}\n\n\n\nimpl MergeBlocks {\n\n fn new(func: &Function) -> Self {\n\n Self {\n\n block: func.layout.entry_block().unwrap(),\n\n prev_block: None,\n\n }\n\n }\n\n}\n\n\n\nimpl Mutator for MergeBlocks {\n\n fn name(&self) -> &'static str {\n\n \"merge blocks\"\n\n }\n\n\n\n fn mutation_count(&self, func: &Function) -> usize {\n", "file_path": "src/bugpoint.rs", "rank": 95, "score": 156341.2548777114 }, { "content": "struct BlockHeaderSSABlockData {\n\n // The predecessors of the Block header block, with the block and branch instruction.\n\n predecessors: PredBlockSmallVec,\n\n // A block header block is sealed if all of its predecessors have been declared.\n\n sealed: bool,\n\n // The block which this block is part of.\n\n block: Block,\n\n // List of current 
Block arguments for which an earlier def has not been found yet.\n\n undef_variables: Vec<(Variable, Value)>,\n\n}\n\n\n\n/// A opaque reference to a basic block.\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug)]\n\npub struct SSABlock(u32);\n\nimpl EntityRef for SSABlock {\n\n fn new(index: usize) -> Self {\n\n debug_assert!(index < (u32::MAX as usize));\n\n Self(index as u32)\n\n }\n\n\n", "file_path": "cranelift-frontend/src/ssa.rs", "rank": 96, "score": 156123.54870152034 }, { "content": "pub fn magic_u32(d: u32) -> MU32 {\n\n debug_assert_ne!(d, 0);\n\n debug_assert_ne!(d, 1); // d==1 generates out of range shifts.\n\n\n\n let mut do_add: bool = false;\n\n let mut p: i32 = 31;\n\n let nc: u32 = 0xFFFFFFFFu32 - u32::wrapping_neg(d) % d;\n\n let mut q1: u32 = 0x80000000u32 / nc;\n\n let mut r1: u32 = 0x80000000u32 - q1 * nc;\n\n let mut q2: u32 = 0x7FFFFFFFu32 / d;\n\n let mut r2: u32 = 0x7FFFFFFFu32 - q2 * d;\n\n loop {\n\n p = p + 1;\n\n if r1 >= nc - r1 {\n\n q1 = u32::wrapping_add(u32::wrapping_mul(2, q1), 1);\n\n r1 = u32::wrapping_sub(u32::wrapping_mul(2, r1), nc);\n\n } else {\n\n q1 = u32::wrapping_mul(2, q1);\n\n r1 = 2 * r1;\n\n }\n", "file_path": "cranelift-codegen/src/divconst_magic_numbers.rs", "rank": 97, "score": 155929.18013109954 }, { "content": "/// Convert the string `s` to CamelCase.\n\npub fn camel_case(s: &str) -> String {\n\n let mut output_chars = String::with_capacity(s.len());\n\n\n\n let mut capitalize = true;\n\n for curr_char in s.chars() {\n\n if curr_char == '_' {\n\n capitalize = true;\n\n } else {\n\n if capitalize {\n\n output_chars.extend(curr_char.to_uppercase());\n\n } else {\n\n output_chars.push(curr_char);\n\n }\n\n capitalize = false;\n\n }\n\n }\n\n\n\n output_chars\n\n}\n\n\n", "file_path": "cranelift-codegen/meta/src/cdsl/mod.rs", "rank": 98, "score": 155929.18013109954 }, { "content": "pub fn magic_u64(d: u64) -> MU64 {\n\n debug_assert_ne!(d, 0);\n\n debug_assert_ne!(d, 1); // d==1 generates out of range 
shifts.\n\n\n\n let mut do_add: bool = false;\n\n let mut p: i32 = 63;\n\n let nc: u64 = 0xFFFFFFFFFFFFFFFFu64 - u64::wrapping_neg(d) % d;\n\n let mut q1: u64 = 0x8000000000000000u64 / nc;\n\n let mut r1: u64 = 0x8000000000000000u64 - q1 * nc;\n\n let mut q2: u64 = 0x7FFFFFFFFFFFFFFFu64 / d;\n\n let mut r2: u64 = 0x7FFFFFFFFFFFFFFFu64 - q2 * d;\n\n loop {\n\n p = p + 1;\n\n if r1 >= nc - r1 {\n\n q1 = u64::wrapping_add(u64::wrapping_mul(2, q1), 1);\n\n r1 = u64::wrapping_sub(u64::wrapping_mul(2, r1), nc);\n\n } else {\n\n q1 = u64::wrapping_mul(2, q1);\n\n r1 = 2 * r1;\n\n }\n", "file_path": "cranelift-codegen/src/divconst_magic_numbers.rs", "rank": 99, "score": 155929.18013109954 } ]